From c3a31a65127a7da671441af645a1fc47c1a74e8b Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Mon, 13 Jan 2025 12:57:03 +0800 Subject: [PATCH 01/18] lib: add node:metrics module This takes ideas from various ecosystem metrics modules along with some ideas from the Swift metrics provider. The idea being to provide a generic metrics interface which publishes diagnostics_channel events and allowing consumers to listen to those events and either report them directly or aggregate them in some way. --- doc/api/diagnostics_channel.md | 29 + doc/api/index.md | 1 + doc/api/metrics.md | 1040 ++++++++++++++++++ lib/diagnostics_channel.js | 5 + lib/internal/bootstrap/realm.js | 1 + lib/metrics.js | 848 ++++++++++++++ test/parallel/test-metrics-counter.js | 41 + test/parallel/test-metrics-gauge.js | 39 + test/parallel/test-metrics-meter.js | 43 + test/parallel/test-metrics-metric-report.js | 31 + test/parallel/test-metrics-metric.js | 27 + test/parallel/test-metrics-periodic-gauge.js | 50 + test/parallel/test-metrics-streams.js | 118 ++ test/parallel/test-metrics-timer.js | 57 + test/parallel/test-metrics-unique-set.js | 48 + 15 files changed, 2378 insertions(+) create mode 100644 doc/api/metrics.md create mode 100644 lib/metrics.js create mode 100644 test/parallel/test-metrics-counter.js create mode 100644 test/parallel/test-metrics-gauge.js create mode 100644 test/parallel/test-metrics-meter.js create mode 100644 test/parallel/test-metrics-metric-report.js create mode 100644 test/parallel/test-metrics-metric.js create mode 100644 test/parallel/test-metrics-periodic-gauge.js create mode 100644 test/parallel/test-metrics-streams.js create mode 100644 test/parallel/test-metrics-timer.js create mode 100644 test/parallel/test-metrics-unique-set.js diff --git a/doc/api/diagnostics_channel.md b/doc/api/diagnostics_channel.md index 1c5e2b6535adbf..9d47174388a4f0 100644 --- a/doc/api/diagnostics_channel.md +++ b/doc/api/diagnostics_channel.md @@ -132,6 +132,35 @@ if (diagnostics_channel.hasSubscribers('my-channel')) { } ``` +#### `diagnostics_channel.hasChannel(name)` + + + +* `name` {string|symbol} The channel name + +* Returns: {boolean} If the channel exists + +Check if a channel with the given name exists. This is useful to check if a +channel has been created to add additional logic if it's the first time. + +```mjs +import diagnostics_channel from 'node:diagnostics_channel'; + +if (!diagnostics_channel.hasChannel('my-channel')) { + // Channel does not exist yet, do additional setup +} +``` + +```cjs +const diagnostics_channel = require('node:diagnostics_channel'); + +if (!diagnostics_channel.hasChannel('my-channel')) { + // Channel does not exist yet, do additional setup +} +``` + #### `diagnostics_channel.channel(name)` + +> Stability: 1 - Experimental + + + +The `node:metrics` module provides an API for application instrumentation and +performance monitoring. It offers various metric types and built-in exporters +for popular monitoring systems. + +The module can be accessed using: + +```mjs +import * as metrics from 'node:metrics'; +``` + +```cjs +const metrics = require('node:metrics'); +``` + +## Overview + +The metrics API enables developers to instrument their applications with custom +metrics that can be collected and exported to monitoring systems. All metrics +publish their data through the `node:diagnostics_channel` module, allowing for +flexible consumption patterns. + +### Example + +```mjs +import { counter, timer, statsdStream } from 'node:metrics'; +import { createWriteStream } from 'node:fs'; + +// Create a counter metric +const apiCalls = counter('api.calls', { service: 'web' }); + +// Create a timer factory +const requestTimer = timer('api.request.duration', { service: 'web' }); + +// Export metrics to StatsD format +const statsd = statsdStream(); +statsd.pipe(createWriteStream('metrics.log')); + +// Use metrics in your application +function handleRequest(req, res) { + const timer = requestTimer.create({ endpoint: req.url }); + + apiCalls.increment(); + + // Process request... + + timer.stop(); +} +``` + +```cjs +const { counter, timer, statsdStream } = require('node:metrics'); +const { createWriteStream } = require('node:fs'); + +// Create a counter metric +const apiCalls = counter('api.calls', { service: 'web' }); + +// Create a timer factory +const requestTimer = timer('api.request.duration', { service: 'web' }); + +// Export metrics to StatsD format +const statsd = statsdStream(); +statsd.pipe(createWriteStream('metrics.log')); + +// Use metrics in your application +function handleRequest(req, res) { + const timer = requestTimer.create({ endpoint: req.url }); + + apiCalls.increment(); + + // Process request... + + timer.stop(); +} +``` + +## Metric Types + +### `metrics.counter(name[, meta])` + + + +* `name` {string} The name of the counter metric. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.Counter} + +Creates a counter metric that tracks cumulative values. + +```mjs +import { counter } from 'node:metrics'; + +const errorCount = counter('errors.total', { component: 'database' }); + +errorCount.increment(); // Increment by 1 +errorCount.increment(5); // Increment by 5 +errorCount.decrement(2); // Decrement by 2 +``` + + +### `metrics.gauge(name[, meta])` + + + +* `name` {string} The name of the gauge metric. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.Gauge} + +Creates a gauge metric that represents a single value at a point in time. + +```mjs +import { gauge } from 'node:metrics'; +import { memoryUsage } from 'node:process'; + +const memory = gauge('memory.usage.bytes'); + +memory.reset(memoryUsage().heapUsed); +memory.applyDelta(1024); // Add 1024 to current value +``` + +### `metrics.meter(name, interval[, meta])` + + + +* `name` {string} The name of the meter metric. +* `interval` {number} The time window in milliseconds for rate calculation. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.Meter} + +Creates a meter metric that measures the rate of events over time. + +```mjs +import { meter } from 'node:metrics'; + +const requestRate = meter('requests.rate', 60000); // 1 minute window + +requestRate.mark(); // Mark one event +requestRate.mark(10); // Mark 10 events +``` + +### `metrics.timer(name[, meta])` + + + +* `name` {string} The name of the timer metric. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.TimerFactory} + +Creates a timer factory for measuring durations. + +```mjs +import { timer } from 'node:metrics'; + +const dbQueryTimer = timer('db.query.duration'); + +const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); +// Perform database query... +const duration = t.stop(); // Returns duration in milliseconds +``` + +### `metrics.uniqueSet(name[, meta])` + + + +* `name` {string} The name of the unique set metric. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.UniqueSet} + +Creates a unique set metric which counts distinct objects. Uniqueness is +determined through a `WeakSet`, so it follows the same identity rules. + +```mjs +import { uniqueSet } from 'node:metrics'; + +const uniqueUsers = uniqueSet('users.unique'); + +uniqueUsers.add(user); // Only counted once per unique value +uniqueUsers.add(anotherUser); +``` + +### `metrics.periodicGauge(name, interval, fn[, meta])` + + + +* `name` {string} The name of the periodic gauge metric. +* `interval` {number} The interval in milliseconds between samples. +* `fn` {Function} A function that returns the current value. +* `meta` {Object} Optional metadata to attach to all reports. +* Returns: {metrics.PeriodicGauge} + +Creates a gauge that automatically samples a value at regular intervals. + +```mjs +import { periodicGauge } from 'node:metrics'; +import { cpuUsage } from 'node:process'; + +const cpu = periodicGauge('cpu.usage', 5000, () => { + return cpuUsage().user; +}); + +// Stop sampling when no longer needed +cpu.stop(); +``` + +## Classes + +### Class: `MetricReport` + + + +Represents a single metric measurement. + +#### `metricReport.type` + + + +* {string} + +The type of the metric (e.g., 'counter', 'gauge', 'meter', 'periodicGauge', +'timer', 'uniqueSet'). + +#### `metricReport.name` + + + +* {string} + +The name of the metric. + +#### `metricReport.value` + + + +* {number} + +The numeric value of the measurement. + +#### `metricReport.meta` + + + +* {Object} + +Additional metadata associated with the measurement. + +#### `metricReport.time` + + + +* {number} + +The `performance.now()` timestamp when the measurement was recorded in +milliseconds since `performance.timeOrigin`. + +#### `metricReport.toStatsd()` + + + +* Returns: {string} + +Formats the metric report as a StatsD-compatible string. + +```js +console.log(report.toStatsd()); // 'api.calls:1|c' +``` + +#### `metricReport.toDogStatsd()` + + + +* Returns: {string} + +Formats the metric report as a DogStatsD-compatible string with tags. + +```js +console.log(report.toDogStatsd()); // 'api.calls:1|c|service:web' +``` + +#### `metricReport.toGraphite()` + + + +* Returns: {string} + +Formats the metric report as a Graphite-compatible string. + +```js +console.log(report.toGraphite()); // 'api.calls 1 1234567890' +``` + +#### `metricReport.toPrometheus()` + + + +* Returns: {string} + +Formats the metric report as a Prometheus-compatible string. + +```js +console.log(report.toPrometheus()); // 'api_calls{service="web"} 1 1234567890.123' +``` + +### Class: `Metric` + + + +Manages the lifecycle of a metric channel and provides methods for reporting +values to it. Each metric type holds a `Metric` instance which it reports to. + +#### `metric.type` + + + +* {string} + +The type of the metric (e.g., 'counter', 'gauge', 'meter', 'periodicGauge', +'timer', 'uniqueSet'). + +#### `metric.name` + + + +* {string} + +The name of the metric. + +#### `metric.meta` + + + +* {Object} + +Additional metadata associated with the metric. + +#### `metric.channelName` + + + +* {string} + +The name of the diagnostics_channel used for this metric. + +#### `metric.channel` + + + +* {Channel} + +The diagnostics channel instance used for this metric. + +#### `metric.shouldReport` + + + +* {boolean} + +Indicates whether the metric should report values. This can be used to +conditionally enable or disable value preparation work. + +#### `metric.report(value[, meta])` + + + +* `value` {number} The value to report. +* `meta` {Object} Additional metadata for this report. +* Returns: {metrics.MetricReport} + +Reports a value for the metric, creating a `MetricReport` instance. +This bypasses the metric type specific methods, allowing direct reporting +to a channel. + +Generally this method should not be used directly. Instead, use the +specific methods provided by each metric type (e.g., `increment`, `reset`, +`mark`, etc.) which internally call this method with the appropriate value and +metadata. + +```mjs +import { meter } from 'node:metrics'; + +const apiCalls = meter('api.calls', { service: 'web' }); + +apiCalls.metric.report(1); // Reports a value of 1 +apiCalls.metric.report(5, { endpoint: '/api/users' }); // Reports 5 with metadata +``` + +### Class: `Counter` + +* Extends: {metrics.Gauge} + + + +A metric that only increases or decreases. + +#### `counter.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `counter.increment([n[, meta]])` + + + +* `n` {number} The amount to increment. **Default:** `1` +* `meta` {Object} Additional metadata for this report. + +Increments the counter by the specified amount. + +```mjs +import { counter } from 'node:metrics'; + +const apiCalls = counter('api.calls', { service: 'web' }); + +apiCalls.increment(); // Increment by 1 +apiCalls.increment(5); // Increment by 5 +apiCalls.increment(10, { endpoint: '/api/users' }); // Increment by 10 with metadata +apiCalls.increment({ endpoint: '/api/orders' }); // Increment by 1 with metadata +``` + +#### `counter.decrement([n[, meta]])` + + + +* `n` {number} The amount to decrement. **Default:** `1` +* `meta` {Object} Additional metadata for this report. + +Decrements the counter by the specified amount. + +```mjs +import { counter } from 'node:metrics'; + +const errorCount = counter('errors.total', { component: 'database' }); + +errorCount.decrement(); // Decrement by 1 +errorCount.decrement(3); // Decrement by 3 +errorCount.decrement(2, { errorType: 'timeout' }); // Decrement by 2 with metadata +errorCount.decrement({ errorType: 'timeout' }); // Decrement by 1 with metadata +``` + +#### `counter.value` + + + +* {number} + +The current value of the counter. + +### Class: `Gauge` + + + +A metric representing a single value that can go up or down. + +#### `gauge.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `gauge.value` + + + +* {number} + +The current value of the metric. + +#### `gauge.reset([value[, meta]])` + + + +* `value` {number} The new value. **Default:** `0` +* `meta` {Object} Additional metadata for this report. + +Sets the gauge to a specific value and reports it. + +```mjs +import { gauge } from 'node:metrics'; +import { memoryUsage } from 'node:process'; + +const memory = gauge('memory.usage.bytes'); + +memory.reset(); // Reset to 0 +memory.reset(memoryUsage().heapUsed); // Set to current memory usage +memory.reset(1024, { source: 'system' }); // Set to 1024 with metadata +``` + +#### `gauge.applyDelta(delta[, meta])` + + + +* `delta` {number} The amount to add to the current value. +* `meta` {Object} Additional metadata for this report. + +Adds a delta to the current value and reports the new value. + +```mjs +import { gauge } from 'node:metrics'; + +const cpuUsage = gauge('cpu.usage.percent'); + +cpuUsage.applyDelta(5); // Increase by 5 +cpuUsage.applyDelta(-2, { source: 'system' }); // Decrease by 2 with metadata +``` + +### Class: `Meter` + +* Extends: {metrics.Gauge} + + + +A metric that measures the rate of events over a sliding time window. + +#### `meter.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `meter.mark([n[, meta]])` + + + +* `n` {number} The number of events to mark. **Default:** `1` +* `meta` {Object} Additional metadata for this report. + +Records events and updates the rate calculation. + +```mjs +import { meter } from 'node:metrics'; + +const requestRate = meter('requests.rate', 60000); // 1 minute window + +requestRate.mark(); // Mark one event +requestRate.mark(10); // Mark 10 events +requestRate.mark(5, { endpoint: '/api/users' }); // Mark 5 with metadata +requestRate.mark({ endpoint: '/api/orders' }); // Mark 1 with metadata +``` + +### Class: `Timer` + +* Extends: {metrics.Gauge} + + + +A metric for measuring durations. + +#### `timer.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `timer.start` + + + +* {number} + +The start time of the timer (milliseconds since epoch). + +#### `timer.end` + + + +* {number} + +The end time of the timer (milliseconds since epoch). Zero if timer is running. + +#### `timer.duration` + + + +* {number} + +The duration in milliseconds. Zero if timer is still running. + +#### `timer.stop([meta])` + + + +* `meta` {Object} Additional metadata for this report. +* Returns: {number} The duration in milliseconds. + +Stops the timer and reports the duration. Can only be called once. + +```mjs +import { timer } from 'node:metrics'; + +const dbQueryTimer = timer('db.query.duration'); + +const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); + +// Perform database query... + +// Stop the timer and get the duration +const duration = t.stop(); // Returns duration in milliseconds +``` + +#### `timer[Symbol.dispose]()` + + + +Allows `using` syntax to automatically stop the timer when done. + +```mjs +import { timer } from 'node:metrics'; + +const dbQueryTimer = timer('db.query.duration'); + +{ + using t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); + // Perform database query... + + // Timer is automatically stopped here +} +``` + +### Class: `TimerFactory` + + + +A factory for creating timer instances. + +#### `timer.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `timerFactory.create([meta])` + + + +* `meta` {Object} Additional metadata for this timer. +* Returns: {metrics.Timer} + +Creates a new timer instance with the specified metadata. + +```mjs +import { timer } from 'node:metrics'; + +const dbQueryTimer = timer('db.query.duration'); + +const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); +``` + +### Class: `UniqueSet` + +* Extends: {metrics.Gauge} + + + +A metric that counts unique values. + +#### `uniqueSet.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `uniqueSet.add(value[, meta])` + + + +* `value` {any} The value to add to the set. +* `meta` {Object} Additional metadata for this report. + +Adds a value to the set. Only reports if the value hasn't been seen before. + +```mjs +import { uniqueSet } from 'node:metrics'; + +const uniqueUsers = uniqueSet('users.unique'); + +uniqueUsers.add(user); // Only counted once per unique value +uniqueUsers.add(user); // Ignored because user is already in the set +uniqueUsers.add(anotherUser, { source: 'login' }); // Count another user with metadata +``` + +#### `uniqueSet.count` + + + +* {number} + +The number of unique values seen. + +### Class: `PeriodicGauge` + +* Extends: {metrics.Gauge} + + + +A gauge that automatically samples values at regular intervals. + +#### `periodicGauge.metric` + + + +* {metrics.Metric} + +The underlying metric instance used for reporting. + +#### `periodicGauge.interval` + + + +* {number} + +The sampling interval in milliseconds. Setting this property reschedules the timer. + +#### `periodicGauge.schedule()` + + + +Schedules the periodic sampling based on the configured interval. This is called +automatically when the gauge is created, but can be called again to reschedule +after it has been stopped. + +```mjs +import { periodicGauge } from 'node:metrics'; +import { cpuUsage } from 'node:process'; + +const cpu = periodicGauge('cpu.usage', 5000, () => { + return cpuUsage().user; +}); + +cpu.stop(); + +// Reschedule sampling +cpu.schedule(); +``` + +#### `periodicGauge.stop()` + + + +Stops the periodic sampling. + +```mjs +import { periodicGauge } from 'node:metrics'; +import { cpuUsage } from 'node:process'; + +const cpu = periodicGauge('cpu.usage', 5000, () => { + return cpuUsage().user; +}); + +// Stop sampling when no longer needed +cpu.stop(); +``` + +#### `periodicGauge[Symbol.dispose]()` + + + +Allows `using` syntax to automatically stop the periodic gauge when done. + +```mjs +import { periodicGauge } from 'node:metrics'; +import { cpuUsage } from 'node:process'; + +{ + using cpu = periodicGauge('cpu.usage', 1000, () => { + return cpuUsage().user; + }); + + // Perform operations that require periodic sampling... + + // Sampling is automatically stopped here +} +``` + +## Metric Streams + +### `metrics.statsdStream()` + + + +* Returns: {stream.Transform} + +Creates a transform stream that converts all metrics to StatsD format. + +```mjs +import { statsdStream } from 'node:metrics'; +import { stdout } from 'node:process'; + +const stream = statsdStream(); +stream.pipe(stdout); +``` + +Output format: `metric.name:value|type` + +### `metrics.dogstatsdStream()` + + + +* Returns: {stream.Transform} + +Creates a transform stream that converts all metrics to DogStatsD format with tags. + +```mjs +import { dogstatsdStream } from 'node:metrics'; +import { stdout } from 'node:process'; + +const stream = dogstatsdStream(); +stream.pipe(stdout); +``` + +Output format: `metric.name:value|type|key:value,key2:value2` + +### `metrics.graphiteStream()` + + + +* Returns: {stream.Transform} + +Creates a transform stream that converts all metrics to Graphite plaintext protocol. + +```mjs +import { dogstatsdStream } from 'node:metrics'; +import { stdout } from 'node:process'; + +const stream = graphiteStream(); +stream.pipe(stdout); +``` + +Output format: `metric.name value timestamp` + +### `metrics.prometheusStream()` + + + +* Returns: {stream.Transform} + +Creates a transform stream that converts all metrics to Prometheus exposition format. + +```mjs +import { prometheusStream } from 'node:metrics'; +import { stdout } from 'node:process'; + +const stream = prometheusStream(); +stream.pipe(stdout); +``` + +Output format: `metric_name{label="value"} value timestamp` + +## Integration with Diagnostics Channel + +All metrics publish their reports through `node:diagnostics_channel`. The channel +name format is `metrics:{type}:{name}` where `{type}` is the metric type and +`{name}` is the metric name. + +```mjs +import { subscribe } from 'node:diagnostics_channel'; + +// Subscribe to a specific metric +subscribe('metrics:counter:api.calls', (report) => { + console.log(`API calls: ${report.value}`); +}); +``` + +```cjs +const { subscribe } = require('node:diagnostics_channel'); + +subscribe('metrics:counter:api.calls', (report) => { + console.log(`API calls: ${report.value}`); +}); +``` + +Additionally there is a specialized channel `metrics:new` which publishes any +newly created metrics, allowing subcribing to all metrics without needing to +know their names in advance. + +```mjs +import { subscribe } from 'node:diagnostics_channel'; + +subscribe('metrics:new', (metric) => { + console.log(`New metric created: ${metric.type} - ${metric.name}`); +}); +``` + +```cjs +const { subscribe } = require('node:diagnostics_channel'); + +subscribe('metrics:new', (metric) => { + console.log(`New metric created: ${metric.type} - ${metric.name}`); +}); +``` + +## Best Practices + +1. **Naming Conventions**: Use dot-separated hierarchical names (e.g., `http.requests.total`). + +2. **Metadata**: Use metadata to add dimensions to your metrics without creating separate metric instances. + +3. **Performance**: Metric types are designed to be lightweight. However, avoid + creating metric types in hot code paths. As with diagnostics_channel, metric + creation is optimized for capture time performance by moving costly + operations to metric type creation time. diff --git a/lib/diagnostics_channel.js b/lib/diagnostics_channel.js index cceadafbd84d3a..743a177e3c485e 100644 --- a/lib/diagnostics_channel.js +++ b/lib/diagnostics_channel.js @@ -232,6 +232,10 @@ class Channel { const channels = new WeakRefMap(); +function hasChannel(name) { + return channels.has(name); +} + function channel(name) { const channel = channels.get(name); if (channel) return channel; @@ -457,6 +461,7 @@ function tracingChannel(nameOrChannels) { dc_binding.linkNativeChannel((name) => channel(name)); module.exports = { + hasChannel, channel, hasSubscribers, subscribe, diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js index f49f0814bbc687..3b72209146705c 100644 --- a/lib/internal/bootstrap/realm.js +++ b/lib/internal/bootstrap/realm.js @@ -124,6 +124,7 @@ const legacyWrapperList = new SafeSet([ // beginning with "internal/". // Modules that can only be imported via the node: scheme. const schemelessBlockList = new SafeSet([ + 'metrics', 'sea', 'sqlite', 'quic', diff --git a/lib/metrics.js b/lib/metrics.js new file mode 100644 index 00000000000000..96369d8af80ec8 --- /dev/null +++ b/lib/metrics.js @@ -0,0 +1,848 @@ +/** + * A metrics provider which reports to diagnostics_channel. + * + * # Metric Types + * + * - Counter: An increasing or decreasing value. + * - Gauge: A snapshot of a single value in time. + * - Meter: A number of events per interval. + * - Timer: A duration in milliseconds. + * - UniqueSet: A unique count of number of unique values seen. + * - PeriodicGauge: A gauge which periodically updates its value by calling a function. + * + * # Exporting Metrics + * + * Several text format exporters are provided as streams: + * - Statsd + * - Dogstatsd + * - Graphite + * - Prometheus + * + * # TODO(qard): + * - Histograms + * - Distributions/Summaries + */ + +'use strict'; + +const { + ArrayPrototypeJoin, + ArrayPrototypeMap, + MathFloor, + ObjectAssign, + ObjectEntries, + ObjectFreeze, + ObjectKeys, + SafeMap, + SafeWeakSet, + SymbolDispose, +} = primordials; + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + }, +} = require('internal/errors'); +const { setInterval, clearInterval } = require('internal/timers'); + +const { + channel, + hasChannel, + subscribe, + unsubscribe, +} = require('diagnostics_channel'); +const { performance } = require('perf_hooks'); +const { Transform } = require('stream'); + +const newMetricChannel = channel('metrics:new'); + +/** + * Mix two metadata objects together. + * @param {object} a The first metadata object. + * @param {object} b The second metadata object. + * @returns {object} The mixed metadata. + * @private + */ +function mixMeta(a, b) { + if (a === undefined) return b; + if (b === undefined) return a; + return ObjectAssign({}, a, b); +} + +/** + * Represents a single reported metric. + */ +class MetricReport { + /** + * The type of metric. + * @property {string} type + */ + + /** + * The name of the metric. + * @property {string} name + */ + + /** + * The value of the metric. + * @property {number} value + */ + + /** + * Additional metadata to include with the report. + * @property {object} meta + */ + + /** + * Constructs a new metric report. + * @param {string} type The type of metric. + * @param {string} name The name of the metric. + * @param {number} value The value of the metric. + * @param {object} [meta] Additional metadata to include with the report. + */ + constructor(type, name, value, meta) { + this.type = type; + this.name = name; + this.value = value; + this.meta = meta; + this.time = performance.now(); + ObjectFreeze(this); + } + + /** + * Convert the metric report to a statsd-compatible string. + * @returns {string} The statsd-formatted metric report. + */ + toStatsd() { + const { type, name, value } = this; + return `${name}:${value}|${this.#statsdType(type)}`; + } + + /* + * Convert the metric type to a statsd type. + * + * @param {string} type The metric type. + * @returns {string} The statsd type. + * @private + */ + #statsdType(type) { + return { + counter: 'c', + gauge: 'g', + meter: 'm', + periodicGauge: 'g', + timer: 'ms', + uniqueSet: 's', + }[type]; + } + + /** + * Convert the metric report to a Dogstatsd-compatible string. + * @returns {string} The Dogstatsd-formatted metric report. + */ + toDogStatsd() { + return `${this.toStatsd()}${this.#dogstatsdTags()}`; + } + + /* + * Pack metadata into Dogstatsd-compatible tags. + * + * @returns {string} The packed metadata. + * @private + */ + #dogstatsdTags() { + const entries = ObjectEntries(this.meta); + const pairs = ArrayPrototypeMap(entries, ({ 0: k, 1: v }) => `${k}:${v}`); + const tags = ArrayPrototypeJoin(pairs, ','); + return tags.length ? `|${tags}` : ''; + } + + /** + * Convert the metric report to a graphite-compatible string. + * @returns {string} The graphite-formatted metric report. + */ + toGraphite() { + const { name, value, time } = this; + return `${name} ${value} ${MathFloor(time / 1000)}`; + } + + /** + * Convert the metric report to a Prometheus-compatible string. + * @returns {string} The Prometheus-formatted metric report. + */ + toPrometheus() { + const { name, value, time } = this; + return `${name}${this.#prometheusLabels()} ${value} ${time}`; + } + + /* + * Pack metadata into Prometheus-compatible labels. + * + * @returns {string} The packed metadata. + * @private + */ + #prometheusLabels() { + const entries = ObjectEntries(this.meta); + const pairs = ArrayPrototypeMap(entries, ({ 0: k, 1: v }) => `${k}="${v}"`); + const labels = ArrayPrototypeJoin(pairs, ','); + return labels.length ? `{${labels}}` : ''; + } +} + +/** + * Represents a metric which can be reported to. + */ +class Metric { + #channel; + + /** + * The type of metric. + * @property {string} type + */ + + /** + * The name of the metric. + * @property {string} name + */ + + /** + * Additional metadata to include with the metric. + * @property {object} meta + */ + + /** + * Constructs a new metric. + * @param {string} type The type of metric. + * @param {string} name The name of the metric. + * @param {object} [meta] Additional metadata to include with the metric. + */ + constructor(type, name, meta) { + if (!metricTypeNames.includes(type)) { + throw new ERR_INVALID_ARG_VALUE('type', type, wrongTypeErr); + } + if (typeof name !== 'string' || !name) { + throw new ERR_INVALID_ARG_TYPE('name', ['string'], name); + } + if (meta !== undefined && typeof meta !== 'object') { + throw new ERR_INVALID_ARG_TYPE('meta', ['object', 'undefined'], meta); + } + + this.type = type; + this.name = name; + this.meta = meta; + + // Before acquiring the channel, check if it already exists. + const exists = hasChannel(this.channelName); + this.#channel = channel(this.channelName); + + // If the channel is new and there are new channel subscribers, + // publish the metric to the new metric channel. + if (!exists && newMetricChannel.hasSubscribers) { + newMetricChannel.publish(this); + } + + ObjectFreeze(this); + } + + /** + * The channel name of the metric. + * @property {string} channelName + */ + get channelName() { + return `metrics:${this.type}:${this.name}`; + } + + /** + * The channel for this metric. + * @property {Channel} channel + */ + get channel() { + return this.#channel; + } + + /** + * Whether the metric should report values. If there are no subscribers, + * metric preparation and report construction can be skipped. + * @property {boolean} shouldReport + */ + get shouldReport() { + return this.#channel.hasSubscribers; + } + + /** + * Report a value to the metric. + * @param {number} value The value to report. + * @param {object} [meta] Additional metadata to include with the report. + */ + report(value, meta) { + // Skip report construction if there are no subscribers. + if (!this.shouldReport) return; + const report = new MetricReport(this.type, this.name, value, + mixMeta(this.meta, meta)); + this.#channel.publish(report); + } +} + +/** + * Represents a snapshot of a value in time. Will report the value every time + * reset() is called, or when applyDelta() is called with a non-zero value. + */ +class Gauge { + /** + * The metric to report to. + * @property {Metric} metric + */ + + /** + * The value of the gauge. + * @property {number} value + */ + + /** + * @param {Metric} metric The metric to report to. + */ + constructor(metric) { + if (!(metric instanceof Metric)) { + throw new ERR_INVALID_ARG_TYPE('metric', ['Metric'], metric); + } + this.metric = metric; + this.value = 0; + } + + /** + * Set the gauge value. + * @param {number} value The value to set the gauge to. + * @param {object} [meta] Additional metadata to include with the report. + */ + reset(value = 0, meta) { + this.value = value; + this.metric.report(value, meta); + } + + /** + * Apply a delta to the gauge. + * @param {number} value The delta to apply to the gauge. + * @param {object} [meta] Additional metadata to include with the report. + */ + applyDelta(value, meta) { + this.reset(this.value + value, meta); + } +} + +/** + * Number of events per interval. This will report at every mark() call, + * but will report an aggregate value if a sliding window of marks which + * occurred within the interval period. + */ +class Meter extends Gauge { + #window; + + /** + * The metric to report to. + * @property {Metric} metric + */ + + /** + * The interval in milliseconds to aggregate marks over. + * @property {number} interval + */ + + /** + * Construct a new meter. + * @param {Metric} metric The metric to report to. + * @param {number} interval The interval in milliseconds to aggregate marks over. + */ + constructor(metric, interval) { + super(metric); + if (typeof interval !== 'number' || interval <= 0) { + throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); + } + this.interval = interval; + this.#window = []; + } + + /** + * Mark an event in the meter. + * @param {number} [n] The number of events to mark. Defaults to 1. + * @param {object} [meta] Additional metadata to include with the report. + */ + mark(n = 1, meta) { + if (!this.metric.shouldReport) return; + + if (typeof n === 'object') { + meta = n; + n = 1; + } + + const now = performance.now(); + this.#window.push({ value: n, time: now }); + + let { value } = this; + while (this.#window.length && (now - this.#window[0].time) > this.interval) { + const cached = this.#window.shift(); + value -= cached.value; + } + + value += n; + this.reset(value, meta); + } +} + +/** + * An increasing or decreasing value. + */ +class Counter extends Gauge { + /** + * Increment the counter. Negative values invert to positive. + * @param {number} [n] The amount to increment the counter by. Defaults to 1. + * @param {object} [meta] Additional metadata to include with the report. + */ + increment(n = 1, meta) { + if (!this.metric.shouldReport) return; + + if (typeof n === 'object') { + meta = n; + n = 1; + } + + this.applyDelta(n, meta); + } + + /** + * Decrement the counter. Negative values invert to positive. + * @param {number} [n] The amount to decrement the counter by. Defaults to 1. + * @param {object} [meta] Additional metadata to include with the report. + */ + decrement(n = 1, meta) { + if (!this.metric.shouldReport) return; + + if (typeof n === 'object') { + meta = n; + n = 1; + } + + this.applyDelta(-n, meta); + } +} + +/** + * A floating point number which represents a length of time in milliseconds. + */ +class Timer extends Gauge { + #meta; + + /** + * The start time of the timer. + * @property {number} start + */ + + /** + * End time of timer. If undefined, timer is still running. + * @property {number|undefined} end + */ + + /** + * Duration of timer in milliseconds. If undefined, timer is still running. + * @property {number|undefined} duration + */ + + /** + * Construct a new timer. + * @param {Metric} metric The metric to report to. + * @param {object} [meta] Additional metadata to include with the report. + */ + constructor(metric, meta) { + super(metric); + if (meta !== undefined && typeof meta !== 'object') { + throw new ERR_INVALID_ARG_TYPE('meta', ['object', 'undefined'], meta); + } + this.#meta = meta; + + this.start = performance.now(); + this.end = undefined; + this.duration = undefined; + } + + /** + * Additional metadata to include with the report. + * @property {object} meta + */ + get meta() { + return mixMeta(this.metric.meta, this.#meta); + } + + /** + * Stop the timer and report the duration. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {number} The duration in milliseconds. + */ + stop(meta) { + if (this.end !== undefined) return; + if (!this.metric.shouldReport) return; + this.end = performance.now(); + this.duration = this.end - this.start; + this.reset(this.duration, mixMeta(this.#meta, meta)); + return this.duration; + } + + /** + * Support `using` syntax to automatically stop the timer when done. + */ + [SymbolDispose]() { + this.stop(); + } +} + +/** + * A count of the number of unique values that have been seen. + */ +class UniqueSet extends Gauge { + // Use a weak set to track unique values without retaining references. + #seen = new SafeWeakSet(); + + /** + * The number of unique values seen. + * @property {number} count + */ + count = 0; + + /** + * Add value to set. If value was not already present, report it. + * @param {any} value The value to track in the set. + * @param {object} meta Additional metadata to include with the report. + */ + add(value, meta) { + // If already seen, do nothing. + if (this.#seen.has(value)) { + return; + } + + this.count += 1; + this.#seen.add(value); + this.reset(this.count, meta); + } +} + +/** + * A gauge which periodically updates its value by calling a function and + * setting the value to the result. + */ +class PeriodicGauge extends Gauge { + #timer; + #interval; + #fn; + + /** + * Construct a new periodic gauge. + * @param {Metric} metric The metric to report to. + * @param {number} interval The interval in milliseconds to update the gauge. + * @param {Function} fn The function to call to update the gauge. + */ + constructor(metric, interval, fn) { + super(metric); + + if (typeof interval !== 'number' || interval <= 0) { + throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); + } + if (typeof fn !== 'function') { + throw new ERR_INVALID_ARG_TYPE('fn', ['function'], fn); + } + + this.#timer = undefined; + this.#interval = interval; + this.#fn = fn; + + this.schedule(); + } + + /** + * Schedule the update timer. + */ + schedule() { + this.stop(); + + this.#timer = setInterval(() => { + this.reset(this.#fn()); + }, this.interval); + + // Don't keep the process alive just for this timer. + this.#timer.unref(); + } + + /** + * The interval in milliseconds at which to update the value. If changed, + * the timer will be rescheduled. + * @property {number} interval + */ + set interval(interval) { + if (typeof interval !== 'number' || interval <= 0) { + throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); + } + this.#interval = interval; + this.schedule(); + } + get interval() { + return this.#interval; + } + + /** + * Stop the periodic gauge. + */ + stop() { + if (this.#timer !== undefined) { + clearInterval(this.#timer); + this.#timer = undefined; + } + } + + /** + * Reference the timer to prevent to loop from exiting. + */ + ref() { + this.#timer?.ref(); + } + + /** + * Unreference the timer to allow the loop to exit. + */ + unref() { + this.#timer?.unref(); + } + + /** + * Support `using` syntax to automatically stop the periodic gauge when done. + */ + [SymbolDispose]() { + this.stop(); + } +} + +/** + * A factory for creating Timers for the given metric. + */ +class TimerFactory { + /** + * The metric to report to. + * @property {Metric} metric + */ + + /** + * Construct a new Timer factory. + * @param {Metric} metric The metric to report to. + * @param {object} [meta] Additional metadata to include with the report. + */ + constructor(metric) { + if (!(metric instanceof Metric)) { + throw new ERR_INVALID_ARG_TYPE('metric', ['Metric'], metric); + } + this.metric = metric; + ObjectFreeze(this); + } + + /** + * Create a new timer with the given metadata. + * @param {object} [meta] Additional metadata to include with this timer. + * @returns {Timer} A new Timer instance with the combined metadata. + */ + create(meta) { + return new Timer(this.metric, meta); + } +} + +/** + * Create a timer metric. + * @param {string} name The name of the timer. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {object} An object with a create method to create new timers. + */ +function timer(name, meta) { + const metric = new Metric('timer', name, meta); + return new TimerFactory(metric); +} + +// Map of metric types to their constructors. +const metricTypes = { + counter: Counter, + gauge: Gauge, + meter: Meter, + periodicGauge: PeriodicGauge, + timer: Timer, + uniqueSet: UniqueSet, +}; + +const metricTypeNames = ObjectKeys(metricTypes); +const wrongTypeErr = `must be one of: ${metricTypeNames.join(', ')}`; + +/** + * Create a function to directly create a metric of a specific type. + * @param {string} type The type of metric to create. + * @returns {Function} A function which creates a metric of the specified type. + * @private + */ +function direct(type) { + if (!metricTypeNames.includes(type)) { + throw new ERR_INVALID_ARG_VALUE('type', type, wrongTypeErr); + } + const Type = metricTypes[type]; + + return function makeMetricType(name, ...args) { + let meta; + if (typeof args[args.length - 1] === 'object') { + meta = args.pop(); + } + + const metric = new Metric(type, name, meta); + return new Type(metric, ...args); + }; +} + +/** + * Create a stream which converts metrics to a string using a converter. + * @param {Function} converter The function to convert metrics to strings. + * @returns {Transform} A readable stream of converted metrics. + * @private + */ +function metricStreamFactory(converter) { + return function makeMetricStream() { + const stream = new Transform({ + // Receives Metric objects + writableObjectMode: true, + + // Emits statsd-formatted strings + transform(metric, encoding, callback) { + if (!this.closed) { + callback(null, `${converter(metric)}\n`); + } + }, + }); + + // Track all channel subscriptions to unsubscribe when the stream ends. + const subscribers = new SafeMap(); + + // Subscribe and track subscriptions. + function sub(name, listener) { + subscribe(name, listener); + subscribers.set(name, listener); + } + + // Subscribe to new metric channel to discover and subscribe to new metrics. + newMetricChannel.subscribe(({ type, name }) => { + const key = `metrics:${type}:${name}`; + sub(key, (result) => stream.write(result)); + }); + + // If the stream is ended, unsubscribe all listeners. + stream.on('finish', () => { + for (const { 0: name, 1: listener } of subscribers.entries()) { + unsubscribe(name, listener); + } + subscribers.clear(); + }); + + return stream; + }; +} + +/** + * Create a stream converting all metrics to a statsd-compatible format. + * @returns {Transform} A readable stream of statsd-formatted metrics. + */ +const statsdStream = metricStreamFactory((r) => r.toStatsd()); + +/** + * Create a stream converting all metrics to a Dogstatsd-compatible format. + * @returns {Transform} A readable stream of Dogstatsd-formatted metrics. + */ +const dogstatsdStream = metricStreamFactory((r) => r.toDogStatsd()); + +/** + * Create a stream converting all metrics to a Prometheus-compatible format. + * @returns {Transform} A readable stream of Prometheus-formatted metrics. + */ +const prometheusStream = metricStreamFactory((r) => r.toPrometheus()); + +/** + * Create a stream converting all metrics to a graphite-compatible format. + * @returns {Transform} A readable stream of graphite-formatted metrics. + */ +const graphiteStream = metricStreamFactory((r) => r.toGraphite()); + +/** + * Create a counter metric. + * @param {string} name The name of the counter. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Counter} The counter metric. + */ +const counter = direct('counter'); + +/** + * Create a gauge metric. + * @param {string} name The name of the gauge. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Gauge} The gauge metric. + */ +const gauge = direct('gauge'); + +/** + * Create a meter metric. + * @param {string} name The name of the meter. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Gauge} The meter metric. + */ +const meter = direct('meter'); + +/** + * Create a raw metric. + * @param {string} type The type of metric to create (e.g., 'gauge', 'counter'). + * @param {string} name The name of the metric. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Metric} The raw metric. + */ +function metric(type, name, meta) { + return new Metric(type, name, meta); +} + +/** + * Create a periodic gauge metric. + * @param {string} name The name of the periodic gauge. + * @param {number} interval The interval in milliseconds to update the gauge. + * @param {Function} fn The function to call to update the gauge. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {PeriodicGauge} The periodic gauge metric. + */ +const periodicGauge = direct('periodicGauge'); + +/** + * Create a unique set metric. + * @param {string} name The name of the set. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Set} The set metric. + */ +const uniqueSet = direct('uniqueSet'); + +module.exports = { + MetricReport, + Metric, + Gauge, + Counter, + Timer, + UniqueSet, + PeriodicGauge, + Meter, + TimerFactory, + + metricStreamFactory, + statsdStream, + dogstatsdStream, + graphiteStream, + prometheusStream, + + counter, + gauge, + meter, + metric, + periodicGauge, + uniqueSet, + timer, +}; diff --git a/test/parallel/test-metrics-counter.js b/test/parallel/test-metrics-counter.js new file mode 100644 index 00000000000000..3cdbb3ad1b28f1 --- /dev/null +++ b/test/parallel/test-metrics-counter.js @@ -0,0 +1,41 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { counter, Counter, Metric, MetricReport } = require('node:metrics'); + +const testCounter = counter('test', { base: 'test' }); +assert.ok(testCounter instanceof Counter); +assert.strictEqual(testCounter.value, 0); +assert.ok(testCounter.metric instanceof Metric); + +const { metric } = testCounter; +assert.strictEqual(metric.type, 'counter'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:counter:test'); + +const messages = [ + [1, { base: 'test' }], + [124, { base: 'test', meta: 'extra' }], + [123, { base: 'test' }], + [0, { base: 'test', meta: 'extra' }], +]; + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'counter'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.strictEqual(report.value, value); + assert.deepStrictEqual(report.meta, meta); +}, 4)); + +testCounter.increment(); +testCounter.increment(123, { meta: 'extra' }); +testCounter.decrement(); +testCounter.decrement(123, { meta: 'extra' }); diff --git a/test/parallel/test-metrics-gauge.js b/test/parallel/test-metrics-gauge.js new file mode 100644 index 00000000000000..d5b38072fa4396 --- /dev/null +++ b/test/parallel/test-metrics-gauge.js @@ -0,0 +1,39 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { gauge, Gauge, Metric, MetricReport } = require('node:metrics'); + +const testGauge = gauge('test', { base: 'test' }); +assert.ok(testGauge instanceof Gauge); +assert.strictEqual(testGauge.value, 0); + +const { metric } = testGauge; +assert.ok(metric instanceof Metric); +assert.strictEqual(metric.type, 'gauge'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:gauge:test'); + +const messages = [ + [123, { base: 'test', meta: 'first' }], + [357, { base: 'test', meta: 'second' }], + [0, { base: 'test' }], +]; + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'gauge'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.strictEqual(report.value, value); + assert.deepStrictEqual(report.meta, meta); +}, 3)); + +testGauge.reset(123, { meta: 'first' }); +testGauge.applyDelta(234, { meta: 'second' }); +testGauge.reset(); diff --git a/test/parallel/test-metrics-meter.js b/test/parallel/test-metrics-meter.js new file mode 100644 index 00000000000000..36148bd88db766 --- /dev/null +++ b/test/parallel/test-metrics-meter.js @@ -0,0 +1,43 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { meter, Meter, Metric, MetricReport } = require('node:metrics'); + +const testMeter = meter('test', 100, { base: 'test' }); +assert.ok(testMeter instanceof Meter); +assert.strictEqual(testMeter.value, 0); +assert.strictEqual(testMeter.interval, 100); +assert.ok(testMeter.metric instanceof Metric); + +const { metric } = testMeter; +assert.strictEqual(metric.type, 'meter'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:meter:test'); + +const messages = [ + [1, { base: 'test' }], + [124, { base: 'test', meta: 'extra' }], + [1, { base: 'test' }], +]; + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'meter'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.strictEqual(report.value, value); + assert.deepStrictEqual(report.meta, meta); +}, 3)); + +testMeter.mark(); +testMeter.mark(123, { meta: 'extra' }); + +setTimeout(() => { + testMeter.mark(); +}, 200); diff --git a/test/parallel/test-metrics-metric-report.js b/test/parallel/test-metrics-metric-report.js new file mode 100644 index 00000000000000..ae57540d872c2c --- /dev/null +++ b/test/parallel/test-metrics-metric-report.js @@ -0,0 +1,31 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { MetricReport } = require('node:metrics'); + +const report = new MetricReport('counter', 'test-counter', 123, { + meta: 'test' +}); + +assert.ok(report instanceof MetricReport); +assert.strictEqual(report.type, 'counter'); +assert.strictEqual(report.name, 'test-counter'); +assert.strictEqual(report.value, 123); +assert.deepStrictEqual(report.meta, { meta: 'test' }); +assert.ok(report.time > 0); + +assert.strictEqual(report.toStatsd(), 'test-counter:123|c'); +assert.strictEqual( + report.toPrometheus(), + `test-counter{meta="test"} 123 ${report.time}` +); +assert.strictEqual( + report.toDogStatsd(), + 'test-counter:123|c|meta:test' +); +assert.strictEqual( + report.toGraphite(), + `test-counter 123 ${Math.floor(report.time / 1000)}` +); diff --git a/test/parallel/test-metrics-metric.js b/test/parallel/test-metrics-metric.js new file mode 100644 index 00000000000000..d02ae5b2250899 --- /dev/null +++ b/test/parallel/test-metrics-metric.js @@ -0,0 +1,27 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { Metric, MetricReport } = require('node:metrics'); + +const metric = new Metric('counter', 'test-counter', { base: 'test' }); + +assert.ok(metric instanceof Metric); +assert.strictEqual(metric.type, 'counter'); +assert.strictEqual(metric.name, 'test-counter'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:counter:test-counter'); + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'counter'); + assert.strictEqual(report.name, 'test-counter'); + assert.ok(report.time > 0); + + assert.strictEqual(report.value, 123); + assert.deepStrictEqual(report.meta, { base: 'test', meta: 'test' }); +})); + +metric.report(123, { meta: 'test' }); diff --git a/test/parallel/test-metrics-periodic-gauge.js b/test/parallel/test-metrics-periodic-gauge.js new file mode 100644 index 00000000000000..8ebc59e0e91fd0 --- /dev/null +++ b/test/parallel/test-metrics-periodic-gauge.js @@ -0,0 +1,50 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { periodicGauge, PeriodicGauge, Metric, MetricReport } = require('node:metrics'); + +// NOTE: If this test is flaky, tune the interval to give more leeway to the timing +const interval = 50; +const values = [ 1, 5, 10, 4, 6 ]; + +const testPeriodicGauge = periodicGauge('test', 10, () => { + const value = values.shift(); + if (!values.length) { + testPeriodicGauge.stop(); + } else { + testPeriodicGauge.interval = interval; + testPeriodicGauge.ref(); + assert.strictEqual(testPeriodicGauge.interval, interval); + } + return value; +}, { base: 'test' }); + +// Keep the loop alive +testPeriodicGauge.ref(); + +assert.ok(testPeriodicGauge instanceof PeriodicGauge); +assert.strictEqual(testPeriodicGauge.value, 0); +assert.strictEqual(testPeriodicGauge.interval, 10); +assert.ok(testPeriodicGauge.metric instanceof Metric); + +const { metric } = testPeriodicGauge; +assert.strictEqual(metric.type, 'periodicGauge'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:periodicGauge:test'); + +const messages = values.map((v) => [v, { base: 'test' }]); + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'periodicGauge'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.strictEqual(report.value, value); + assert.deepStrictEqual(report.meta, meta); +}, values.length)); diff --git a/test/parallel/test-metrics-streams.js b/test/parallel/test-metrics-streams.js new file mode 100644 index 00000000000000..b59a8e3ce9b01c --- /dev/null +++ b/test/parallel/test-metrics-streams.js @@ -0,0 +1,118 @@ +'use strict'; + +const common = require('../common'); + +const { setTimeout: wait } = require('node:timers/promises'); +const assert = require('assert'); +const { + counter, + gauge, + meter, + timer, + uniqueSet, + periodicGauge, + + statsdStream, + dogstatsdStream, + graphiteStream, + prometheusStream, +} = require('node:metrics'); + +const statsd = statsdStream(); +const dogstatsd = dogstatsdStream(); +const graphite = graphiteStream(); +const prometheus = prometheusStream(); + +async function assertStream(stream, expected) { + // Mark stream as ended so the toArray() can resolve. + stream.end(); + const chunks = await stream.toArray(); + const actual = Buffer.concat(chunks).toString().split('\n').filter((v) => v); + assert.strictEqual(actual.length, expected.length, `Stream should yield ${expected.length} lines`); + for (let i = 0; i < actual.length; i++) { + if (typeof expected[i] === 'string') { + assert.strictEqual(actual[i], expected[i], `Stream line ${i + 1} should match expected output`); + } else { + assert.ok(expected[i].test(actual[i]), `Stream line ${i + 1} should match expected output (${actual[i]})`); + } + } +} + +async function main() { + const c = counter('my-counter', { metaFor: 'my-counter' }); + c.increment(1, { more: 'meta' }); + + const g = gauge('my-gauge', { metaFor: 'my-gauge' }); + g.reset(123, { more: 'meta' }); + + const m = meter('my-meter', 100, { metaFor: 'my-meter' }); + m.mark(1, { more: 'meta' }); + + const t = timer('my-timer', { metaFor: 'my-timer' }); + const t1 = t.create({ more: 't1' }); + const t2 = t.create({ more: 't2' }); + + await wait(50); + t1.stop(); + + await wait(100); + t2.stop(); + + const s = uniqueSet('my-set', { metaFor: 'my-set' }); + s.reset(123, { more: 'meta' }); + + await new Promise((resolve) => { + const pg = periodicGauge('my-periodic-gauge', 50, () => { + setImmediate(resolve); + clearInterval(timer); + pg.stop(); + return 100; + }, { + metaFor: 'my-periodic-gauge' + }); + + // Keep the loop alive + pg.ref(); + }); + + await Promise.all([ + assertStream(statsd, [ + 'my-counter:1|c', + 'my-gauge:123|g', + 'my-meter:1|m', + /^my-timer:\d+(\.\d+)?\|ms$/, + /^my-timer:\d+(\.\d+)?\|ms$/, + 'my-set:123|s', + 'my-periodic-gauge:100|g', + ]), + assertStream(dogstatsd, [ + 'my-counter:1|c|metaFor:my-counter,more:meta', + 'my-gauge:123|g|metaFor:my-gauge,more:meta', + 'my-meter:1|m|metaFor:my-meter,more:meta', + /^my-timer:\d+(\.\d+)?\|ms\|metaFor:my-timer,more:t1$/, + /^my-timer:\d+(\.\d+)?\|ms\|metaFor:my-timer,more:t2$/, + 'my-set:123|s|metaFor:my-set,more:meta', + 'my-periodic-gauge:100|g|metaFor:my-periodic-gauge', + ]), + assertStream(graphite, [ + 'my-counter 1 0', + 'my-gauge 123 0', + 'my-meter 1 0', + /^my-timer \d+(\.\d+)? 0$/, + /^my-timer \d+(\.\d+)? 0$/, + 'my-set 123 0', + 'my-periodic-gauge 100 0', + ]), + assertStream(prometheus, [ + /^my-counter{metaFor="my-counter",more="meta"} 1 \d+(\.\d+)?$/, + /^my-gauge{metaFor="my-gauge",more="meta"} 123 \d+(\.\d+)?$/, + /^my-meter{metaFor="my-meter",more="meta"} 1 \d+(\.\d+)?$/, + /^my-timer{metaFor="my-timer",more="t1"} \d+(\.\d+)? \d+(\.\d+)?$/, + /^my-timer{metaFor="my-timer",more="t2"} \d+(\.\d+)? \d+(\.\d+)?$/, + /^my-set{metaFor="my-set",more="meta"} 123 \d+(\.\d+)?$/, + /^my-periodic-gauge{metaFor="my-periodic-gauge"} 100 \d+(\.\d+)?$/, + ]), + ]); +} + +main().then(common.mustCall()); diff --git a/test/parallel/test-metrics-timer.js b/test/parallel/test-metrics-timer.js new file mode 100644 index 00000000000000..3efb20691d6476 --- /dev/null +++ b/test/parallel/test-metrics-timer.js @@ -0,0 +1,57 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { timer, Timer, TimerFactory, Metric, MetricReport } = require('node:metrics'); + +const testTimer = timer('test', { base: 'test' }); +assert.ok(testTimer instanceof TimerFactory); +assert.ok(testTimer.metric instanceof Metric); + +const { metric } = testTimer; +assert.strictEqual(metric.type, 'timer'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:timer:test'); + +const a = testTimer.create({ timer: 'a' }); +const b = testTimer.create({ timer: 'b' }); + +assert.ok(a instanceof Timer); +assert.deepStrictEqual(a.meta, { base: 'test', timer: 'a' }); +assert.strictEqual(a.value, 0); +assert.ok(a.metric instanceof Metric); + +const messages = [ + [50, { base: 'test', timer: 'a', meta: 'extra' }], + [100, { base: 'test', timer: 'b' }], +]; + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'timer'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.ok(near(report.value, value)); + assert.deepStrictEqual(report.meta, meta); +}, 2)); + +// NOTE: If this test is flaky, tune the threshold to give more leeway to the timing +function near(actual, expected, threshold = 10) { + return Math.abs(actual - expected) <= threshold; +} + +setTimeout(common.mustCall(() => { + a.stop({ meta: 'extra' }); + assert.ok(a.start > 0); + assert.ok(a.end > 0); + assert.ok(a.duration > 0); +}), 50); + +setTimeout(common.mustCall(() => { + b.stop(); +}), 100); diff --git a/test/parallel/test-metrics-unique-set.js b/test/parallel/test-metrics-unique-set.js new file mode 100644 index 00000000000000..c4accbeb6c79fd --- /dev/null +++ b/test/parallel/test-metrics-unique-set.js @@ -0,0 +1,48 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { uniqueSet, UniqueSet, Metric, MetricReport } = require('node:metrics'); + +const testUniqueSet = uniqueSet('test', { base: 'test' }); +assert.ok(testUniqueSet instanceof UniqueSet); +assert.strictEqual(testUniqueSet.value, 0); +assert.ok(testUniqueSet.metric instanceof Metric); + +const { metric } = testUniqueSet; +assert.strictEqual(metric.type, 'uniqueSet'); +assert.strictEqual(metric.name, 'test'); +assert.deepStrictEqual(metric.meta, { base: 'test' }); +assert.strictEqual(metric.channelName, 'metrics:uniqueSet:test'); + +const messages = [ + [1, { base: 'test', meta: 'foo' }], + [2, { base: 'test', meta: 'baz' }], +]; + +subscribe(metric.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'uniqueSet'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.strictEqual(report.value, value); + assert.deepStrictEqual(report.meta, meta); +}, 2)); + +const foo = { foo: 'bar' }; +const baz = { baz: 'buz' }; + +assert.strictEqual(testUniqueSet.count, 0); + +testUniqueSet.add(foo, { meta: 'foo' }); +assert.strictEqual(testUniqueSet.count, 1); + +testUniqueSet.add(foo, { meta: 'should not trigger or report!' }); +assert.strictEqual(testUniqueSet.count, 1); + +testUniqueSet.add(baz, { meta: 'baz' }); +assert.strictEqual(testUniqueSet.count, 2); From 7d8d64ac48d5a65cfa4bf1fa2f9ded2ae109fc77 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 19:17:51 +0800 Subject: [PATCH 02/18] lib: remove metric streams and unique sets --- doc/api/metrics.md | 172 +---------------------- lib/metrics.js | 130 ----------------- test/parallel/test-metrics-streams.js | 118 ---------------- test/parallel/test-metrics-unique-set.js | 48 ------- 4 files changed, 4 insertions(+), 464 deletions(-) delete mode 100644 test/parallel/test-metrics-streams.js delete mode 100644 test/parallel/test-metrics-unique-set.js diff --git a/doc/api/metrics.md b/doc/api/metrics.md index 5c23c361e130c7..f482bc9a5e8ce4 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -30,8 +30,7 @@ flexible consumption patterns. ### Example ```mjs -import { counter, timer, statsdStream } from 'node:metrics'; -import { createWriteStream } from 'node:fs'; +import { counter, timer } from 'node:metrics'; // Create a counter metric const apiCalls = counter('api.calls', { service: 'web' }); @@ -39,10 +38,6 @@ const apiCalls = counter('api.calls', { service: 'web' }); // Create a timer factory const requestTimer = timer('api.request.duration', { service: 'web' }); -// Export metrics to StatsD format -const statsd = statsdStream(); -statsd.pipe(createWriteStream('metrics.log')); - // Use metrics in your application function handleRequest(req, res) { const timer = requestTimer.create({ endpoint: req.url }); @@ -56,8 +51,7 @@ function handleRequest(req, res) { ``` ```cjs -const { counter, timer, statsdStream } = require('node:metrics'); -const { createWriteStream } = require('node:fs'); +const { counter, timer } = require('node:metrics'); // Create a counter metric const apiCalls = counter('api.calls', { service: 'web' }); @@ -65,10 +59,6 @@ const apiCalls = counter('api.calls', { service: 'web' }); // Create a timer factory const requestTimer = timer('api.request.duration', { service: 'web' }); -// Export metrics to StatsD format -const statsd = statsdStream(); -statsd.pipe(createWriteStream('metrics.log')); - // Use metrics in your application function handleRequest(req, res) { const timer = requestTimer.create({ endpoint: req.url }); @@ -172,28 +162,6 @@ const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); const duration = t.stop(); // Returns duration in milliseconds ``` -### `metrics.uniqueSet(name[, meta])` - - - -* `name` {string} The name of the unique set metric. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.UniqueSet} - -Creates a unique set metric which counts distinct objects. Uniqueness is -determined through a `WeakSet`, so it follows the same identity rules. - -```mjs -import { uniqueSet } from 'node:metrics'; - -const uniqueUsers = uniqueSet('users.unique'); - -uniqueUsers.add(user); // Only counted once per unique value -uniqueUsers.add(anotherUser); -``` - ### `metrics.periodicGauge(name, interval, fn[, meta])` - -A metric that counts unique values. - -#### `uniqueSet.metric` - - - -* {metrics.Metric} - -The underlying metric instance used for reporting. - -#### `uniqueSet.add(value[, meta])` - - - -* `value` {any} The value to add to the set. -* `meta` {Object} Additional metadata for this report. - -Adds a value to the set. Only reports if the value hasn't been seen before. - -```mjs -import { uniqueSet } from 'node:metrics'; - -const uniqueUsers = uniqueSet('users.unique'); - -uniqueUsers.add(user); // Only counted once per unique value -uniqueUsers.add(user); // Ignored because user is already in the set -uniqueUsers.add(anotherUser, { source: 'login' }); // Count another user with metadata -``` - -#### `uniqueSet.count` - - - -* {number} - -The number of unique values seen. - ### Class: `PeriodicGauge` * Extends: {metrics.Gauge} @@ -903,87 +820,6 @@ import { cpuUsage } from 'node:process'; } ``` -## Metric Streams - -### `metrics.statsdStream()` - - - -* Returns: {stream.Transform} - -Creates a transform stream that converts all metrics to StatsD format. - -```mjs -import { statsdStream } from 'node:metrics'; -import { stdout } from 'node:process'; - -const stream = statsdStream(); -stream.pipe(stdout); -``` - -Output format: `metric.name:value|type` - -### `metrics.dogstatsdStream()` - - - -* Returns: {stream.Transform} - -Creates a transform stream that converts all metrics to DogStatsD format with tags. - -```mjs -import { dogstatsdStream } from 'node:metrics'; -import { stdout } from 'node:process'; - -const stream = dogstatsdStream(); -stream.pipe(stdout); -``` - -Output format: `metric.name:value|type|key:value,key2:value2` - -### `metrics.graphiteStream()` - - - -* Returns: {stream.Transform} - -Creates a transform stream that converts all metrics to Graphite plaintext protocol. - -```mjs -import { dogstatsdStream } from 'node:metrics'; -import { stdout } from 'node:process'; - -const stream = graphiteStream(); -stream.pipe(stdout); -``` - -Output format: `metric.name value timestamp` - -### `metrics.prometheusStream()` - - - -* Returns: {stream.Transform} - -Creates a transform stream that converts all metrics to Prometheus exposition format. - -```mjs -import { prometheusStream } from 'node:metrics'; -import { stdout } from 'node:process'; - -const stream = prometheusStream(); -stream.pipe(stdout); -``` - -Output format: `metric_name{label="value"} value timestamp` ## Integration with Diagnostics Channel diff --git a/lib/metrics.js b/lib/metrics.js index 96369d8af80ec8..66b7b68d4f5466 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -7,17 +7,7 @@ * - Gauge: A snapshot of a single value in time. * - Meter: A number of events per interval. * - Timer: A duration in milliseconds. - * - UniqueSet: A unique count of number of unique values seen. * - PeriodicGauge: A gauge which periodically updates its value by calling a function. - * - * # Exporting Metrics - * - * Several text format exporters are provided as streams: - * - Statsd - * - Dogstatsd - * - Graphite - * - Prometheus - * * # TODO(qard): * - Histograms * - Distributions/Summaries @@ -34,7 +24,6 @@ const { ObjectFreeze, ObjectKeys, SafeMap, - SafeWeakSet, SymbolDispose, } = primordials; @@ -53,7 +42,6 @@ const { unsubscribe, } = require('diagnostics_channel'); const { performance } = require('perf_hooks'); -const { Transform } = require('stream'); const newMetricChannel = channel('metrics:new'); @@ -133,7 +121,6 @@ class MetricReport { meter: 'm', periodicGauge: 'g', timer: 'ms', - uniqueSet: 's', }[type]; } @@ -494,36 +481,6 @@ class Timer extends Gauge { } } -/** - * A count of the number of unique values that have been seen. - */ -class UniqueSet extends Gauge { - // Use a weak set to track unique values without retaining references. - #seen = new SafeWeakSet(); - - /** - * The number of unique values seen. - * @property {number} count - */ - count = 0; - - /** - * Add value to set. If value was not already present, report it. - * @param {any} value The value to track in the set. - * @param {object} meta Additional metadata to include with the report. - */ - add(value, meta) { - // If already seen, do nothing. - if (this.#seen.has(value)) { - return; - } - - this.count += 1; - this.#seen.add(value); - this.reset(this.count, meta); - } -} - /** * A gauge which periodically updates its value by calling a function and * setting the value to the result. @@ -668,7 +625,6 @@ const metricTypes = { meter: Meter, periodicGauge: PeriodicGauge, timer: Timer, - uniqueSet: UniqueSet, }; const metricTypeNames = ObjectKeys(metricTypes); @@ -697,77 +653,6 @@ function direct(type) { }; } -/** - * Create a stream which converts metrics to a string using a converter. - * @param {Function} converter The function to convert metrics to strings. - * @returns {Transform} A readable stream of converted metrics. - * @private - */ -function metricStreamFactory(converter) { - return function makeMetricStream() { - const stream = new Transform({ - // Receives Metric objects - writableObjectMode: true, - - // Emits statsd-formatted strings - transform(metric, encoding, callback) { - if (!this.closed) { - callback(null, `${converter(metric)}\n`); - } - }, - }); - - // Track all channel subscriptions to unsubscribe when the stream ends. - const subscribers = new SafeMap(); - - // Subscribe and track subscriptions. - function sub(name, listener) { - subscribe(name, listener); - subscribers.set(name, listener); - } - - // Subscribe to new metric channel to discover and subscribe to new metrics. - newMetricChannel.subscribe(({ type, name }) => { - const key = `metrics:${type}:${name}`; - sub(key, (result) => stream.write(result)); - }); - - // If the stream is ended, unsubscribe all listeners. - stream.on('finish', () => { - for (const { 0: name, 1: listener } of subscribers.entries()) { - unsubscribe(name, listener); - } - subscribers.clear(); - }); - - return stream; - }; -} - -/** - * Create a stream converting all metrics to a statsd-compatible format. - * @returns {Transform} A readable stream of statsd-formatted metrics. - */ -const statsdStream = metricStreamFactory((r) => r.toStatsd()); - -/** - * Create a stream converting all metrics to a Dogstatsd-compatible format. - * @returns {Transform} A readable stream of Dogstatsd-formatted metrics. - */ -const dogstatsdStream = metricStreamFactory((r) => r.toDogStatsd()); - -/** - * Create a stream converting all metrics to a Prometheus-compatible format. - * @returns {Transform} A readable stream of Prometheus-formatted metrics. - */ -const prometheusStream = metricStreamFactory((r) => r.toPrometheus()); - -/** - * Create a stream converting all metrics to a graphite-compatible format. - * @returns {Transform} A readable stream of graphite-formatted metrics. - */ -const graphiteStream = metricStreamFactory((r) => r.toGraphite()); - /** * Create a counter metric. * @param {string} name The name of the counter. @@ -813,36 +698,21 @@ function metric(type, name, meta) { */ const periodicGauge = direct('periodicGauge'); -/** - * Create a unique set metric. - * @param {string} name The name of the set. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Set} The set metric. - */ -const uniqueSet = direct('uniqueSet'); - module.exports = { MetricReport, Metric, Gauge, Counter, Timer, - UniqueSet, PeriodicGauge, Meter, TimerFactory, - metricStreamFactory, - statsdStream, - dogstatsdStream, - graphiteStream, - prometheusStream, counter, gauge, meter, metric, periodicGauge, - uniqueSet, timer, }; diff --git a/test/parallel/test-metrics-streams.js b/test/parallel/test-metrics-streams.js deleted file mode 100644 index b59a8e3ce9b01c..00000000000000 --- a/test/parallel/test-metrics-streams.js +++ /dev/null @@ -1,118 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const { setTimeout: wait } = require('node:timers/promises'); -const assert = require('assert'); -const { - counter, - gauge, - meter, - timer, - uniqueSet, - periodicGauge, - - statsdStream, - dogstatsdStream, - graphiteStream, - prometheusStream, -} = require('node:metrics'); - -const statsd = statsdStream(); -const dogstatsd = dogstatsdStream(); -const graphite = graphiteStream(); -const prometheus = prometheusStream(); - -async function assertStream(stream, expected) { - // Mark stream as ended so the toArray() can resolve. - stream.end(); - const chunks = await stream.toArray(); - const actual = Buffer.concat(chunks).toString().split('\n').filter((v) => v); - assert.strictEqual(actual.length, expected.length, `Stream should yield ${expected.length} lines`); - for (let i = 0; i < actual.length; i++) { - if (typeof expected[i] === 'string') { - assert.strictEqual(actual[i], expected[i], `Stream line ${i + 1} should match expected output`); - } else { - assert.ok(expected[i].test(actual[i]), `Stream line ${i + 1} should match expected output (${actual[i]})`); - } - } -} - -async function main() { - const c = counter('my-counter', { metaFor: 'my-counter' }); - c.increment(1, { more: 'meta' }); - - const g = gauge('my-gauge', { metaFor: 'my-gauge' }); - g.reset(123, { more: 'meta' }); - - const m = meter('my-meter', 100, { metaFor: 'my-meter' }); - m.mark(1, { more: 'meta' }); - - const t = timer('my-timer', { metaFor: 'my-timer' }); - const t1 = t.create({ more: 't1' }); - const t2 = t.create({ more: 't2' }); - - await wait(50); - t1.stop(); - - await wait(100); - t2.stop(); - - const s = uniqueSet('my-set', { metaFor: 'my-set' }); - s.reset(123, { more: 'meta' }); - - await new Promise((resolve) => { - const pg = periodicGauge('my-periodic-gauge', 50, () => { - setImmediate(resolve); - clearInterval(timer); - pg.stop(); - return 100; - }, { - metaFor: 'my-periodic-gauge' - }); - - // Keep the loop alive - pg.ref(); - }); - - await Promise.all([ - assertStream(statsd, [ - 'my-counter:1|c', - 'my-gauge:123|g', - 'my-meter:1|m', - /^my-timer:\d+(\.\d+)?\|ms$/, - /^my-timer:\d+(\.\d+)?\|ms$/, - 'my-set:123|s', - 'my-periodic-gauge:100|g', - ]), - assertStream(dogstatsd, [ - 'my-counter:1|c|metaFor:my-counter,more:meta', - 'my-gauge:123|g|metaFor:my-gauge,more:meta', - 'my-meter:1|m|metaFor:my-meter,more:meta', - /^my-timer:\d+(\.\d+)?\|ms\|metaFor:my-timer,more:t1$/, - /^my-timer:\d+(\.\d+)?\|ms\|metaFor:my-timer,more:t2$/, - 'my-set:123|s|metaFor:my-set,more:meta', - 'my-periodic-gauge:100|g|metaFor:my-periodic-gauge', - ]), - assertStream(graphite, [ - 'my-counter 1 0', - 'my-gauge 123 0', - 'my-meter 1 0', - /^my-timer \d+(\.\d+)? 0$/, - /^my-timer \d+(\.\d+)? 0$/, - 'my-set 123 0', - 'my-periodic-gauge 100 0', - ]), - assertStream(prometheus, [ - /^my-counter{metaFor="my-counter",more="meta"} 1 \d+(\.\d+)?$/, - /^my-gauge{metaFor="my-gauge",more="meta"} 123 \d+(\.\d+)?$/, - /^my-meter{metaFor="my-meter",more="meta"} 1 \d+(\.\d+)?$/, - /^my-timer{metaFor="my-timer",more="t1"} \d+(\.\d+)? \d+(\.\d+)?$/, - /^my-timer{metaFor="my-timer",more="t2"} \d+(\.\d+)? \d+(\.\d+)?$/, - /^my-set{metaFor="my-set",more="meta"} 123 \d+(\.\d+)?$/, - /^my-periodic-gauge{metaFor="my-periodic-gauge"} 100 \d+(\.\d+)?$/, - ]), - ]); -} - -main().then(common.mustCall()); diff --git a/test/parallel/test-metrics-unique-set.js b/test/parallel/test-metrics-unique-set.js deleted file mode 100644 index c4accbeb6c79fd..00000000000000 --- a/test/parallel/test-metrics-unique-set.js +++ /dev/null @@ -1,48 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { uniqueSet, UniqueSet, Metric, MetricReport } = require('node:metrics'); - -const testUniqueSet = uniqueSet('test', { base: 'test' }); -assert.ok(testUniqueSet instanceof UniqueSet); -assert.strictEqual(testUniqueSet.value, 0); -assert.ok(testUniqueSet.metric instanceof Metric); - -const { metric } = testUniqueSet; -assert.strictEqual(metric.type, 'uniqueSet'); -assert.strictEqual(metric.name, 'test'); -assert.deepStrictEqual(metric.meta, { base: 'test' }); -assert.strictEqual(metric.channelName, 'metrics:uniqueSet:test'); - -const messages = [ - [1, { base: 'test', meta: 'foo' }], - [2, { base: 'test', meta: 'baz' }], -]; - -subscribe(metric.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'uniqueSet'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.strictEqual(report.value, value); - assert.deepStrictEqual(report.meta, meta); -}, 2)); - -const foo = { foo: 'bar' }; -const baz = { baz: 'buz' }; - -assert.strictEqual(testUniqueSet.count, 0); - -testUniqueSet.add(foo, { meta: 'foo' }); -assert.strictEqual(testUniqueSet.count, 1); - -testUniqueSet.add(foo, { meta: 'should not trigger or report!' }); -assert.strictEqual(testUniqueSet.count, 1); - -testUniqueSet.add(baz, { meta: 'baz' }); -assert.strictEqual(testUniqueSet.count, 2); From b63358110cffeb5a6556fb395c0a80c903e1bfdd Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 19:38:59 +0800 Subject: [PATCH 03/18] lib: remove meter metric --- doc/api/metrics.md | 76 +++-------------------------- lib/metrics.js | 72 --------------------------- test/parallel/test-metrics-meter.js | 43 ---------------- 3 files changed, 6 insertions(+), 185 deletions(-) delete mode 100644 test/parallel/test-metrics-meter.js diff --git a/doc/api/metrics.md b/doc/api/metrics.md index f482bc9a5e8ce4..c30bc9924b265d 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -118,28 +118,6 @@ memory.reset(memoryUsage().heapUsed); memory.applyDelta(1024); // Add 1024 to current value ``` -### `metrics.meter(name, interval[, meta])` - - - -* `name` {string} The name of the meter metric. -* `interval` {number} The time window in milliseconds for rate calculation. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.Meter} - -Creates a meter metric that measures the rate of events over time. - -```mjs -import { meter } from 'node:metrics'; - -const requestRate = meter('requests.rate', 60000); // 1 minute window - -requestRate.mark(); // Mark one event -requestRate.mark(10); // Mark 10 events -``` - ### `metrics.timer(name[, meta])` - -A metric that measures the rate of events over a sliding time window. - -#### `meter.metric` - - - -* {metrics.Metric} - -The underlying metric instance used for reporting. - -#### `meter.mark([n[, meta]])` - - - -* `n` {number} The number of events to mark. **Default:** `1` -* `meta` {Object} Additional metadata for this report. - -Records events and updates the rate calculation. - -```mjs -import { meter } from 'node:metrics'; - -const requestRate = meter('requests.rate', 60000); // 1 minute window - -requestRate.mark(); // Mark one event -requestRate.mark(10); // Mark 10 events -requestRate.mark(5, { endpoint: '/api/users' }); // Mark 5 with metadata -requestRate.mark({ endpoint: '/api/orders' }); // Mark 1 with metadata -``` - ### Class: `Timer` * Extends: {metrics.Gauge} diff --git a/lib/metrics.js b/lib/metrics.js index 66b7b68d4f5466..93d76f1d2fb5f1 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -5,7 +5,6 @@ * * - Counter: An increasing or decreasing value. * - Gauge: A snapshot of a single value in time. - * - Meter: A number of events per interval. * - Timer: A duration in milliseconds. * - PeriodicGauge: A gauge which periodically updates its value by calling a function. * # TODO(qard): @@ -118,7 +117,6 @@ class MetricReport { return { counter: 'c', gauge: 'g', - meter: 'm', periodicGauge: 'g', timer: 'ms', }[type]; @@ -317,65 +315,6 @@ class Gauge { } } -/** - * Number of events per interval. This will report at every mark() call, - * but will report an aggregate value if a sliding window of marks which - * occurred within the interval period. - */ -class Meter extends Gauge { - #window; - - /** - * The metric to report to. - * @property {Metric} metric - */ - - /** - * The interval in milliseconds to aggregate marks over. - * @property {number} interval - */ - - /** - * Construct a new meter. - * @param {Metric} metric The metric to report to. - * @param {number} interval The interval in milliseconds to aggregate marks over. - */ - constructor(metric, interval) { - super(metric); - if (typeof interval !== 'number' || interval <= 0) { - throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); - } - this.interval = interval; - this.#window = []; - } - - /** - * Mark an event in the meter. - * @param {number} [n] The number of events to mark. Defaults to 1. - * @param {object} [meta] Additional metadata to include with the report. - */ - mark(n = 1, meta) { - if (!this.metric.shouldReport) return; - - if (typeof n === 'object') { - meta = n; - n = 1; - } - - const now = performance.now(); - this.#window.push({ value: n, time: now }); - - let { value } = this; - while (this.#window.length && (now - this.#window[0].time) > this.interval) { - const cached = this.#window.shift(); - value -= cached.value; - } - - value += n; - this.reset(value, meta); - } -} - /** * An increasing or decreasing value. */ @@ -622,7 +561,6 @@ function timer(name, meta) { const metricTypes = { counter: Counter, gauge: Gauge, - meter: Meter, periodicGauge: PeriodicGauge, timer: Timer, }; @@ -669,14 +607,6 @@ const counter = direct('counter'); */ const gauge = direct('gauge'); -/** - * Create a meter metric. - * @param {string} name The name of the meter. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Gauge} The meter metric. - */ -const meter = direct('meter'); - /** * Create a raw metric. * @param {string} type The type of metric to create (e.g., 'gauge', 'counter'). @@ -705,13 +635,11 @@ module.exports = { Counter, Timer, PeriodicGauge, - Meter, TimerFactory, counter, gauge, - meter, metric, periodicGauge, timer, diff --git a/test/parallel/test-metrics-meter.js b/test/parallel/test-metrics-meter.js deleted file mode 100644 index 36148bd88db766..00000000000000 --- a/test/parallel/test-metrics-meter.js +++ /dev/null @@ -1,43 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { meter, Meter, Metric, MetricReport } = require('node:metrics'); - -const testMeter = meter('test', 100, { base: 'test' }); -assert.ok(testMeter instanceof Meter); -assert.strictEqual(testMeter.value, 0); -assert.strictEqual(testMeter.interval, 100); -assert.ok(testMeter.metric instanceof Metric); - -const { metric } = testMeter; -assert.strictEqual(metric.type, 'meter'); -assert.strictEqual(metric.name, 'test'); -assert.deepStrictEqual(metric.meta, { base: 'test' }); -assert.strictEqual(metric.channelName, 'metrics:meter:test'); - -const messages = [ - [1, { base: 'test' }], - [124, { base: 'test', meta: 'extra' }], - [1, { base: 'test' }], -]; - -subscribe(metric.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'meter'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.strictEqual(report.value, value); - assert.deepStrictEqual(report.meta, meta); -}, 3)); - -testMeter.mark(); -testMeter.mark(123, { meta: 'extra' }); - -setTimeout(() => { - testMeter.mark(); -}, 200); From 7a06154820010f4b1a6f545986421f8f422176af Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 19:46:59 +0800 Subject: [PATCH 04/18] lib: replace periodic gauge with a pull gauge --- doc/api/metrics.md | 99 +++++------------ lib/metrics.js | 106 ++++--------------- test/parallel/test-metrics-periodic-gauge.js | 50 --------- 3 files changed, 45 insertions(+), 210 deletions(-) delete mode 100644 test/parallel/test-metrics-periodic-gauge.js diff --git a/doc/api/metrics.md b/doc/api/metrics.md index c30bc9924b265d..eaa13f256cca01 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -140,30 +140,29 @@ const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); const duration = t.stop(); // Returns duration in milliseconds ``` -### `metrics.periodicGauge(name, interval, fn[, meta])` +### `metrics.pullGauge(name, fn[, meta])` -* `name` {string} The name of the periodic gauge metric. -* `interval` {number} The interval in milliseconds between samples. +* `name` {string} The name of the pull gauge metric. * `fn` {Function} A function that returns the current value. * `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.PeriodicGauge} +* Returns: {metrics.PullGauge} -Creates a gauge that automatically samples a value at regular intervals. +Creates a gauge that samples a value on-demand by calling the provided function. ```mjs -import { periodicGauge } from 'node:metrics'; +import { pullGauge } from 'node:metrics'; import { cpuUsage } from 'node:process'; -const cpu = periodicGauge('cpu.usage', 5000, () => { +const cpu = pullGauge('cpu.usage', () => { return cpuUsage().user; }); -// Stop sampling when no longer needed -cpu.stop(); +// Sample the gauge when needed +cpu.sample(); ``` ## Classes @@ -184,7 +183,7 @@ added: REPLACEME * {string} -The type of the metric (e.g., 'counter', 'gauge', 'periodicGauge', +The type of the metric (e.g., 'counter', 'gauge', 'pullGauge', 'timer'). #### `metricReport.name` @@ -301,7 +300,7 @@ added: REPLACEME * {string} -The type of the metric (e.g., 'counter', 'gauge', 'periodicGauge', +The type of the metric (e.g., 'counter', 'gauge', 'pullGauge', 'timer'). #### `metric.name` @@ -659,7 +658,7 @@ const dbQueryTimer = timer('db.query.duration'); const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); ``` -### Class: `PeriodicGauge` +### Class: `PullGauge` * Extends: {metrics.Gauge} @@ -667,9 +666,9 @@ const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); added: REPLACEME --> -A gauge that automatically samples values at regular intervals. +A gauge that samples values on-demand when the `sample()` method is called. -#### `periodicGauge.metric` +#### `pullGauge.metric` -* {number} - -The sampling interval in milliseconds. Setting this property reschedules the timer. - -#### `periodicGauge.schedule()` - - - -Schedules the periodic sampling based on the configured interval. This is called -automatically when the gauge is created, but can be called again to reschedule -after it has been stopped. - -```mjs -import { periodicGauge } from 'node:metrics'; -import { cpuUsage } from 'node:process'; - -const cpu = periodicGauge('cpu.usage', 5000, () => { - return cpuUsage().user; -}); - -cpu.stop(); - -// Reschedule sampling -cpu.schedule(); -``` - -#### `periodicGauge.stop()` +* `meta` {Object} Additional metadata for this specific sample. +* Returns: {number} The sampled value. - - -Stops the periodic sampling. +Calls the configured function to get the current value and reports it. ```mjs -import { periodicGauge } from 'node:metrics'; +import { pullGauge } from 'node:metrics'; import { cpuUsage } from 'node:process'; -const cpu = periodicGauge('cpu.usage', 5000, () => { +const cpu = pullGauge('cpu.usage', () => { return cpuUsage().user; }); -// Stop sampling when no longer needed -cpu.stop(); -``` - -#### `periodicGauge[Symbol.dispose]()` - - - -Allows `using` syntax to automatically stop the periodic gauge when done. - -```mjs -import { periodicGauge } from 'node:metrics'; -import { cpuUsage } from 'node:process'; +// Sample the gauge when needed +const value = cpu.sample(); +console.log(`Current CPU usage: ${value}`); -{ - using cpu = periodicGauge('cpu.usage', 1000, () => { - return cpuUsage().user; - }); - - // Perform operations that require periodic sampling... - - // Sampling is automatically stopped here -} +// Sample with additional metadata +cpu.sample({ threshold: 'high' }); ``` diff --git a/lib/metrics.js b/lib/metrics.js index 93d76f1d2fb5f1..eeb112ccdf6eff 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -6,7 +6,7 @@ * - Counter: An increasing or decreasing value. * - Gauge: A snapshot of a single value in time. * - Timer: A duration in milliseconds. - * - PeriodicGauge: A gauge which periodically updates its value by calling a function. + * - PullGauge: A gauge which updates its value by calling a function when sampled. * # TODO(qard): * - Histograms * - Distributions/Summaries @@ -32,7 +32,6 @@ const { ERR_INVALID_ARG_VALUE, }, } = require('internal/errors'); -const { setInterval, clearInterval } = require('internal/timers'); const { channel, @@ -117,7 +116,7 @@ class MetricReport { return { counter: 'c', gauge: 'g', - periodicGauge: 'g', + pullGauge: 'g', timer: 'ms', }[type]; } @@ -421,96 +420,34 @@ class Timer extends Gauge { } /** - * A gauge which periodically updates its value by calling a function and - * setting the value to the result. + * A gauge which updates its value by calling a function when sampled. */ -class PeriodicGauge extends Gauge { - #timer; - #interval; +class PullGauge extends Gauge { #fn; /** - * Construct a new periodic gauge. + * Construct a new pull gauge. * @param {Metric} metric The metric to report to. - * @param {number} interval The interval in milliseconds to update the gauge. - * @param {Function} fn The function to call to update the gauge. + * @param {Function} fn The function to call to get the gauge value. */ - constructor(metric, interval, fn) { + constructor(metric, fn) { super(metric); - if (typeof interval !== 'number' || interval <= 0) { - throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); - } if (typeof fn !== 'function') { throw new ERR_INVALID_ARG_TYPE('fn', ['function'], fn); } - this.#timer = undefined; - this.#interval = interval; this.#fn = fn; - - this.schedule(); - } - - /** - * Schedule the update timer. - */ - schedule() { - this.stop(); - - this.#timer = setInterval(() => { - this.reset(this.#fn()); - }, this.interval); - - // Don't keep the process alive just for this timer. - this.#timer.unref(); - } - - /** - * The interval in milliseconds at which to update the value. If changed, - * the timer will be rescheduled. - * @property {number} interval - */ - set interval(interval) { - if (typeof interval !== 'number' || interval <= 0) { - throw new ERR_INVALID_ARG_TYPE('interval', ['number'], interval); - } - this.#interval = interval; - this.schedule(); - } - get interval() { - return this.#interval; } /** - * Stop the periodic gauge. - */ - stop() { - if (this.#timer !== undefined) { - clearInterval(this.#timer); - this.#timer = undefined; - } - } - - /** - * Reference the timer to prevent to loop from exiting. - */ - ref() { - this.#timer?.ref(); - } - - /** - * Unreference the timer to allow the loop to exit. - */ - unref() { - this.#timer?.unref(); - } - - /** - * Support `using` syntax to automatically stop the periodic gauge when done. + * Sample the gauge by calling the function and reporting the value. + * @param {object} [meta] Additional metadata to include with the report. */ - [SymbolDispose]() { - this.stop(); + sample(meta) { + const value = this.#fn(); + this.reset(value, meta); + return value; } } @@ -561,7 +498,7 @@ function timer(name, meta) { const metricTypes = { counter: Counter, gauge: Gauge, - periodicGauge: PeriodicGauge, + pullGauge: PullGauge, timer: Timer, }; @@ -619,14 +556,13 @@ function metric(type, name, meta) { } /** - * Create a periodic gauge metric. - * @param {string} name The name of the periodic gauge. - * @param {number} interval The interval in milliseconds to update the gauge. - * @param {Function} fn The function to call to update the gauge. + * Create a pull gauge metric. + * @param {string} name The name of the pull gauge. + * @param {Function} fn The function to call to get the gauge value. * @param {object} [meta] Additional metadata to include with the report. - * @returns {PeriodicGauge} The periodic gauge metric. + * @returns {PullGauge} The pull gauge metric. */ -const periodicGauge = direct('periodicGauge'); +const pullGauge = direct('pullGauge'); module.exports = { MetricReport, @@ -634,13 +570,13 @@ module.exports = { Gauge, Counter, Timer, - PeriodicGauge, + PullGauge, TimerFactory, counter, gauge, metric, - periodicGauge, + pullGauge, timer, }; diff --git a/test/parallel/test-metrics-periodic-gauge.js b/test/parallel/test-metrics-periodic-gauge.js deleted file mode 100644 index 8ebc59e0e91fd0..00000000000000 --- a/test/parallel/test-metrics-periodic-gauge.js +++ /dev/null @@ -1,50 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { periodicGauge, PeriodicGauge, Metric, MetricReport } = require('node:metrics'); - -// NOTE: If this test is flaky, tune the interval to give more leeway to the timing -const interval = 50; -const values = [ 1, 5, 10, 4, 6 ]; - -const testPeriodicGauge = periodicGauge('test', 10, () => { - const value = values.shift(); - if (!values.length) { - testPeriodicGauge.stop(); - } else { - testPeriodicGauge.interval = interval; - testPeriodicGauge.ref(); - assert.strictEqual(testPeriodicGauge.interval, interval); - } - return value; -}, { base: 'test' }); - -// Keep the loop alive -testPeriodicGauge.ref(); - -assert.ok(testPeriodicGauge instanceof PeriodicGauge); -assert.strictEqual(testPeriodicGauge.value, 0); -assert.strictEqual(testPeriodicGauge.interval, 10); -assert.ok(testPeriodicGauge.metric instanceof Metric); - -const { metric } = testPeriodicGauge; -assert.strictEqual(metric.type, 'periodicGauge'); -assert.strictEqual(metric.name, 'test'); -assert.deepStrictEqual(metric.meta, { base: 'test' }); -assert.strictEqual(metric.channelName, 'metrics:periodicGauge:test'); - -const messages = values.map((v) => [v, { base: 'test' }]); - -subscribe(metric.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'periodicGauge'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.strictEqual(report.value, value); - assert.deepStrictEqual(report.meta, meta); -}, values.length)); From 989475e4c341a8aace6337c423cf3195cc504d20 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 19:50:16 +0800 Subject: [PATCH 05/18] lib: do not freeze MetricReport and Metric --- lib/metrics.js | 97 ++++++++++++++++++++++++++++++++++---------------- 1 file changed, 66 insertions(+), 31 deletions(-) diff --git a/lib/metrics.js b/lib/metrics.js index eeb112ccdf6eff..da6945ff3fc18a 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -60,40 +60,65 @@ function mixMeta(a, b) { * Represents a single reported metric. */ class MetricReport { + #type; + #name; + #value; + #meta; + #time; + + /** + * Constructs a new metric report. + * @param {string} type The type of metric. + * @param {string} name The name of the metric. + * @param {number} value The value of the metric. + * @param {object} [meta] Additional metadata to include with the report. + */ + constructor(type, name, value, meta) { + this.#type = type; + this.#name = name; + this.#value = value; + this.#meta = meta; + this.#time = performance.now(); + } + /** * The type of metric. * @property {string} type */ + get type() { + return this.#type; + } /** * The name of the metric. * @property {string} name */ + get name() { + return this.#name; + } /** * The value of the metric. * @property {number} value */ + get value() { + return this.#value; + } /** * Additional metadata to include with the report. * @property {object} meta */ + get meta() { + return this.#meta; + } /** - * Constructs a new metric report. - * @param {string} type The type of metric. - * @param {string} name The name of the metric. - * @param {number} value The value of the metric. - * @param {object} [meta] Additional metadata to include with the report. + * The timestamp of the report. + * @property {number} time */ - constructor(type, name, value, meta) { - this.type = type; - this.name = name; - this.value = value; - this.meta = meta; - this.time = performance.now(); - ObjectFreeze(this); + get time() { + return this.#time; } /** @@ -179,21 +204,9 @@ class MetricReport { */ class Metric { #channel; - - /** - * The type of metric. - * @property {string} type - */ - - /** - * The name of the metric. - * @property {string} name - */ - - /** - * Additional metadata to include with the metric. - * @property {object} meta - */ + #type; + #name; + #meta; /** * Constructs a new metric. @@ -212,9 +225,9 @@ class Metric { throw new ERR_INVALID_ARG_TYPE('meta', ['object', 'undefined'], meta); } - this.type = type; - this.name = name; - this.meta = meta; + this.#type = type; + this.#name = name; + this.#meta = meta; // Before acquiring the channel, check if it already exists. const exists = hasChannel(this.channelName); @@ -225,8 +238,30 @@ class Metric { if (!exists && newMetricChannel.hasSubscribers) { newMetricChannel.publish(this); } + } - ObjectFreeze(this); + /** + * The type of metric. + * @property {string} type + */ + get type() { + return this.#type; + } + + /** + * The name of the metric. + * @property {string} name + */ + get name() { + return this.#name; + } + + /** + * Additional metadata to include with the metric. + * @property {object} meta + */ + get meta() { + return this.#meta; } /** From bdb5fdf7c7f67e1f394fd13cfdd6349beefdf391 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 20:11:01 +0800 Subject: [PATCH 06/18] lib: remove metric report format output --- doc/api/metrics.md | 56 --------------- lib/metrics.js | 77 --------------------- test/parallel/test-metrics-metric-report.js | 14 ---- 3 files changed, 147 deletions(-) diff --git a/doc/api/metrics.md b/doc/api/metrics.md index eaa13f256cca01..b7bba409ef3f24 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -227,62 +227,6 @@ added: REPLACEME The `performance.now()` timestamp when the measurement was recorded in milliseconds since `performance.timeOrigin`. -#### `metricReport.toStatsd()` - - - -* Returns: {string} - -Formats the metric report as a StatsD-compatible string. - -```js -console.log(report.toStatsd()); // 'api.calls:1|c' -``` - -#### `metricReport.toDogStatsd()` - - - -* Returns: {string} - -Formats the metric report as a DogStatsD-compatible string with tags. - -```js -console.log(report.toDogStatsd()); // 'api.calls:1|c|service:web' -``` - -#### `metricReport.toGraphite()` - - - -* Returns: {string} - -Formats the metric report as a Graphite-compatible string. - -```js -console.log(report.toGraphite()); // 'api.calls 1 1234567890' -``` - -#### `metricReport.toPrometheus()` - - - -* Returns: {string} - -Formats the metric report as a Prometheus-compatible string. - -```js -console.log(report.toPrometheus()); // 'api_calls{service="web"} 1 1234567890.123' -``` - ### Class: `Metric` - -* `delta` {number} The amount to add to the current value. -* `meta` {Object} Additional metadata for this report. - -Adds a delta to the current value and reports the new value. - -```mjs -import { gauge } from 'node:metrics'; - -const cpuUsage = gauge('cpu.usage.percent'); - -cpuUsage.applyDelta(5); // Increase by 5 -cpuUsage.applyDelta(-2, { source: 'system' }); // Decrease by 2 with metadata -``` - ### Class: `Timer` * Extends: {metrics.Gauge} diff --git a/lib/metrics.js b/lib/metrics.js index ff4aedbb33bccd..e822384b4060c0 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -218,25 +218,34 @@ class Metric { * reset() is called, or when applyDelta() is called with a non-zero value. */ class Gauge { + #metric; + #value; + + /** + * @param {Metric} metric The metric to report to. + */ + constructor(metric) { + if (!(metric instanceof Metric)) { + throw new ERR_INVALID_ARG_TYPE('metric', ['Metric'], metric); + } + this.#metric = metric; + this.#value = 0; + } + /** * The metric to report to. * @property {Metric} metric */ + get metric() { + return this.#metric; + } /** * The value of the gauge. * @property {number} value */ - - /** - * @param {Metric} metric The metric to report to. - */ - constructor(metric) { - if (!(metric instanceof Metric)) { - throw new ERR_INVALID_ARG_TYPE('metric', ['Metric'], metric); - } - this.metric = metric; - this.value = 0; + get value() { + return this.#value; } /** @@ -245,18 +254,9 @@ class Gauge { * @param {object} [meta] Additional metadata to include with the report. */ reset(value = 0, meta) { - this.value = value; + this.#value = value; this.metric.report(value, meta); } - - /** - * Apply a delta to the gauge. - * @param {number} value The delta to apply to the gauge. - * @param {object} [meta] Additional metadata to include with the report. - */ - applyDelta(value, meta) { - this.reset(this.value + value, meta); - } } /** @@ -276,7 +276,7 @@ class Counter extends Gauge { n = 1; } - this.applyDelta(n, meta); + this.reset(this.value + n, meta); } /** @@ -292,7 +292,7 @@ class Counter extends Gauge { n = 1; } - this.applyDelta(-n, meta); + this.reset(this.value + -n, meta); } } diff --git a/test/parallel/test-metrics-gauge.js b/test/parallel/test-metrics-gauge.js index d5b38072fa4396..e7270dc1b66d78 100644 --- a/test/parallel/test-metrics-gauge.js +++ b/test/parallel/test-metrics-gauge.js @@ -19,7 +19,6 @@ assert.strictEqual(metric.channelName, 'metrics:gauge:test'); const messages = [ [123, { base: 'test', meta: 'first' }], - [357, { base: 'test', meta: 'second' }], [0, { base: 'test' }], ]; @@ -32,8 +31,7 @@ subscribe(metric.channelName, common.mustCall((report) => { const [value, meta] = messages.shift(); assert.strictEqual(report.value, value); assert.deepStrictEqual(report.meta, meta); -}, 3)); +}, 2)); testGauge.reset(123, { meta: 'first' }); -testGauge.applyDelta(234, { meta: 'second' }); testGauge.reset(); From c31e0cd3fd94238c57954471f14bf10e7407bda1 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 20:23:25 +0800 Subject: [PATCH 09/18] lib: detach counter from gauge --- doc/api/metrics.md | 20 +++++++------- lib/metrics.js | 38 ++++++++++++++++++++++++--- test/parallel/test-metrics-counter.js | 6 ++--- 3 files changed, 48 insertions(+), 16 deletions(-) diff --git a/doc/api/metrics.md b/doc/api/metrics.md index e8b3b3b16a6282..621ce4db9b993e 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -345,6 +345,16 @@ added: REPLACEME The underlying metric instance used for reporting. +#### `counter.value` + + + +* {number} + +The current value of the counter. + #### `counter.increment([n[, meta]])` - -* {number} - -The current value of the counter. - ### Class: `Gauge` - -* {metrics.Metric} - -The underlying metric instance used for reporting. - #### `counter.value` - -A metric representing a single value that can go up or down. - -#### `gauge.metric` +* Extends: {metrics.Metric} -* {metrics.Metric} - -The underlying metric instance used for reporting. +A metric representing a single value that can go up or down. #### `gauge.value` @@ -451,7 +433,7 @@ memory.reset(1024, { source: 'system' }); // Set to 1024 with metadata ### Class: `Timer` -* Extends: {metrics.Gauge} +* Extends: {metrics.Metric} - -* {metrics.Metric} - -The underlying metric instance used for reporting. - #### `timer.start` - -A factory for creating timer instances. - -#### `timer.metric` +* Extends: {metrics.Metric} -* {metrics.Metric} - -The underlying metric instance used for reporting. +A factory for creating timer instances. #### `timerFactory.create([meta])` @@ -583,7 +547,7 @@ const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); ### Class: `PullGauge` -* Extends: {metrics.Gauge} +* Extends: {metrics.Metric} - -* {metrics.Metric} - -The underlying metric instance used for reporting. - #### `pullGauge.sample([meta])` - -* `name` {string} The name of the timer metric. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.TimerFactory} - -Creates a timer factory for measuring durations. - -```mjs -import { timer } from 'node:metrics'; - -const dbQueryTimer = timer('db.query.duration'); - -const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); -// Perform database query... -const duration = t.stop(); // Returns duration in milliseconds -``` ### `metrics.pullGauge(name, fn[, meta])` @@ -389,6 +364,27 @@ errorCount.decrement(2, { errorType: 'timeout' }); // Decrement by 2 with metada errorCount.decrement({ errorType: 'timeout' }); // Decrement by 1 with metadata ``` +#### `counter.createTimer([meta])` + + + +* `meta` {Object} Additional metadata to include with the report. +* Returns: {Timer} + +Creates a timer that will increment this counter with its duration when stopped. + +```mjs +import { counter } from 'node:metrics'; + +const requestDuration = counter('request.duration.ms'); + +const timer = requestDuration.createTimer({ endpoint: '/api/users' }); +// Process request... +const duration = timer.stop(); // Counter is incremented with duration +``` + ### Class: `Gauge` * Extends: {metrics.Metric} @@ -431,15 +427,34 @@ memory.reset(memoryUsage().heapUsed); // Set to current memory usage memory.reset(1024, { source: 'system' }); // Set to 1024 with metadata ``` -### Class: `Timer` +#### `gauge.createTimer([meta])` -* Extends: {metrics.Metric} + + +* `meta` {Object} Additional metadata to include with the report. +* Returns: {Timer} + +Creates a timer that will set this gauge to its duration when stopped. + +```mjs +import { gauge } from 'node:metrics'; + +const responseTime = gauge('response.time.ms'); + +const timer = responseTime.createTimer({ endpoint: '/api/users' }); +// Process request... +const duration = timer.stop(); // Gauge is set to duration +``` + +### Class: `Timer` -A metric for measuring durations. +A helper for measuring durations that reports the elapsed time via a callback when stopped. #### `timer.start` @@ -449,7 +464,7 @@ added: REPLACEME * {number} -The start time of the timer (milliseconds since epoch). +The start time of the timer (milliseconds since `performance.timeOrigin`). This property is read-only. #### `timer.end` @@ -457,9 +472,9 @@ The start time of the timer (milliseconds since epoch). added: REPLACEME --> -* {number} +* {number|undefined} -The end time of the timer (milliseconds since epoch). Zero if timer is running. +The end time of the timer (milliseconds since `performance.timeOrigin`). `undefined` if timer is still running. This property is read-only. #### `timer.duration` @@ -467,27 +482,26 @@ The end time of the timer (milliseconds since epoch). Zero if timer is running. added: REPLACEME --> -* {number} +* {number|undefined} -The duration in milliseconds. Zero if timer is still running. +The duration in milliseconds. `undefined` if timer is still running. This property is read-only. -#### `timer.stop([meta])` +#### `timer.stop()` -* `meta` {Object} Additional metadata for this report. * Returns: {number} The duration in milliseconds. Stops the timer and reports the duration. Can only be called once. ```mjs -import { timer } from 'node:metrics'; +import { counter } from 'node:metrics'; -const dbQueryTimer = timer('db.query.duration'); +const dbQueryDuration = counter('db.query.duration'); -const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); +const t = dbQueryDuration.createTimer({ query: 'SELECT * FROM users' }); // Perform database query... @@ -504,46 +518,18 @@ added: REPLACEME Allows `using` syntax to automatically stop the timer when done. ```mjs -import { timer } from 'node:metrics'; +import { counter } from 'node:metrics'; -const dbQueryTimer = timer('db.query.duration'); +const dbQueryDuration = counter('db.query.duration'); { - using t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); + using t = dbQueryDuration.createTimer({ query: 'SELECT * FROM users' }); // Perform database query... // Timer is automatically stopped here } ``` -### Class: `TimerFactory` - -* Extends: {metrics.Metric} - - - -A factory for creating timer instances. - -#### `timerFactory.create([meta])` - - - -* `meta` {Object} Additional metadata for this timer. -* Returns: {metrics.Timer} - -Creates a new timer instance with the specified metadata. - -```mjs -import { timer } from 'node:metrics'; - -const dbQueryTimer = timer('db.query.duration'); - -const t = dbQueryTimer.create({ query: 'SELECT * FROM users' }); -``` ### Class: `PullGauge` diff --git a/lib/metrics.js b/lib/metrics.js index 850383810ede4b..5b14ddaf99c3b7 100644 --- a/lib/metrics.js +++ b/lib/metrics.js @@ -246,6 +246,17 @@ class Gauge extends Metric { this.#value = value; this.report(value, meta); } + + /** + * Create a timer that will set this gauge to its duration when stopped. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Timer} A new timer instance. + */ + createTimer(meta) { + return new Timer((duration) => { + this.reset(duration, meta); + }); + } } /** @@ -304,59 +315,76 @@ class Counter extends Metric { this.#value -= n; this.report(-n, meta); } + + /** + * Create a timer that will increment this counter with its duration when stopped. + * @param {object} [meta] Additional metadata to include with the report. + * @returns {Timer} A new timer instance. + */ + createTimer(meta) { + return new Timer((duration) => { + this.increment(duration, meta); + }); + } } /** - * A floating point number which represents a length of time in milliseconds. + * A timer that measures duration and reports the measured time via a callback. */ -class Timer extends Metric { - #instanceMeta; +class Timer { + #report; + #start; + #end; + #duration; + + /** + * Construct a new timer. + * @param {Function} reportCallback The function to call with the duration when stopped. + */ + constructor(report) { + if (typeof report !== 'function') { + throw new ERR_INVALID_ARG_TYPE('report', ['function'], report); + } + this.#report = report; + this.#start = performance.now(); + this.#end = undefined; + this.#duration = undefined; + } /** * The start time of the timer. * @property {number} start */ + get start() { + return this.#start; + } /** * End time of timer. If undefined, timer is still running. * @property {number|undefined} end */ + get end() { + return this.#end; + } /** * Duration of timer in milliseconds. If undefined, timer is still running. * @property {number|undefined} duration */ - - /** - * Construct a new timer. - * @param {string} name The name of the timer metric. - * @param {object} [baseMeta] Base metadata to include with the metric. - * @param {object} [instanceMeta] Additional metadata to include with this timer instance. - */ - constructor(name, baseMeta, instanceMeta) { - super('timer', name, baseMeta); - if (instanceMeta !== undefined && typeof instanceMeta !== 'object') { - throw new ERR_INVALID_ARG_TYPE('instanceMeta', ['object', 'undefined'], instanceMeta); - } - this.#instanceMeta = instanceMeta; - - this.start = performance.now(); - this.end = undefined; - this.duration = undefined; + get duration() { + return this.#duration; } /** - * Stop the timer and report the duration. - * @param {object} [meta] Additional metadata to include with the report. + * Stop the timer and report the duration via the callback. * @returns {number} The duration in milliseconds. */ - stop(meta) { - if (this.end !== undefined) return; - if (!this.shouldReport) return; - this.end = performance.now(); - this.duration = this.end - this.start; - this.report(this.duration, ObjectAssign({}, this.#instanceMeta, meta)); - return this.duration; + stop() { + if (this.#end !== undefined) return this.#duration; + this.#end = performance.now(); + this.#duration = this.#end - this.#start; + this.#report(this.#duration); + return this.#duration; } /** @@ -411,46 +439,12 @@ class PullGauge extends Metric { } } -/** - * A factory for creating Timers for the given metric. - */ -class TimerFactory extends Metric { - /** - * Construct a new Timer factory. - * @param {string} name The name of the timer metric. - * @param {object} [meta] Additional metadata to include with the metric. - */ - constructor(name, meta) { - super('timer', name, meta); - ObjectFreeze(this); - } - - /** - * Create a new timer with the given metadata. - * @param {object} [meta] Additional metadata to include with this timer instance. - * @returns {Timer} A new Timer instance with the combined metadata. - */ - create(meta) { - return new Timer(this.name, this.meta, meta); - } -} - -/** - * Create a timer metric. - * @param {string} name The name of the timer. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {TimerFactory} An object with a create method to create new timers. - */ -function timer(name, meta) { - return new TimerFactory(name, meta); -} // Map of metric types to their constructors. const metricTypes = { counter: Counter, gauge: Gauge, pullGauge: PullGauge, - timer: Timer, }; const metricTypeNames = ObjectKeys(metricTypes); @@ -516,12 +510,9 @@ module.exports = { Counter, Timer, PullGauge, - TimerFactory, - counter, gauge, metric, pullGauge, - timer, }; diff --git a/test/parallel/test-metrics-timer.js b/test/parallel/test-metrics-timer.js index cf18e8cb7b1840..ae4400c65ff8e8 100644 --- a/test/parallel/test-metrics-timer.js +++ b/test/parallel/test-metrics-timer.js @@ -4,32 +4,33 @@ const common = require('../common'); const assert = require('assert'); const { subscribe } = require('node:diagnostics_channel'); -const { timer, Timer, TimerFactory, Metric, MetricReport } = require('node:metrics'); +const { counter, Counter, Timer, MetricReport } = require('node:metrics'); -const testTimer = timer('test', { base: 'test' }); -assert.ok(testTimer instanceof TimerFactory); -assert.ok(testTimer instanceof Metric); +// Create a counter for timing +const testCounter = counter('test.duration', { base: 'test' }); +assert.ok(testCounter instanceof Counter); -assert.strictEqual(testTimer.type, 'timer'); -assert.strictEqual(testTimer.name, 'test'); -assert.deepStrictEqual(testTimer.meta, { base: 'test' }); -assert.strictEqual(testTimer.channelName, 'metrics:timer:test'); +assert.strictEqual(testCounter.type, 'counter'); +assert.strictEqual(testCounter.name, 'test.duration'); +assert.deepStrictEqual(testCounter.meta, { base: 'test' }); +assert.strictEqual(testCounter.channelName, 'metrics:counter:test.duration'); -const a = testTimer.create({ timer: 'a' }); -const b = testTimer.create({ timer: 'b' }); +// Create timers from the counter +const a = testCounter.createTimer({ timer: 'a', meta: 'extra' }); +const b = testCounter.createTimer({ timer: 'b' }); assert.ok(a instanceof Timer); -assert.ok(a instanceof Metric); +assert.ok(b instanceof Timer); const messages = [ [50, { base: 'test', timer: 'a', meta: 'extra' }], [100, { base: 'test', timer: 'b' }], ]; -subscribe(testTimer.channelName, common.mustCall((report) => { +subscribe(testCounter.channelName, common.mustCall((report) => { assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'timer'); - assert.strictEqual(report.name, 'test'); + assert.strictEqual(report.type, 'counter'); + assert.strictEqual(report.name, 'test.duration'); assert.ok(report.time > 0); const [value, meta] = messages.shift(); @@ -42,13 +43,10 @@ function near(actual, expected, threshold = 10) { return Math.abs(actual - expected) <= threshold; } -setTimeout(common.mustCall(() => { - a.stop({ meta: 'extra' }); - assert.ok(a.start > 0); - assert.ok(a.end > 0); - assert.ok(a.duration > 0); -}), 50); +setTimeout(() => { + a.stop(); +}, 50); -setTimeout(common.mustCall(() => { - b.stop(); -}), 100); +setTimeout(() => { + b[Symbol.dispose](); +}, 100); From ccbfd160a621ddf0a0dd382806d0f263c4a67593 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 21:47:46 +0800 Subject: [PATCH 12/18] perf_hooks: move metrics under perf_hooks --- doc/api/metrics.md | 52 ++++++++++------ lib/{ => internal/perf}/metrics.js | 2 +- lib/perf_hooks.js | 3 + test/parallel/test-metrics-counter.js | 3 +- test/parallel/test-metrics-gauge.js | 3 +- test/parallel/test-metrics-metric-report.js | 3 +- test/parallel/test-metrics-metric.js | 3 +- test/parallel/test-metrics-pull-gauge.js | 61 +++++++++++++++++++ test/parallel/test-metrics-timer-readonly.js | 51 ++++++++++++++++ .../parallel/test-metrics-timer-with-gauge.js | 53 ++++++++++++++++ test/parallel/test-metrics-timer.js | 3 +- 11 files changed, 212 insertions(+), 25 deletions(-) rename lib/{ => internal/perf}/metrics.js (99%) create mode 100644 test/parallel/test-metrics-pull-gauge.js create mode 100644 test/parallel/test-metrics-timer-readonly.js create mode 100644 test/parallel/test-metrics-timer-with-gauge.js diff --git a/doc/api/metrics.md b/doc/api/metrics.md index 768af086757f20..f096089ef08b3d 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -4,20 +4,20 @@ > Stability: 1 - Experimental - + -The `node:metrics` module provides an API for application instrumentation and +The metrics API provides an API for application instrumentation and performance monitoring. It offers various metric types and built-in exporters for popular monitoring systems. -The module can be accessed using: +The metrics API can be accessed using: ```mjs -import * as metrics from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; ``` ```cjs -const metrics = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); ``` ## Overview @@ -30,7 +30,8 @@ flexible consumption patterns. ### Example ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; // Create counter metrics const apiCalls = counter('api.calls', { service: 'web' }); @@ -49,7 +50,8 @@ function handleRequest(req, res) { ``` ```cjs -const { counter } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { counter } = metrics; // Create counter metrics const apiCalls = counter('api.calls', { service: 'web' }); @@ -82,7 +84,8 @@ added: REPLACEME Creates a counter metric that tracks cumulative values. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const errorCount = counter('errors.total', { component: 'database' }); @@ -105,7 +108,8 @@ added: REPLACEME Creates a gauge metric that represents a single value at a point in time. ```mjs -import { gauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { gauge } = metrics; import { memoryUsage } from 'node:process'; const memory = gauge('memory.usage.bytes'); @@ -128,7 +132,8 @@ added: REPLACEME Creates a gauge that samples a value on-demand by calling the provided function. ```mjs -import { pullGauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { pullGauge } = metrics; import { cpuUsage } from 'node:process'; const cpu = pullGauge('cpu.usage', () => { @@ -292,7 +297,8 @@ specific methods provided by each metric type (e.g., `increment`, `reset`, metadata. ```mjs -import { gauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { gauge } = metrics; const memoryUsage = gauge('memory.usage', { service: 'web' }); @@ -332,7 +338,8 @@ added: REPLACEME Increments the counter by the specified amount. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const apiCalls = counter('api.calls', { service: 'web' }); @@ -354,7 +361,8 @@ added: REPLACEME Decrements the counter by the specified amount. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const errorCount = counter('errors.total', { component: 'database' }); @@ -376,7 +384,8 @@ added: REPLACEME Creates a timer that will increment this counter with its duration when stopped. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const requestDuration = counter('request.duration.ms'); @@ -417,7 +426,8 @@ added: REPLACEME Sets the gauge to a specific value and reports it. ```mjs -import { gauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { gauge } = metrics; import { memoryUsage } from 'node:process'; const memory = gauge('memory.usage.bytes'); @@ -439,7 +449,8 @@ added: REPLACEME Creates a timer that will set this gauge to its duration when stopped. ```mjs -import { gauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { gauge } = metrics; const responseTime = gauge('response.time.ms'); @@ -497,7 +508,8 @@ added: REPLACEME Stops the timer and reports the duration. Can only be called once. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const dbQueryDuration = counter('db.query.duration'); @@ -518,7 +530,8 @@ added: REPLACEME Allows `using` syntax to automatically stop the timer when done. ```mjs -import { counter } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { counter } = metrics; const dbQueryDuration = counter('db.query.duration'); @@ -553,7 +566,8 @@ added: REPLACEME Calls the configured function to get the current value and reports it. ```mjs -import { pullGauge } from 'node:metrics'; +import { metrics } from 'node:perf_hooks'; +const { pullGauge } = metrics; import { cpuUsage } from 'node:process'; const cpu = pullGauge('cpu.usage', () => { diff --git a/lib/metrics.js b/lib/internal/perf/metrics.js similarity index 99% rename from lib/metrics.js rename to lib/internal/perf/metrics.js index 5b14ddaf99c3b7..0df81cb8185bf8 100644 --- a/lib/metrics.js +++ b/lib/internal/perf/metrics.js @@ -39,7 +39,7 @@ const { subscribe, unsubscribe, } = require('diagnostics_channel'); -const { performance } = require('perf_hooks'); +const { performance } = require('internal/perf/performance'); const newMetricChannel = channel('metrics:new'); diff --git a/lib/perf_hooks.js b/lib/perf_hooks.js index 18e979630e0e19..1f46a674862486 100644 --- a/lib/perf_hooks.js +++ b/lib/perf_hooks.js @@ -31,6 +31,8 @@ const monitorEventLoopDelay = require('internal/perf/event_loop_delay'); const { eventLoopUtilization } = require('internal/perf/event_loop_utilization'); const timerify = require('internal/perf/timerify'); +const metrics = require('internal/perf/metrics'); + module.exports = { Performance, PerformanceEntry, @@ -44,6 +46,7 @@ module.exports = { timerify, createHistogram, performance, + metrics, }; ObjectDefineProperty(module.exports, 'constants', { diff --git a/test/parallel/test-metrics-counter.js b/test/parallel/test-metrics-counter.js index 7e70c0ee06ca19..ebe9c44bec6d8b 100644 --- a/test/parallel/test-metrics-counter.js +++ b/test/parallel/test-metrics-counter.js @@ -4,7 +4,8 @@ const common = require('../common'); const assert = require('assert'); const { subscribe } = require('node:diagnostics_channel'); -const { counter, Counter, Metric, MetricReport } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { counter, Counter, Metric, MetricReport } = metrics; const testCounter = counter('test', { base: 'test' }); assert.ok(testCounter instanceof Counter); diff --git a/test/parallel/test-metrics-gauge.js b/test/parallel/test-metrics-gauge.js index deb23e8b538b3a..f637d4b3068afc 100644 --- a/test/parallel/test-metrics-gauge.js +++ b/test/parallel/test-metrics-gauge.js @@ -4,7 +4,8 @@ const common = require('../common'); const assert = require('assert'); const { subscribe } = require('node:diagnostics_channel'); -const { gauge, Gauge, Metric, MetricReport } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { gauge, Gauge, Metric, MetricReport } = metrics; const testGauge = gauge('test', { base: 'test' }); assert.ok(testGauge instanceof Gauge); diff --git a/test/parallel/test-metrics-metric-report.js b/test/parallel/test-metrics-metric-report.js index 75ee070e642abe..294e15633a38aa 100644 --- a/test/parallel/test-metrics-metric-report.js +++ b/test/parallel/test-metrics-metric-report.js @@ -3,7 +3,8 @@ require('../common'); const assert = require('assert'); -const { MetricReport } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { MetricReport } = metrics; const report = new MetricReport('counter', 'test-counter', 123, { meta: 'test' diff --git a/test/parallel/test-metrics-metric.js b/test/parallel/test-metrics-metric.js index d02ae5b2250899..29fe5201a9127d 100644 --- a/test/parallel/test-metrics-metric.js +++ b/test/parallel/test-metrics-metric.js @@ -4,7 +4,8 @@ const common = require('../common'); const assert = require('assert'); const { subscribe } = require('node:diagnostics_channel'); -const { Metric, MetricReport } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { Metric, MetricReport } = metrics; const metric = new Metric('counter', 'test-counter', { base: 'test' }); diff --git a/test/parallel/test-metrics-pull-gauge.js b/test/parallel/test-metrics-pull-gauge.js new file mode 100644 index 00000000000000..008907f1201191 --- /dev/null +++ b/test/parallel/test-metrics-pull-gauge.js @@ -0,0 +1,61 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { metrics } = require('node:perf_hooks'); +const { pullGauge, PullGauge, Metric, MetricReport } = metrics; + +// Test values to return from the pull function +const values = [ 1, 5, 10, 4, 6 ]; +let currentIndex = 0; + +const testPullGauge = pullGauge('test', () => { + return values[currentIndex]; +}, { base: 'test' }); + +assert.ok(testPullGauge instanceof PullGauge); +assert.ok(testPullGauge instanceof Metric); +assert.strictEqual(testPullGauge.value, 0); + +assert.strictEqual(testPullGauge.type, 'pullGauge'); +assert.strictEqual(testPullGauge.name, 'test'); +assert.deepStrictEqual(testPullGauge.meta, { base: 'test' }); +assert.strictEqual(testPullGauge.channelName, 'metrics:pullGauge:test'); + +// Subscribe to metric reports +let reportCount = 0; +subscribe(testPullGauge.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'pullGauge'); + assert.strictEqual(report.name, 'test'); + assert.ok(report.time > 0); + assert.strictEqual(report.value, values[reportCount]); + + if (reportCount < values.length - 1) { + assert.deepStrictEqual(report.meta, { base: 'test' }); + } else { + // Last sample includes additional metadata + assert.deepStrictEqual(report.meta, { base: 'test', extra: 'metadata' }); + } + + reportCount++; +}, values.length)); + +// Test sampling +for (let i = 0; i < values.length; i++) { + currentIndex = i; + + if (i === values.length - 1) { + // Test sampling with additional metadata + const value = testPullGauge.sample({ extra: 'metadata' }); + assert.strictEqual(value, values[i]); + } else { + const value = testPullGauge.sample(); + assert.strictEqual(value, values[i]); + } +} + +// Verify all reports were received +assert.strictEqual(reportCount, values.length); \ No newline at end of file diff --git a/test/parallel/test-metrics-timer-readonly.js b/test/parallel/test-metrics-timer-readonly.js new file mode 100644 index 00000000000000..d6280f1231bf59 --- /dev/null +++ b/test/parallel/test-metrics-timer-readonly.js @@ -0,0 +1,51 @@ +'use strict'; + +const common = require('../common'); +const assert = require('assert'); +const { metrics } = require('node:perf_hooks'); +const { Timer } = metrics; + +// Test that Timer properties are read-only +const timer = new Timer(() => {}); + +// Verify initial values +assert.ok(typeof timer.start === 'number'); +assert.ok(timer.start > 0); +assert.strictEqual(timer.end, undefined); +assert.strictEqual(timer.duration, undefined); + +// Try to modify properties (should throw) +const originalStart = timer.start; +assert.throws(() => { + timer.start = 0; +}, TypeError); +assert.strictEqual(timer.start, originalStart); // Should remain unchanged + +assert.throws(() => { + timer.end = 123; +}, TypeError); +assert.strictEqual(timer.end, undefined); // Should remain undefined + +assert.throws(() => { + timer.duration = 456; +}, TypeError); +assert.strictEqual(timer.duration, undefined); // Should remain undefined + +// Stop the timer and verify values are still read-only +timer.stop(); +assert.ok(typeof timer.end === 'number'); +assert.ok(typeof timer.duration === 'number'); +assert.ok(timer.end > timer.start); +assert.ok(timer.duration > 0); + +// Try to modify after stopping (should throw) +const stoppedEnd = timer.end; +const stoppedDuration = timer.duration; +assert.throws(() => { + timer.end = 0; +}, TypeError); +assert.throws(() => { + timer.duration = 0; +}, TypeError); +assert.strictEqual(timer.end, stoppedEnd); +assert.strictEqual(timer.duration, stoppedDuration); \ No newline at end of file diff --git a/test/parallel/test-metrics-timer-with-gauge.js b/test/parallel/test-metrics-timer-with-gauge.js new file mode 100644 index 00000000000000..01068c0cc76019 --- /dev/null +++ b/test/parallel/test-metrics-timer-with-gauge.js @@ -0,0 +1,53 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { subscribe } = require('node:diagnostics_channel'); +const { metrics } = require('node:perf_hooks'); +const { gauge, Gauge, Timer, MetricReport } = metrics; + +// Create a gauge for timing +const testGauge = gauge('test.response.time', { base: 'test' }); +assert.ok(testGauge instanceof Gauge); + +assert.strictEqual(testGauge.type, 'gauge'); +assert.strictEqual(testGauge.name, 'test.response.time'); +assert.deepStrictEqual(testGauge.meta, { base: 'test' }); +assert.strictEqual(testGauge.channelName, 'metrics:gauge:test.response.time'); + +// Create timers from the gauge +const a = testGauge.createTimer({ timer: 'a', meta: 'extra' }); +const b = testGauge.createTimer({ timer: 'b' }); + +assert.ok(a instanceof Timer); +assert.ok(b instanceof Timer); + +const messages = [ + [50, { base: 'test', timer: 'a', meta: 'extra' }], + [100, { base: 'test', timer: 'b' }], +]; + +subscribe(testGauge.channelName, common.mustCall((report) => { + assert.ok(report instanceof MetricReport); + assert.strictEqual(report.type, 'gauge'); + assert.strictEqual(report.name, 'test.response.time'); + assert.ok(report.time > 0); + + const [value, meta] = messages.shift(); + assert.ok(near(report.value, value)); + assert.deepStrictEqual(report.meta, meta); +}, 2)); + +// NOTE: If this test is flaky, tune the threshold to give more leeway to the timing +function near(actual, expected, threshold = 10) { + return Math.abs(actual - expected) <= threshold; +} + +setTimeout(() => { + a.stop(); +}, 50); + +setTimeout(() => { + b[Symbol.dispose](); +}, 100); \ No newline at end of file diff --git a/test/parallel/test-metrics-timer.js b/test/parallel/test-metrics-timer.js index ae4400c65ff8e8..fee58f742cdc94 100644 --- a/test/parallel/test-metrics-timer.js +++ b/test/parallel/test-metrics-timer.js @@ -4,7 +4,8 @@ const common = require('../common'); const assert = require('assert'); const { subscribe } = require('node:diagnostics_channel'); -const { counter, Counter, Timer, MetricReport } = require('node:metrics'); +const { metrics } = require('node:perf_hooks'); +const { counter, Counter, Timer, MetricReport } = metrics; // Create a counter for timing const testCounter = counter('test.duration', { base: 'test' }); From 452489ac15d067c6e10f423a0b6577f2909c03a8 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 22:34:27 +0800 Subject: [PATCH 13/18] lib: use validators for metrics type checks --- lib/internal/perf/metrics.js | 85 ++++++++++++++---------------------- 1 file changed, 33 insertions(+), 52 deletions(-) diff --git a/lib/internal/perf/metrics.js b/lib/internal/perf/metrics.js index 0df81cb8185bf8..4965f6f1587cb6 100644 --- a/lib/internal/perf/metrics.js +++ b/lib/internal/perf/metrics.js @@ -28,11 +28,19 @@ const { const { codes: { - ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, }, } = require('internal/errors'); +const { + kValidateObjectAllowNullable, + validateNumber, + validateObject, + validateOneOf, + validateString, + validateFunction, +} = require('internal/validators'); + const { channel, hasChannel, @@ -61,6 +69,10 @@ class MetricReport { * @param {object} [meta] Additional metadata to include with the report. */ constructor(type, name, value, meta) { + validateString(type, 'type'); + validateString(name, 'name'); + validateNumber(value, 'value'); + validateObject(meta, 'meta', kValidateObjectAllowNullable); this.#type = type; this.#name = name; this.#value = value; @@ -125,14 +137,11 @@ class Metric { * @param {object} [meta] Additional metadata to include with the metric. */ constructor(type, name, meta) { - if (!metricTypeNames.includes(type)) { - throw new ERR_INVALID_ARG_VALUE('type', type, wrongTypeErr); - } - if (typeof name !== 'string' || !name) { - throw new ERR_INVALID_ARG_TYPE('name', ['string'], name); - } - if (meta !== undefined && typeof meta !== 'object') { - throw new ERR_INVALID_ARG_TYPE('meta', ['object', 'undefined'], meta); + validateOneOf(type, 'type', [ 'gauge', 'counter', 'pullGauge' ]); + validateString(name, 'name'); + validateObject(meta, 'meta', kValidateObjectAllowNullable); + if (name === '') { + throw new ERR_INVALID_ARG_VALUE('name', name, 'must not be empty'); } this.#type = type; @@ -342,9 +351,7 @@ class Timer { * @param {Function} reportCallback The function to call with the duration when stopped. */ constructor(report) { - if (typeof report !== 'function') { - throw new ERR_INVALID_ARG_TYPE('report', ['function'], report); - } + validateFunction(report, 'report'); this.#report = report; this.#start = performance.now(); this.#end = undefined; @@ -399,7 +406,7 @@ class Timer { * A gauge which updates its value by calling a function when sampled. */ class PullGauge extends Metric { - #fn; + #puller; #value; /** @@ -408,14 +415,10 @@ class PullGauge extends Metric { * @param {Function} fn The function to call to get the gauge value. * @param {object} [meta] Additional metadata to include with the metric. */ - constructor(name, fn, meta) { + constructor(name, puller, meta) { super('pullGauge', name, meta); - - if (typeof fn !== 'function') { - throw new ERR_INVALID_ARG_TYPE('fn', ['function'], fn); - } - - this.#fn = fn; + validateFunction(puller, 'puller'); + this.#puller = puller; this.#value = 0; } @@ -432,48 +435,22 @@ class PullGauge extends Metric { * @param {object} [meta] Additional metadata to include with the report. */ sample(meta) { - const value = this.#fn(); + const value = this.#puller(); this.#value = value; this.report(value, meta); return value; } } - -// Map of metric types to their constructors. -const metricTypes = { - counter: Counter, - gauge: Gauge, - pullGauge: PullGauge, -}; - -const metricTypeNames = ObjectKeys(metricTypes); -const wrongTypeErr = `must be one of: ${metricTypeNames.join(', ')}`; - -/** - * Create a function to directly create a metric of a specific type. - * @param {string} type The type of metric to create. - * @returns {Function} A function which creates a metric of the specified type. - * @private - */ -function direct(type) { - if (!metricTypeNames.includes(type)) { - throw new ERR_INVALID_ARG_VALUE('type', type, wrongTypeErr); - } - const Type = metricTypes[type]; - - return function makeMetricType(name, ...args) { - return new Type(name, ...args); - }; -} - /** * Create a counter metric. * @param {string} name The name of the counter. * @param {object} [meta] Additional metadata to include with the report. * @returns {Counter} The counter metric. */ -const counter = direct('counter'); +function counter(name, meta) { + return new Counter(name, meta); +} /** * Create a gauge metric. @@ -481,7 +458,9 @@ const counter = direct('counter'); * @param {object} [meta] Additional metadata to include with the report. * @returns {Gauge} The gauge metric. */ -const gauge = direct('gauge'); +function gauge(name, meta) { + return new Gauge(name, meta); +} /** * Create a raw metric. @@ -501,7 +480,9 @@ function metric(type, name, meta) { * @param {object} [meta] Additional metadata to include with the report. * @returns {PullGauge} The pull gauge metric. */ -const pullGauge = direct('pullGauge'); +function pullGauge(name, puller, meta) { + return new PullGauge(name, puller, meta); +} module.exports = { MetricReport, From b5e7ee0cc08288dcd677d9380fd507ca3514b75b Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 22:40:12 +0800 Subject: [PATCH 14/18] lib: skip shouldReport on counter as report will do it anyway --- lib/internal/perf/metrics.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lib/internal/perf/metrics.js b/lib/internal/perf/metrics.js index 4965f6f1587cb6..0fce2413f4b17e 100644 --- a/lib/internal/perf/metrics.js +++ b/lib/internal/perf/metrics.js @@ -297,8 +297,6 @@ class Counter extends Metric { * @param {object} [meta] Additional metadata to include with the report. */ increment(n = 1, meta) { - if (!this.shouldReport) return; - if (typeof n === 'object') { meta = n; n = 1; @@ -314,8 +312,6 @@ class Counter extends Metric { * @param {object} [meta] Additional metadata to include with the report. */ decrement(n = 1, meta) { - if (!this.shouldReport) return; - if (typeof n === 'object') { meta = n; n = 1; From 00f29062812d5ffc3287a752a537d649896296b2 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Sat, 5 Jul 2025 22:59:44 +0800 Subject: [PATCH 15/18] lib: improve metrics factory naming --- doc/api/metrics.md | 64 +++++++++---------- lib/internal/perf/metrics.js | 16 ++--- test/parallel/test-metrics-counter.js | 4 +- test/parallel/test-metrics-gauge.js | 4 +- test/parallel/test-metrics-pull-gauge.js | 4 +- .../parallel/test-metrics-timer-with-gauge.js | 4 +- test/parallel/test-metrics-timer.js | 4 +- 7 files changed, 50 insertions(+), 50 deletions(-) diff --git a/doc/api/metrics.md b/doc/api/metrics.md index f096089ef08b3d..ab42999d237585 100644 --- a/doc/api/metrics.md +++ b/doc/api/metrics.md @@ -31,11 +31,11 @@ flexible consumption patterns. ```mjs import { metrics } from 'node:perf_hooks'; -const { counter } = metrics; +const { createCounter } = metrics; // Create counter metrics -const apiCalls = counter('api.calls', { service: 'web' }); -const requestDuration = counter('api.request.duration.ms', { service: 'web' }); +const apiCalls = createCounter('api.calls', { service: 'web' }); +const requestDuration = createCounter('api.request.duration.ms', { service: 'web' }); // Use metrics in your application function handleRequest(req, res) { @@ -51,11 +51,11 @@ function handleRequest(req, res) { ```cjs const { metrics } = require('node:perf_hooks'); -const { counter } = metrics; +const { createCounter } = metrics; // Create counter metrics -const apiCalls = counter('api.calls', { service: 'web' }); -const requestDuration = counter('api.request.duration.ms', { service: 'web' }); +const apiCalls = createCounter('api.calls', { service: 'web' }); +const requestDuration = createCounter('api.request.duration.ms', { service: 'web' }); // Use metrics in your application function handleRequest(req, res) { @@ -71,7 +71,7 @@ function handleRequest(req, res) { ## Metric Types -### `metrics.counter(name[, meta])` +### `metrics.createCounter(name[, meta])` -The metrics API provides an API for application instrumentation and -performance monitoring. It offers various metric types and built-in exporters -for popular monitoring systems. +The `node:perf_hooks` metrics API provides a flexible, low-overhead +instrumentation system for application performance monitoring. It follows a +three-layer architecture: value producers generate values via direct recording, +timers, or observable callbacks; descriptors identify and group metric values; +consumers subscribe to metrics and aggregate values into snapshots. The metrics API can be accessed using: @@ -20,616 +22,1002 @@ import { metrics } from 'node:perf_hooks'; const { metrics } = require('node:perf_hooks'); ``` -## Overview +## Public API -The metrics API enables developers to instrument their applications with custom -metrics that can be collected and exported to monitoring systems. All metrics -publish their data through the `node:diagnostics_channel` module, allowing for -flexible consumption patterns. - -### Example +### Overview ```mjs import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; -// Create counter metrics -const apiCalls = createCounter('api.calls', { service: 'web' }); -const requestDuration = createCounter('api.request.duration.ms', { service: 'web' }); +// Create a metric (singleton — same name returns same instance) +const requests = metrics.create('http.requests', { unit: '{count}' }); -// Use metrics in your application -function handleRequest(req, res) { - const timer = requestDuration.createTimer({ endpoint: req.url }); +// Create a consumer to aggregate values +const consumer = metrics.createConsumer({ + defaultAggregation: 'sum', + groupByAttributes: true, + metrics: { + 'http.requests': { aggregation: 'sum' }, + }, +}); - apiCalls.increment(); +// Record values with optional attributes +requests.record(1, { method: 'GET', status: 200 }); +requests.record(1, { method: 'POST', status: 201 }); - // Process request... +// Collect aggregated snapshots +const snapshot = consumer.collect(); +console.log(snapshot[0].dataPoints); +// [ +// { sum: 1, count: 1, attributes: { method: 'GET', status: 200 } }, +// { sum: 1, count: 1, attributes: { method: 'POST', status: 201 } }, +// ] - timer.stop(); // Increments requestDuration with the elapsed time -} +consumer.close(); +requests.close(); ``` ```cjs const { metrics } = require('node:perf_hooks'); -const { createCounter } = metrics; -// Create counter metrics -const apiCalls = createCounter('api.calls', { service: 'web' }); -const requestDuration = createCounter('api.request.duration.ms', { service: 'web' }); +const requests = metrics.create('http.requests', { unit: '{count}' }); -// Use metrics in your application -function handleRequest(req, res) { - const timer = requestDuration.createTimer({ endpoint: req.url }); +const consumer = metrics.createConsumer({ + defaultAggregation: 'sum', + groupByAttributes: true, + metrics: { + 'http.requests': { aggregation: 'sum' }, + }, +}); - apiCalls.increment(); +requests.record(1, { method: 'GET', status: 200 }); +requests.record(1, { method: 'POST', status: 201 }); - // Process request... +const snapshot = consumer.collect(); +console.log(snapshot[0].dataPoints); - timer.stop(); // Increments requestDuration with the elapsed time -} +consumer.close(); +requests.close(); ``` -## Metric Types - -### `metrics.createCounter(name[, meta])` +#### `metrics.create(name[, options])` -* `name` {string} The name of the counter metric. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.Counter} +* `name` {string} The metric name. Must not be empty. +* `options` {Object} + * `unit` {string} The unit of measurement (e.g., `'ms'`, `'By'`, + `'{count}'`). + * `description` {string} Human-readable description. + * `observable` {Function} If provided, makes the metric observable. The + function is called during [`consumer.collect()`][] with a facade object + as its argument. The facade has: + * `record(value[, attributes])` — Records a value for the current + subscriber. + * `descriptor` — The [`MetricDescriptor`][] for this metric. + * `scope` {InstrumentationScope} The instrumentation scope. +* Returns: {Metric} + +Creates a new metric or returns the existing one with the same name. If a +metric with the same name already exists but with different options, a +`'MetricsWarning'` process warning is emitted and the existing metric is +returned unchanged. -Creates a counter metric that tracks cumulative values. +Each metric maintains its own subscriber list. Values are dispatched directly +to subscribed consumers with no per-value identity lookups, similar to +`node:diagnostics_channel`. ```mjs import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; +import { memoryUsage } from 'node:process'; -const errorCount = createCounter('errors.total', { component: 'database' }); +// Direct metric — record values explicitly +const duration = metrics.create('http.request.duration', { + unit: 'ms', + description: 'HTTP request duration in milliseconds', +}); -errorCount.increment(); // Increment by 1 -errorCount.increment(5); // Increment by 5 -errorCount.decrement(2); // Decrement by 2 +duration.record(42, { route: '/api/users' }); + +// Observable metric — sampled on demand during collect() +const memory = metrics.create('process.memory.heap', { + unit: 'By', + observable: (metric) => { + const mem = memoryUsage(); + metric.record(mem.heapUsed, { type: 'used' }); + metric.record(mem.heapTotal, { type: 'total' }); + }, +}); ``` -### `metrics.createGauge(name[, meta])` +```cjs +const { metrics } = require('node:perf_hooks'); +const { memoryUsage } = require('node:process'); - +const duration = metrics.create('http.request.duration', { + unit: 'ms', + description: 'HTTP request duration in milliseconds', +}); -* `name` {string} The name of the gauge metric. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.Gauge} +duration.record(42, { route: '/api/users' }); -Creates a gauge metric that represents a single value at a point in time. +const memory = metrics.create('process.memory.heap', { + unit: 'By', + observable: (metric) => { + const mem = memoryUsage(); + metric.record(mem.heapUsed, { type: 'used' }); + metric.record(mem.heapTotal, { type: 'total' }); + }, +}); +``` + +#### `metrics.createConsumer([config])` + + + +* `config` {Object} + * `defaultAggregation` {string} Default aggregation strategy for all + metrics. One of `'sum'`, `'lastValue'`, `'histogram'`, `'summary'`, or a + custom aggregator object. **Default:** `'sum'`. + * `defaultTemporality` {string} Default temporality. `'cumulative'` means + data points represent totals since metric creation; `'delta'` means + data points represent values since the last [`consumer.collect()`][] + call and state is reset after collection. **Default:** `'cumulative'`. + * `groupByAttributes` {boolean} When `true`, values are bucketed by their + attribute combinations. When `false`, all values aggregate into a single + bucket regardless of attributes. **Default:** `false`. + * `metrics` {Object} Per-metric configuration keyed by metric name. Each + value is an object with: + * `aggregation` {string|Object} Aggregation strategy for this metric. + Built-in strategies: + * `'sum'` — Running sum and count. Supports `monotonic: true` to + reject negative values. Data point fields: `sum`, `count`. + * `'lastValue'` — Most recent value and its timestamp. Data point + fields: `value`, `timestamp`. + * `'histogram'` — Explicit-boundary histogram. Data point fields: + `buckets` (array of `{ le, count }`), `sum`, `count`, `min`, `max`. + * `'summary'` — Quantile summary. Data point fields: `quantiles` + (object mapping quantile to value), `sum`, `count`, `min`, `max`. + * Custom object with `createState(config)`, `aggregate(state, value[, + timestamp])`, `finalize(state)`, and optional `resetState(state)` and + `needsTimestamp` properties. + * `temporality` {string} Overrides `defaultTemporality` for this metric. + * `monotonic` {boolean} For `'sum'` aggregation, reject negative values. + * `boundaries` {number\[]} For `'histogram'` aggregation, bucket + boundaries. **Default:** `[10, 50, 100, 500, 1000]`. + * `quantiles` {number\[]} For `'summary'` aggregation, quantile values. + **Default:** `[0.5, 0.9, 0.95, 0.99]`. + * `groupBy` {string\[]} Attribute keys to group by (subset of all + attributes). + * `normalizeAttributes` {Function} Function to normalize attribute + objects before grouping. + * `attributeKey` {Function} Custom function to derive the grouping key + from an attributes object. + * `cardinalityLimit` {number} Maximum unique attribute combinations when + `groupByAttributes` is enabled. Oldest entries are evicted for + `'delta'` temporality; new entries are dropped for `'cumulative'` + temporality. A `'MetricsWarning'` is emitted on first limit hit. + **Default:** `2000`. + * `exemplar` {Object} Exemplar sampler. Must implement three methods: + `sample(value, timestamp, attributes)` called on each recorded value, + `getExemplars()` returning an {Exemplar\[]} array, and `reset()` called + after collection to clear stored exemplars. See [`ReservoirSampler`][] + and [`BoundarySampler`][] for built-in implementations. +* Returns: {Consumer} + +Creates a consumer that aggregates metric values. The consumer subscribes to +existing metrics immediately and to metrics created later that match its +configuration. A wildcard consumer (no `metrics` key, or empty `metrics`) +subscribes to all current and future metrics. + +Config keys that are not reserved (`defaultAggregation`, `defaultTemporality`, +`groupByAttributes`, `metrics`) are treated as metric names in the shorthand +format: ```mjs import { metrics } from 'node:perf_hooks'; -const { createGauge } = metrics; -import { memoryUsage } from 'node:process'; -const memory = createGauge('memory.usage.bytes'); +// Shorthand: top-level keys are metric names +const consumer = metrics.createConsumer({ + 'http.requests': { aggregation: 'sum' }, + 'http.duration': { aggregation: 'histogram' }, +}); + +// Explicit: nested under 'metrics' key +const consumer2 = metrics.createConsumer({ + defaultAggregation: 'sum', + groupByAttributes: true, + metrics: { + 'http.requests': { aggregation: 'sum' }, + 'http.duration': { aggregation: 'histogram' }, + }, +}); + +// Wildcard: subscribes to all metrics +const wildcard = metrics.createConsumer(); -memory.reset(memoryUsage().heapUsed); +consumer.close(); +consumer2.close(); +wildcard.close(); ``` -### `metrics.createPullGauge(name, fn[, meta])` +```cjs +const { metrics } = require('node:perf_hooks'); + +const consumer = metrics.createConsumer({ + 'http.requests': { aggregation: 'sum' }, + 'http.duration': { aggregation: 'histogram' }, +}); + +const consumer2 = metrics.createConsumer({ + defaultAggregation: 'sum', + groupByAttributes: true, + metrics: { + 'http.requests': { aggregation: 'sum' }, + 'http.duration': { aggregation: 'histogram' }, + }, +}); + +const wildcard = metrics.createConsumer(); + +consumer.close(); +consumer2.close(); +wildcard.close(); +``` + +#### `metrics.createDiagnosticsChannelConsumer()` -* `name` {string} The name of the pull gauge metric. -* `fn` {Function} A function that returns the current value. -* `meta` {Object} Optional metadata to attach to all reports. -* Returns: {metrics.PullGauge} +* Returns: {Object} -Creates a gauge that samples a value on-demand by calling the provided function. +Creates a singleton consumer that forwards all metric values to +`node:diagnostics_channel`. Each metric publishes to a channel named +`metrics:{name}`. Values are only published when the channel has active +subscribers. + +Calling this function again after the first call returns the same instance. +The returned object has `collect()` (to sample observable metrics) and +`close()` methods. ```mjs +import diagnosticsChannel from 'node:diagnostics_channel'; import { metrics } from 'node:perf_hooks'; -const { createPullGauge } = metrics; -import { cpuUsage } from 'node:process'; -const cpu = createPullGauge('cpu.usage', () => { - return cpuUsage().user; +diagnosticsChannel.subscribe('metrics:http.requests', (msg) => { + // msg.descriptor, msg.value, msg.attributes, msg.timestamp + console.log(msg.value, msg.attributes); }); -// Sample the gauge when needed -cpu.sample(); +metrics.createDiagnosticsChannelConsumer(); + +const m = metrics.create('http.requests'); +m.record(1, { method: 'GET' }); // Published to the channel immediately ``` -## Classes +```cjs +const diagnosticsChannel = require('node:diagnostics_channel'); +const { metrics } = require('node:perf_hooks'); + +diagnosticsChannel.subscribe('metrics:http.requests', (msg) => { + console.log(msg.value, msg.attributes); +}); + +metrics.createDiagnosticsChannelConsumer(); + +const m = metrics.create('http.requests'); +m.record(1, { method: 'GET' }); +``` -### Class: `MetricReport` +#### `metrics.get(name)` -Represents a single metric measurement. +* `name` {string} +* Returns: {Metric|undefined} + +Returns the registered metric with the given name, or `undefined` if not +found. -#### `metricReport.type` +#### `metrics.list()` -* {string} +* Returns: {Metric\[]} -The type of the metric (e.g., 'counter', 'gauge', 'pullGauge', -'timer'). +Returns an array of all currently registered metrics. -#### `metricReport.name` +#### `metrics.diagnosticsChannelConsumer` -* {string} +* {Object|null} -The name of the metric. +The diagnostics channel consumer singleton. `null` if +[`metrics.createDiagnosticsChannelConsumer()`][] has not been called. -#### `metricReport.value` +### Class: `Metric` -* {number} - -The numeric value of the measurement. +A metric records values and dispatches them immediately to all subscribed +consumers. Metrics are created with [`metrics.create()`][], which implements a +singleton pattern — creating a metric with an already-registered name returns +the existing instance. -#### `metricReport.meta` +#### `metric.descriptor` -* {Object} +* {MetricDescriptor} -Additional metadata associated with the measurement. +The immutable descriptor for this metric. -#### `metricReport.time` +#### `metric.isObservable` -* {number} +* {boolean} -The `performance.now()` timestamp when the measurement was recorded in -milliseconds since `performance.timeOrigin`. +`true` if this metric has an observable callback. -### Class: `Metric` +#### `metric.isClosed` -Manages the lifecycle of a metric channel and provides methods for reporting -values to it. Each metric type holds a `Metric` instance which it reports to. +* {boolean} + +`true` if this metric has been closed. -#### `metric.type` +#### `metric.record(value[, attributes[, timestamp]])` -* {string} +* `value` {number|bigint} The value to record. +* `attributes` {Object} Attributes for this value. **Default:** `{}`. +* `timestamp` {number} Optional timestamp override. + +Records a value and dispatches it to all subscribed consumers. Validation is +always performed. If there are no subscribers, the value is not stored. + +```mjs +import { metrics } from 'node:perf_hooks'; + +const m = metrics.create('db.query.duration', { unit: 'ms' }); + +m.record(12.5, { db: 'postgres', operation: 'SELECT' }); +m.record(3n); // BigInt values are supported +m.record(0.5); +``` + +```cjs +const { metrics } = require('node:perf_hooks'); -The type of the metric (e.g., 'counter', 'gauge', 'pullGauge', -'timer'). +const m = metrics.create('db.query.duration', { unit: 'ms' }); -#### `metric.name` +m.record(12.5, { db: 'postgres', operation: 'SELECT' }); +m.record(3n); +m.record(0.5); +``` + +#### `metric.startTimer([attributes])` -* {string} +* `attributes` {Object} Attributes to include with the recorded duration. + **Default:** `{}`. +* Returns: {Timer} + +Creates a {Timer} that records its duration to this metric when stopped. +Timers are pooled for reuse to reduce garbage collection pressure. -The name of the metric. +```mjs +import { metrics } from 'node:perf_hooks'; -#### `metric.meta` +const duration = metrics.create('http.request.duration', { unit: 'ms' }); + +// Manual stop +const timer = duration.startTimer({ route: '/api/users' }); +// ... handle request ... +const ms = timer.stop(); // Records duration and returns it + +// Automatic stop using `using` +{ + using t = duration.startTimer({ route: '/api/orders' }); + // ... handle request ... + // Timer is stopped automatically at end of block +} +``` + +```cjs +const { metrics } = require('node:perf_hooks'); + +const duration = metrics.create('http.request.duration', { unit: 'ms' }); + +const timer = duration.startTimer({ route: '/api/users' }); +// ... handle request ... +const ms = timer.stop(); +``` + +#### `metric.close()` -* {Object} +Unregisters the metric from the global registry and notifies all consumers +via [`consumer.onMetricClosed()`][]. After closing, `record()` calls are +silently ignored (but still validated) and consumers receive no further values. +Calling `close()` multiple times is safe. After closing, a new metric can be +created with the same name. + +### Class: `Timer` + + -Additional metadata associated with the metric. +A helper for measuring durations. Obtained via [`metric.startTimer()`][]. -#### `metric.channelName` +#### `timer.startTime` -* {string} +* {number} -The name of the diagnostics\_channel used for this metric. +The start time in milliseconds from `performance.now()`. -#### `metric.channel` +#### `timer.stop()` -* {Channel} +* Returns: {number} The duration in milliseconds. -The diagnostics channel instance used for this metric. +Stops the timer and records the duration to the associated metric. Throws +`ERR_INVALID_STATE` if called after the timer has already been stopped. -#### `metric.shouldReport` +#### `timer[Symbol.dispose]()` -* {boolean} +Stops the timer if it has not already been stopped. Enables `using` syntax +for automatic cleanup. + +### Class: `Consumer` + + -Indicates whether the metric should report values. This can be used to -conditionally enable or disable value preparation work. +Aggregates metric values using a subscriber model. Each consumer subscribes +directly to metrics at subscription time, eliminating per-value identity +lookups. Consumers are created with [`metrics.createConsumer()`][]. -#### `metric.report(value[, meta])` +#### `consumer.collect()` -* `value` {number} The value to report. -* `meta` {Object} Additional metadata for this report. -* Returns: {metrics.MetricReport} +* Returns: {Object\[]} + +Collects all metrics and returns an array of metric snapshots. Observable +metrics are sampled during this call, each receiving only its own subscriber +to maintain consumer isolation. -Reports a value for the metric, creating a `MetricReport` instance. -This bypasses the metric type specific methods, allowing direct reporting -to a channel. +Each snapshot is an object with: -Generally this method should not be used directly. Instead, use the -specific methods provided by each metric type (e.g., `increment`, `reset`, -`mark`, etc.) which internally call this method with the appropriate value and -metadata. +* `descriptor` {Object} The metric descriptor (`name`, `unit`, `description`, + `scope`). +* `temporality` {string} `'cumulative'` or `'delta'`. +* `timestamp` {number} The collection timestamp. +* `startTime` {number} Start of the time window (`'delta'` temporality only). +* `dataPoints` {Object\[]} Array of aggregated data points. Each has an + `attributes` property plus aggregation-specific fields (see + [`metrics.createConsumer()`][]). + +Snapshots with no data points are omitted. For `'delta'` temporality, +subscriber state is reset after collection. ```mjs import { metrics } from 'node:perf_hooks'; -const { createGauge } = metrics; -const memoryUsage = gauge('memory.usage', { service: 'web' }); +const m = metrics.create('app.requests'); +const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { 'app.requests': { aggregation: 'sum' } }, +}); + +m.record(1, { status: 200 }); +m.record(1, { status: 404 }); + +const snapshot = consumer.collect(); +console.log(snapshot[0].dataPoints); +// [ +// { sum: 1, count: 1, attributes: { status: 200 } }, +// { sum: 1, count: 1, attributes: { status: 404 } }, +// ] -memoryUsage.report(85); // Reports a value of 85 -memoryUsage.report(90, { threshold: 'warning' }); // Reports 90 with metadata +consumer.close(); +m.close(); ``` -### Class: `Counter` +```cjs +const { metrics } = require('node:perf_hooks'); + +const m = metrics.create('app.requests'); +const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { 'app.requests': { aggregation: 'sum' } }, +}); + +m.record(1, { status: 200 }); +m.record(1, { status: 404 }); + +const snapshot = consumer.collect(); +console.log(snapshot[0].dataPoints); + +consumer.close(); +m.close(); +``` -* Extends: {metrics.Metric} +#### `consumer.autoCollect(interval, callback)` -A metric that only increases or decreases. +* `interval` {number} Collection interval in milliseconds. +* `callback` {Function} Called with the snapshot array from each collection. +* Returns: {Function} A stop function that cancels auto-collection. + +Starts periodic automatic collection. The underlying timer is unref'd so it +does not keep the process alive. Throws `ERR_INVALID_STATE` if auto-collection +is already active or if the consumer is closed. + +```mjs +import { metrics } from 'node:perf_hooks'; + +const consumer = metrics.createConsumer(); + +const stop = consumer.autoCollect(10_000, (snapshot) => { + // Called every 10 seconds with the collected snapshot + for (const metric of snapshot) { + console.log(metric.descriptor.name, metric.dataPoints); + } +}); + +// Later, cancel periodic collection +stop(); +consumer.close(); +``` + +```cjs +const { metrics } = require('node:perf_hooks'); + +const consumer = metrics.createConsumer(); + +const stop = consumer.autoCollect(10_000, (snapshot) => { + for (const metric of snapshot) { + console.log(metric.descriptor.name, metric.dataPoints); + } +}); + +stop(); +consumer.close(); +``` -#### `counter.value` +#### `consumer.close()` -* {number} - -The current value of the counter. +Closes the consumer: stops auto-collection, unsubscribes from all metrics, +and unregisters from the global registry. Safe to call multiple times. -#### `counter.increment([n[, meta]])` +#### `consumer.onMetricCreated(metric)` -* `n` {number} The amount to increment. **Default:** `1` -* `meta` {Object} Additional metadata for this report. +* `metric` {Metric} -Increments the counter by the specified amount. +Called by the registry when a new metric is created. Override to observe +metric creation events. The default implementation subscribes to the metric +if it matches the consumer's configuration. -```mjs -import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; +#### `consumer.onMetricClosed(metric)` -const apiCalls = createCounter('api.calls', { service: 'web' }); + -apiCalls.increment(); // Increment by 1 -apiCalls.increment(5); // Increment by 5 -apiCalls.increment(10, { endpoint: '/api/users' }); // Increment by 10 with metadata -apiCalls.increment({ endpoint: '/api/orders' }); // Increment by 1 with metadata -``` +* `metric` {Metric} -#### `counter.decrement([n[, meta]])` +Called by the registry when a metric is closed. Override to observe metric +closure events. The default implementation cleans up the consumer's +subscriptions for that metric. + +### Class: `MetricDescriptor` -* `n` {number} The amount to decrement. **Default:** `1` -* `meta` {Object} Additional metadata for this report. +An immutable descriptor for a metric. Created once per metric and reused. +Consumers can use reference equality (`===`) for fast comparisons. Obtained +via [`metric.descriptor`][]. -Decrements the counter by the specified amount. +#### `metricDescriptor.name` -```mjs -import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; + -const errorCount = createCounter('errors.total', { component: 'database' }); +* {string} -errorCount.decrement(); // Decrement by 1 -errorCount.decrement(3); // Decrement by 3 -errorCount.decrement(2, { errorType: 'timeout' }); // Decrement by 2 with metadata -errorCount.decrement({ errorType: 'timeout' }); // Decrement by 1 with metadata -``` +The metric name. -#### `counter.createTimer([meta])` +#### `metricDescriptor.unit` -* `meta` {Object} Additional metadata to include with the report. -* Returns: {Timer} +* {string|undefined} -Creates a timer that will increment this counter with its duration when stopped. +The unit of measurement. -```mjs -import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; +#### `metricDescriptor.description` -const requestDuration = createCounter('request.duration.ms'); + -const timer = requestDuration.createTimer({ endpoint: '/api/users' }); -// Process request... -const duration = timer.stop(); // Counter is incremented with duration -``` +* {string|undefined} -### Class: `Gauge` +The human-readable description. -* Extends: {metrics.Metric} +#### `metricDescriptor.scope` -A metric representing a single value that can go up or down. +* {InstrumentationScope|undefined} + +The instrumentation scope. -#### `gauge.value` +#### `metricDescriptor.channel` -* {number} +* {Channel} -The current value of the metric. +The `node:diagnostics_channel` channel for this metric (lazily created). The +channel name is `metrics:{name}`. -#### `gauge.reset([value[, meta]])` +#### `metricDescriptor.toJSON()` -* `value` {number} The new value. **Default:** `0` -* `meta` {Object} Additional metadata for this report. +* Returns: {Object} -Sets the gauge to a specific value and reports it. +Returns a plain object representation suitable for JSON serialization. -```mjs -import { metrics } from 'node:perf_hooks'; -const { createGauge } = metrics; -import { memoryUsage } from 'node:process'; +### Class: `InstrumentationScope` -const memory = createGauge('memory.usage.bytes'); + -memory.reset(); // Reset to 0 -memory.reset(memoryUsage().heapUsed); // Set to current memory usage -memory.reset(1024, { source: 'system' }); // Set to 1024 with metadata -``` +Identifies the library or module producing metrics. Corresponds to the +[OpenTelemetry Instrumentation Scope][] concept. -#### `gauge.createTimer([meta])` +#### `new InstrumentationScope(name[, version[, schemaUrl]])` -* `meta` {Object} Additional metadata to include with the report. -* Returns: {Timer} +* `name` {string} The scope name (e.g., package name). +* `version` {string} The scope version. +* `schemaUrl` {string} The schema URL. -Creates a timer that will set this gauge to its duration when stopped. +#### `instrumentationScope.name` -```mjs -import { metrics } from 'node:perf_hooks'; -const { createGauge } = metrics; + -const responseTime = createGauge('response.time.ms'); +* {string} -const timer = responseTime.createTimer({ endpoint: '/api/users' }); -// Process request... -const duration = timer.stop(); // Gauge is set to duration -``` +#### `instrumentationScope.version` -### Class: `Timer` + + +* {string|undefined} + +#### `instrumentationScope.schemaUrl` -A helper for measuring durations that reports the elapsed time via a callback when stopped. +* {string|undefined} -#### `timer.start` +#### `instrumentationScope.toJSON()` -* {number} +* Returns: {Object} + +### Class: `Exemplar` + + -The start time of the timer (milliseconds since `performance.timeOrigin`). This property is read-only. +A sample measurement with trace context, used to correlate metric data points +with distributed traces. See the [OpenTelemetry Exemplars][] specification. +`Exemplar` instances are returned by [`reservoirSampler.getExemplars()`][] and +[`boundarySampler.getExemplars()`][]. -#### `timer.end` +#### `exemplar.value` -* {number|undefined} +* {number|bigint} + +#### `exemplar.timestamp` + + -The end time of the timer (milliseconds since `performance.timeOrigin`). `undefined` if timer is still running. -This property is read-only. +* {number} -#### `timer.duration` +#### `exemplar.traceId` -* {number|undefined} +* {string} + +#### `exemplar.spanId` -The duration in milliseconds. `undefined` if timer is still running. This property is read-only. + -#### `timer.stop()` +* {string} + +#### `exemplar.filteredAttributes` -* Returns: {number} The duration in milliseconds. +* {Object} -Stops the timer and reports the duration. Can only be called once. +#### `exemplar.toJSON()` -```mjs -import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; + -const dbQueryDuration = createCounter('db.query.duration'); +* Returns: {Object} -const t = dbQueryDuration.createTimer({ query: 'SELECT * FROM users' }); +### Class: `ReservoirSampler` -// Perform database query... + -// Stop the timer and get the duration -const duration = t.stop(); // Returns duration in milliseconds -``` +An exemplar sampler using reservoir sampling (Algorithm R). Maintains a +fixed-size random sample of exemplars over the collection window. -#### `timer[Symbol.dispose]()` +#### `new metrics.ReservoirSampler(maxExemplars, extract)` -Allows `using` syntax to automatically stop the timer when done. +* `maxExemplars` {number} Maximum number of exemplars to retain. Must be + at least `1`. +* `extract` {Function} Called with the recording attributes to extract trace + context. Must return an object with `traceId`, `spanId`, and + `filteredAttributes` properties, or `null` to skip sampling. ```mjs import { metrics } from 'node:perf_hooks'; -const { createCounter } = metrics; -const dbQueryDuration = createCounter('db.query.duration'); - -{ - using t = dbQueryDuration.createTimer({ query: 'SELECT * FROM users' }); - // Perform database query... +const { ReservoirSampler } = metrics; - // Timer is automatically stopped here +function extractTraceContext(attributes) { + if (!attributes.traceId || !attributes.spanId) return null; + const { traceId, spanId, ...filteredAttributes } = attributes; + return { traceId, spanId, filteredAttributes }; } -``` -### Class: `PullGauge` +const sampler = new ReservoirSampler(10, extractTraceContext); + +const consumer = metrics.createConsumer({ + metrics: { + 'http.request.duration': { + aggregation: 'histogram', + exemplar: sampler, + }, + }, +}); +``` -* Extends: {metrics.Metric} +#### `reservoirSampler.sample(value, timestamp, attributes)` -A gauge that samples values on-demand when the `sample()` method is called. +* `value` {number|bigint} +* `timestamp` {number} +* `attributes` {Object} -#### `pullGauge.sample([meta])` +Records a candidate exemplar. If the reservoir is not full, the sample is +added directly. Otherwise, it randomly replaces an existing sample with +decreasing probability (Algorithm R). + +#### `reservoirSampler.getExemplars()` -* `meta` {Object} Additional metadata for this specific sample. -* Returns: {number} The sampled value. +* Returns: {Exemplar\[]} -Calls the configured function to get the current value and reports it. +Returns the current set of sampled exemplars. -```mjs -import { metrics } from 'node:perf_hooks'; -const { createPullGauge } = metrics; -import { cpuUsage } from 'node:process'; +#### `reservoirSampler.reset()` -const cpu = createPullGauge('cpu.usage', () => { - return cpuUsage().user; -}); + -// Sample the gauge when needed -const value = cpu.sample(); -console.log(`Current CPU usage: ${value}`); +Clears all exemplars and resets the sample count. Called automatically after +`'delta'` temporality collection. -// Sample with additional metadata -cpu.sample({ threshold: 'high' }); -``` +### Class: `BoundarySampler` -## Integration with Diagnostics Channel + + +An exemplar sampler that maintains one exemplar per histogram bucket boundary. +Suitable for use with `'histogram'` aggregation when you want one representative +trace per bucket. + +#### `new metrics.BoundarySampler(boundaries, extract)` + + -All metrics publish their reports through `node:diagnostics_channel`. The channel -name format is `metrics:{type}:{name}` where `{type}` is the metric type and -`{name}` is the metric name. +* `boundaries` {number\[]} Histogram bucket boundaries. Should match the + `boundaries` option passed to the consumer for the same metric. +* `extract` {Function} Called with the recording attributes to extract trace + context. Must return an object with `traceId`, `spanId`, and + `filteredAttributes` properties, or `null` to skip sampling. ```mjs -import { subscribe } from 'node:diagnostics_channel'; +import { metrics } from 'node:perf_hooks'; -// Subscribe to a specific metric -subscribe('metrics:counter:api.calls', (report) => { - console.log(`API calls: ${report.value}`); -}); -``` +const { BoundarySampler } = metrics; -```cjs -const { subscribe } = require('node:diagnostics_channel'); +function extractTraceContext(attributes) { + if (!attributes.traceId || !attributes.spanId) return null; + const { traceId, spanId, ...filteredAttributes } = attributes; + return { traceId, spanId, filteredAttributes }; +} -subscribe('metrics:counter:api.calls', (report) => { - console.log(`API calls: ${report.value}`); +const boundaries = [10, 50, 100, 500]; +const sampler = new BoundarySampler(boundaries, extractTraceContext); + +const consumer = metrics.createConsumer({ + metrics: { + 'http.request.duration': { + aggregation: 'histogram', + boundaries, + exemplar: sampler, + }, + }, }); ``` -Additionally there is a specialized channel `metrics:new` which publishes any -newly created metrics, allowing subcribing to all metrics without needing to -know their names in advance. +#### `boundarySampler.sample(value, timestamp, attributes)` -```mjs -import { subscribe } from 'node:diagnostics_channel'; + -subscribe('metrics:new', (metric) => { - console.log(`New metric created: ${metric.type} - ${metric.name}`); -}); -``` +* `value` {number|bigint} +* `timestamp` {number} +* `attributes` {Object} -```cjs -const { subscribe } = require('node:diagnostics_channel'); +Records a candidate exemplar into the bucket corresponding to `value`. Each +bucket retains only its most recently sampled exemplar. -subscribe('metrics:new', (metric) => { - console.log(`New metric created: ${metric.type} - ${metric.name}`); -}); -``` +#### `boundarySampler.getExemplars()` + + + +* Returns: {Exemplar\[]} -## Best Practices +Returns exemplars for all populated buckets. -1. **Naming Conventions**: Use dot-separated hierarchical names (e.g., `http.requests.total`). +#### `boundarySampler.reset()` + + -2. **Metadata**: Use metadata to add dimensions to your metrics without creating separate metric instances. +Clears all stored exemplars. Called automatically after `'delta'` temporality +collection. -3. **Performance**: Metric types are designed to be lightweight. However, avoid - creating metric types in hot code paths. As with diagnostics\_channel, metric - creation is optimized for capture time performance by moving costly - operations to metric type creation time. +[OpenTelemetry Exemplars]: https://opentelemetry.io/docs/specs/otel/metrics/data-model/#exemplars +[OpenTelemetry Instrumentation Scope]: https://opentelemetry.io/docs/specs/otel/glossary/#instrumentation-scope +[`BoundarySampler`]: #class-boundarysampler +[`MetricDescriptor`]: #class-metricdescriptor +[`ReservoirSampler`]: #class-reservoirsampler +[`boundarySampler.getExemplars()`]: #boundarysamplergetexemplars +[`consumer.collect()`]: #consumercollect +[`consumer.onMetricClosed()`]: #consumeronmetricclosedmetric +[`metric.descriptor`]: #metricdescriptor +[`metric.startTimer()`]: #metricstarttimerattributes +[`metrics.create()`]: #metricscreatenameoptions +[`metrics.createConsumer()`]: #metricscreateConsumerconfig +[`metrics.createDiagnosticsChannelConsumer()`]: #metricscreatediagnosticschannelconsumer +[`reservoirSampler.getExemplars()`]: #reservoirsamplergetexemplars diff --git a/lib/diagnostics_channel.js b/lib/diagnostics_channel.js index 743a177e3c485e..c7264b9ac2c5a0 100644 --- a/lib/diagnostics_channel.js +++ b/lib/diagnostics_channel.js @@ -9,10 +9,12 @@ const { ArrayPrototypeSplice, ObjectDefineProperty, ObjectGetPrototypeOf, + ObjectKeys, ObjectSetPrototypeOf, ReflectApply, SafeFinalizationRegistry, SafeMap, + SymbolDispose, SymbolHasInstance, } = primordials; @@ -80,24 +82,45 @@ function maybeMarkInactive(channel) { } } -function defaultTransform(data) { - return data; -} +class RunStoresScope { + #stack; + + constructor(activeChannel, data) { + // eslint-disable-next-line no-restricted-globals + using stack = new DisposableStack(); + + // Enter stores using withScope + if (activeChannel._stores) { + for (const entry of activeChannel._stores.entries()) { + const store = entry[0]; + const transform = entry[1]; + + let newContext = data; + if (transform) { + try { + newContext = transform(data); + } catch (err) { + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + continue; + } + } -function wrapStoreRun(store, data, next, transform = defaultTransform) { - return () => { - let context; - try { - context = transform(data); - } catch (err) { - process.nextTick(() => { - triggerUncaughtException(err, false); - }); - return next(); + stack.use(store.withScope(newContext)); + } } - return store.run(context, next); - }; + // Publish data + activeChannel.publish(data); + + // Transfer ownership of the stack + this.#stack = stack.move(); + } + + [SymbolDispose]() { + this.#stack[SymbolDispose](); + } } // TODO(qard): should there be a C++ channel interface? @@ -167,19 +190,14 @@ class ActiveChannel { } } - runStores(data, fn, thisArg, ...args) { - let run = () => { - this.publish(data); - return ReflectApply(fn, thisArg, args); - }; - - for (const entry of this._stores.entries()) { - const store = entry[0]; - const transform = entry[1]; - run = wrapStoreRun(store, data, run, transform); - } + withStoreScope(data) { + return new RunStoresScope(this, data); + } - return run(); + runStores(data, fn, thisArg, ...args) { + // eslint-disable-next-line no-unused-vars + using scope = this.withStoreScope(data); + return ReflectApply(fn, thisArg, args); } } @@ -228,14 +246,17 @@ class Channel { runStores(data, fn, thisArg, ...args) { return ReflectApply(fn, thisArg, args); } + + withStoreScope() { + // Return no-op disposable for inactive channels + return { + [SymbolDispose]() {}, + }; + } } const channels = new WeakRefMap(); -function hasChannel(name) { - return channels.has(name); -} - function channel(name) { const channel = channels.get(name); if (channel) return channel; @@ -262,12 +283,9 @@ function hasSubscribers(name) { return channel.hasSubscribers; } -const traceEvents = [ +const windowEvents = [ 'start', 'end', - 'asyncStart', - 'asyncEnd', - 'error', ]; function assertChannel(value, name) { @@ -276,7 +294,7 @@ function assertChannel(value, name) { } } -function tracingChannelFrom(nameOrChannels, name) { +function channelFromMap(nameOrChannels, name, className) { if (typeof nameOrChannels === 'string') { return channel(`tracing:${nameOrChannels}:${name}`); } @@ -288,7 +306,7 @@ function tracingChannelFrom(nameOrChannels, name) { } throw new ERR_INVALID_ARG_TYPE('nameOrChannels', - ['string', 'object', 'TracingChannel'], + ['string', 'object', className], nameOrChannels); } @@ -297,28 +315,66 @@ function emitNonThenableWarning(fn) { 'which returned a non-thenable.'); } -class TracingChannel { +class WindowChannelScope { + #context; + #end; + #scope; + + constructor(windowChannel, context) { + // Only proceed if there are subscribers + if (!windowChannel.hasSubscribers) { + return; + } + + const { start, end } = windowChannel; + this.#context = context; + this.#end = end; + + // Use RunStoresScope for the start channel + this.#scope = new RunStoresScope(start, context); + } + + [SymbolDispose]() { + if (!this.#scope) { + return; + } + + // Clear scope first to make idempotent + const scope = this.#scope; + this.#scope = undefined; + + // Copy all enumerable own properties back to context + for (const key of ObjectKeys(this)) { + this.#context[key] = this[key]; + } + + // Publish end event + this.#end.publish(this.#context); + + // Dispose the start scope to restore stores + scope[SymbolDispose](); + } +} + +class WindowChannel { constructor(nameOrChannels) { - for (let i = 0; i < traceEvents.length; ++i) { - const eventName = traceEvents[i]; + for (let i = 0; i < windowEvents.length; ++i) { + const eventName = windowEvents[i]; ObjectDefineProperty(this, eventName, { __proto__: null, - value: tracingChannelFrom(nameOrChannels, eventName), + value: channelFromMap(nameOrChannels, eventName, 'WindowChannel'), }); } } get hasSubscribers() { return this.start?.hasSubscribers || - this.end?.hasSubscribers || - this.asyncStart?.hasSubscribers || - this.asyncEnd?.hasSubscribers || - this.error?.hasSubscribers; + this.end?.hasSubscribers; } subscribe(handlers) { - for (let i = 0; i < traceEvents.length; ++i) { - const name = traceEvents[i]; + for (let i = 0; i < windowEvents.length; ++i) { + const name = windowEvents[i]; if (!handlers[name]) continue; this[name]?.subscribe(handlers[name]); @@ -328,8 +384,8 @@ class TracingChannel { unsubscribe(handlers) { let done = true; - for (let i = 0; i < traceEvents.length; ++i) { - const name = traceEvents[i]; + for (let i = 0; i < windowEvents.length; ++i) { + const name = windowEvents[i]; if (!handlers[name]) continue; if (!this[name]?.unsubscribe(handlers[name])) { @@ -340,26 +396,150 @@ class TracingChannel { return done; } + withScope(context = {}) { + return new WindowChannelScope(this, context); + } + + run(context, fn, thisArg, ...args) { + context ??= {}; + // eslint-disable-next-line no-unused-vars + using scope = this.withScope(context); + const result = ReflectApply(fn, thisArg, args); + context.result = result; + return result; + } +} + +function windowChannel(nameOrChannels) { + return new WindowChannel(nameOrChannels); +} + +class TracingChannel { + #callWindow; + #continuationWindow; + + constructor(nameOrChannels) { + // Create a WindowChannel for start/end (call window) + if (typeof nameOrChannels === 'string') { + this.#callWindow = new WindowChannel(nameOrChannels); + this.#continuationWindow = new WindowChannel({ + start: channel(`tracing:${nameOrChannels}:asyncStart`), + end: channel(`tracing:${nameOrChannels}:asyncEnd`), + }); + } else if (typeof nameOrChannels === 'object') { + this.#callWindow = new WindowChannel({ + start: nameOrChannels.start, + end: nameOrChannels.end, + }); + this.#continuationWindow = new WindowChannel({ + start: nameOrChannels.asyncStart, + end: nameOrChannels.asyncEnd, + }); + } + + // Create individual channel for error + ObjectDefineProperty(this, 'error', { + __proto__: null, + value: channelFromMap(nameOrChannels, 'error', 'TracingChannel'), + }); + } + + get start() { + return this.#callWindow.start; + } + + get end() { + return this.#callWindow.end; + } + + get asyncStart() { + return this.#continuationWindow.start; + } + + get asyncEnd() { + return this.#continuationWindow.end; + } + + get hasSubscribers() { + return this.#callWindow.hasSubscribers || + this.#continuationWindow.hasSubscribers || + this.error?.hasSubscribers; + } + + subscribe(handlers) { + // Subscribe to call window (start/end) + if (handlers.start || handlers.end) { + this.#callWindow.subscribe({ + start: handlers.start, + end: handlers.end, + }); + } + + // Subscribe to continuation window (asyncStart/asyncEnd) + if (handlers.asyncStart || handlers.asyncEnd) { + this.#continuationWindow.subscribe({ + start: handlers.asyncStart, + end: handlers.asyncEnd, + }); + } + + // Subscribe to error channel + if (handlers.error) { + this.error.subscribe(handlers.error); + } + } + + unsubscribe(handlers) { + let done = true; + + // Unsubscribe from call window + if (handlers.start || handlers.end) { + if (!this.#callWindow.unsubscribe({ + start: handlers.start, + end: handlers.end, + })) { + done = false; + } + } + + // Unsubscribe from continuation window + if (handlers.asyncStart || handlers.asyncEnd) { + if (!this.#continuationWindow.unsubscribe({ + start: handlers.asyncStart, + end: handlers.asyncEnd, + })) { + done = false; + } + } + + // Unsubscribe from error channel + if (handlers.error) { + if (!this.error.unsubscribe(handlers.error)) { + done = false; + } + } + + return done; + } + traceSync(fn, context = {}, thisArg, ...args) { if (!this.hasSubscribers) { return ReflectApply(fn, thisArg, args); } - const { start, end, error } = this; + const { error } = this; - return start.runStores(context, () => { - try { - const result = ReflectApply(fn, thisArg, args); - context.result = result; - return result; - } catch (err) { - context.error = err; - error.publish(context); - throw err; - } finally { - end.publish(context); - } - }); + // eslint-disable-next-line no-unused-vars + using scope = this.#callWindow.withScope(context); + try { + const result = ReflectApply(fn, thisArg, args); + context.result = result; + return result; + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } } tracePromise(fn, context = {}, thisArg, ...args) { @@ -371,44 +551,42 @@ class TracingChannel { return result; } - const { start, end, asyncStart, asyncEnd, error } = this; + const { error } = this; + const continuationWindow = this.#continuationWindow; function reject(err) { context.error = err; error.publish(context); - asyncStart.publish(context); + // Just publish asyncStart/asyncEnd without running stores + continuationWindow.start.publish(context); // TODO: Is there a way to have asyncEnd _after_ the continuation? - asyncEnd.publish(context); - throw err; + continuationWindow.end.publish(context); + return PromiseReject(err); } function resolve(result) { context.result = result; - asyncStart.publish(context); + // Just publish asyncStart/asyncEnd without running stores + continuationWindow.start.publish(context); // TODO: Is there a way to have asyncEnd _after_ the continuation? - asyncEnd.publish(context); + continuationWindow.end.publish(context); return result; } - return start.runStores(context, () => { - try { - const result = ReflectApply(fn, thisArg, args); - // If the return value is not a thenable, then return it with a warning. - // Do not publish to asyncStart/asyncEnd. - if (typeof result?.then !== 'function') { - emitNonThenableWarning(fn); - context.result = result; - return result; - } - return result.then(resolve, reject); - } catch (err) { - context.error = err; - error.publish(context); - throw err; - } finally { - end.publish(context); + // eslint-disable-next-line no-unused-vars + using scope = this.#callWindow.withScope(context); + try { + let promise = ReflectApply(fn, thisArg, args); + // Convert thenables to native promises + if (!(promise instanceof Promise)) { + promise = PromiseResolve(promise); } - }); + return PromisePrototypeThen(promise, resolve, reject); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } } traceCallback(fn, position = -1, context = {}, thisArg, ...args) { @@ -416,7 +594,8 @@ class TracingChannel { return ReflectApply(fn, thisArg, args); } - const { start, end, asyncStart, asyncEnd, error } = this; + const { error } = this; + const continuationWindow = this.#continuationWindow; function wrappedCallback(err, res) { if (err) { @@ -426,31 +605,25 @@ class TracingChannel { context.result = res; } - // Using runStores here enables manual context failure recovery - asyncStart.runStores(context, () => { - try { - return ReflectApply(callback, this, arguments); - } finally { - asyncEnd.publish(context); - } - }); + // Use continuation window for asyncStart/asyncEnd around callback + // eslint-disable-next-line no-unused-vars + using scope = continuationWindow.withScope(context); + return ReflectApply(callback, this, arguments); } const callback = ArrayPrototypeAt(args, position); validateFunction(callback, 'callback'); ArrayPrototypeSplice(args, position, 1, wrappedCallback); - return start.runStores(context, () => { - try { - return ReflectApply(fn, thisArg, args); - } catch (err) { - context.error = err; - error.publish(context); - throw err; - } finally { - end.publish(context); - } - }); + // eslint-disable-next-line no-unused-vars + using scope = this.#callWindow.withScope(context); + try { + return ReflectApply(fn, thisArg, args); + } catch (err) { + context.error = err; + error.publish(context); + throw err; + } } } @@ -461,11 +634,12 @@ function tracingChannel(nameOrChannels) { dc_binding.linkNativeChannel((name) => channel(name)); module.exports = { - hasChannel, channel, hasSubscribers, subscribe, tracingChannel, unsubscribe, + windowChannel, Channel, + WindowChannel, }; diff --git a/lib/internal/bootstrap/realm.js b/lib/internal/bootstrap/realm.js index 3b72209146705c..f49f0814bbc687 100644 --- a/lib/internal/bootstrap/realm.js +++ b/lib/internal/bootstrap/realm.js @@ -124,7 +124,6 @@ const legacyWrapperList = new SafeSet([ // beginning with "internal/". // Modules that can only be imported via the node: scheme. const schemelessBlockList = new SafeSet([ - 'metrics', 'sea', 'sqlite', 'quic', diff --git a/lib/internal/perf/metrics.js b/lib/internal/perf/metrics.js index 30c380ae480622..8687defaedebd2 100644 --- a/lib/internal/perf/metrics.js +++ b/lib/internal/perf/metrics.js @@ -1,440 +1,1868 @@ +'use strict'; + /** - * A metrics provider which reports to diagnostics_channel. + * This provides a flexible metrics core based on diagnostics_channel design + * principles. It is meant to do the minimum work required at each stage to + * enable maximal flexibility of the next. * - * # Metric Types - * - * - Counter: An increasing or decreasing value. - * - Gauge: A snapshot of a single value in time. - * - Timer: A duration in milliseconds. - * - PullGauge: A gauge which updates its value by calling a function when sampled. - * # TODO(qard): - * - Histograms - * - Distributions/Summaries + * It follows a three-layer architecture: + * 1. Value Producers - Generates singular values (direct, observable, timer) + * 2. Descriptors - Metric values are grouped through a descriptor + * 3. Consumers - Multi-tenant continuous aggregation of selected descriptors */ -'use strict'; - const { + Array, + ArrayFrom, + ArrayIsArray, + ArrayPrototypeIndexOf, + ArrayPrototypePush, + ArrayPrototypeSplice, + BigInt, + MathRandom, + MathRound, + Number, ObjectAssign, + ObjectDefineProperty, + ObjectKeys, + SafeMap, + SafeSet, + Symbol, SymbolDispose, } = primordials; const { codes: { + ERR_INVALID_ARG_TYPE, ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, }, } = require('internal/errors'); const { - kValidateObjectAllowNullable, + kEmptyObject, +} = require('internal/util'); + +const { + validateFunction, validateNumber, validateObject, - validateOneOf, validateString, - validateFunction, } = require('internal/validators'); -const { channel, hasChannel } = require('diagnostics_channel'); const { performance } = require('internal/perf/performance'); +const { createHistogram } = require('internal/histogram'); +const { + setInterval, + clearInterval, +} = require('timers'); +const { triggerUncaughtException } = internalBinding('errors'); +const { ObjectIdentity } = require('internal/util/object_identity'); -const newMetricChannel = channel('metrics:new'); +// Private symbols for module-internal methods +const kReset = Symbol('kReset'); +const kAddSubscriber = Symbol('kAddSubscriber'); +const kRemoveSubscriber = Symbol('kRemoveSubscriber'); +const kEnableTimestamp = Symbol('kEnableTimestamp'); +const kDisableTimestamp = Symbol('kDisableTimestamp'); +const kAdd = Symbol('kAdd'); +const kRemove = Symbol('kRemove'); + +// ============================================================================= +// Attribute Identity for Performance +// ============================================================================= /** - * Represents a single reported metric. - * @typedef {MetricReport} MetricReport - * @property {string} type The type of metric (e.g., 'gauge', 'counter'). - * @property {string} name The name of the metric. - * @property {number} value The value of the metric. - * @property {object} [meta] Additional metadata to include with the report. - * @property {number} time The timestamp of the report in milliseconds since `performance.timeOrigin`. + * Shared ObjectIdentity instance for attribute hashing. + * Uses xxHash32 algorithm (industry standard, same as OpenTelemetry/Prometheus). + * Returns numeric hashes for efficient Map lookups. */ -class MetricReport { - #type; +const attributeIdentity = new ObjectIdentity({ + sortedKeysCacheSize: 1000, +}); + +// ============================================================================= +// Layer 2: Data Representation +// ============================================================================= + +/** + * InstrumentationScope identifies the library/module producing metrics. + * Immutable once created. + * See: https://opentelemetry.io/docs/specs/otel/glossary/#instrumentation-scope + */ +class InstrumentationScope { #name; - #value; - #meta; - #time; + #version; + #schemaUrl; + #json; - /** - * Constructs a new metric report. - * @param {string} type The type of metric (e.g., 'gauge', 'counter'). - * @param {string} name The name of the metric. - * @param {number} value The value of the metric. - * @param {object} [meta] Additional metadata to include with the report. - */ - constructor(type, name, value, meta) { - validateString(type, 'type'); + constructor(name, version, schemaUrl) { validateString(name, 'name'); - validateNumber(value, 'value'); - validateObject(meta, 'meta', kValidateObjectAllowNullable); - this.#type = type; + if (version !== undefined) validateString(version, 'version'); + if (schemaUrl !== undefined) validateString(schemaUrl, 'schemaUrl'); this.#name = name; + this.#version = version; + this.#schemaUrl = schemaUrl; + } + + get name() { + return this.#name; + } + + get version() { + return this.#version; + } + + get schemaUrl() { + return this.#schemaUrl; + } + + toJSON() { + this.#json ??= { + name: this.#name, + version: this.#version, + schemaUrl: this.#schemaUrl, + }; + return this.#json; + } +} + +/** + * Exemplar represents a sample measurement with trace context. + * Used to correlate metrics with distributed traces. + * See: https://opentelemetry.io/docs/specs/otel/metrics/data-model/#exemplars + */ +class Exemplar { + #value; + #timestamp; + #traceId; + #spanId; + #filteredAttributes; + + constructor(value, timestamp, traceId, spanId, filteredAttributes = kEmptyObject) { this.#value = value; - this.#meta = meta; - this.#time = performance.now(); + this.#timestamp = timestamp; + this.#traceId = traceId; + this.#spanId = spanId; + this.#filteredAttributes = filteredAttributes; } - get type() { - return this.#type; + get value() { + return this.#value; + } + + get timestamp() { + return this.#timestamp; + } + + get traceId() { + return this.#traceId; + } + + get spanId() { + return this.#spanId; + } + + get filteredAttributes() { + return this.#filteredAttributes; + } + + toJSON() { + return { + value: this.#value, + timestamp: this.#timestamp, + traceId: this.#traceId, + spanId: this.#spanId, + filteredAttributes: this.#filteredAttributes, + }; + } +} + +/** + * Immutable descriptor for a metric. Constructed once, reused for all values. + * Consumers can use reference equality (===) for fast comparisons. + */ +class MetricDescriptor { + #name; + #unit; + #description; + #scope; // InstrumentationScope + #channel; // Lazily created diagnostics_channel for this metric + #json; // Cached JSON representation + + constructor(name, unit, description, scope) { + this.#name = name; + this.#unit = unit; + this.#description = description; + this.#scope = scope; } get name() { return this.#name; } - get value() { - return this.#value; + get unit() { + return this.#unit; + } + + get description() { + return this.#description; + } + + get scope() { + return this.#scope; + } + + /** + * Get the diagnostics_channel for this metric (lazily created). + * Used by DiagnosticsChannelConsumer for efficient publishing. + */ + get channel() { + if (!this.#channel) { + const dc = require('diagnostics_channel'); + this.#channel = dc.channel(`metrics:${this.#name}`); + } + return this.#channel; + } + + toJSON() { + this.#json ??= { + name: this.#name, + unit: this.#unit, + description: this.#description, + scope: this.#scope?.toJSON(), + }; + return this.#json; + } +} + +// ============================================================================= +// Layer 1: Value Producers +// ============================================================================= + +// Timer pool for object reuse to reduce GC pressure +const timerPool = []; +const kMaxPoolSize = 100; + +/** + * A timer that measures duration and records it to a metric when stopped. + */ +class Timer { + #metric; + #attributes; + #startTime; + #stopped; + + /** + * @param {Metric} metric - The metric to record duration to + * @param {object} [attributes] - Attributes to include with the recorded value + */ + constructor(metric, attributes = kEmptyObject) { + validateObject(attributes, 'attributes'); + this.#metric = metric; + this.#attributes = attributes; + this.#startTime = performance.now(); + this.#stopped = false; } - get meta() { - return this.#meta; + get startTime() { + return this.#startTime; } - get time() { - return this.#time; + /** + * Stop the timer and record the duration to the metric. + * @returns {number} The duration in milliseconds + * @throws {ERR_INVALID_STATE} If timer is already stopped + */ + stop() { + if (this.#stopped) { + throw new ERR_INVALID_STATE('Timer has already been stopped'); + } + this.#stopped = true; + const endTime = performance.now(); + const duration = endTime - this.#startTime; + // Pass endTime as timestamp to avoid redundant now() call in record() + this.#metric.record(duration, this.#attributes, endTime); + // Return to pool for reuse + this.#returnToPool(); + return duration; + } + + /** + * Support `using` syntax to automatically stop the timer. + * Does not throw if already stopped. + */ + [SymbolDispose]() { + if (!this.#stopped) { + this.stop(); + } + } + + /** + * Reset the timer for reuse (internal only). + * @param {Metric} metric - The metric to record duration to + * @param {object} attributes - Attributes to include with the recorded value + */ + [kReset](metric, attributes) { + this.#metric = metric; + this.#attributes = attributes; + this.#startTime = performance.now(); + this.#stopped = false; + } + + /** + * Return timer to pool if space available (internal only). + */ + #returnToPool() { + if (timerPool.length < kMaxPoolSize) { + // Clear references to allow GC of metric/attributes + this.#metric = null; + this.#attributes = null; + ArrayPrototypePush(timerPool, this); + } } } +// ============================================================================= +// Layer 2: Data Representation +// ============================================================================= + /** - * Represents a metric which can be reported to. - * @typedef {Metric} Metric - * @property {string} type The type of metric (e.g., 'gauge', 'counter'). - * @property {string} name The name of the metric. - * @property {object} [meta] Additional metadata to include with the report. - * @property {string} channelName The name of the corresponding diagnostics_channel - * @property {DiagnosticsChannel} channel The corresponding diagnostics_channel - * @property {boolean} shouldReport If a MetricReport should be prepared + * A metric that records values. Values are immediately dispatched to subscribers. + * + * Each metric maintains its own subscriber list (like diagnostics_channel). + * This eliminates per-value identity lookups - subscribers are registered once + * and called directly on each value. */ class Metric { - #channel; - #type; - #name; - #meta; + #descriptor; + #observable; + #subscribers = []; // Direct subscriber list (like dc channel) + #makeTimestamp = (ts) => ts; // Default: pass through unchanged + #timestampConsumers = 0; // Reference count for timestamp-needing consumers + #closed = false; /** - * Constructs a new metric. - * @param {string} type The type of metric. - * @param {string} name The name of the metric. - * @param {object} [meta] Additional metadata to include with the metric. + * @param {string} name - The metric name + * @param {object} [options] + * @param {string} [options.unit] - The unit of measurement + * @param {string} [options.description] - Human-readable description + * @param {Function} [options.observable] - Callback for observable metrics + * @param {InstrumentationScope} [options.scope] - The instrumentation scope */ - constructor(type, name, meta) { - validateOneOf(type, 'type', [ 'gauge', 'counter', 'pullGauge' ]); + constructor(name, options = kEmptyObject) { validateString(name, 'name'); - validateObject(meta, 'meta', kValidateObjectAllowNullable); if (name === '') { throw new ERR_INVALID_ARG_VALUE('name', name, 'must not be empty'); } + validateObject(options, 'options'); - this.#type = type; - this.#name = name; - this.#meta = meta; + const { unit, description, observable, scope } = options; + if (unit !== undefined) validateString(unit, 'options.unit'); + if (description !== undefined) validateString(description, 'options.description'); + if (observable !== undefined) validateFunction(observable, 'options.observable'); + if (scope !== undefined && !(scope instanceof InstrumentationScope)) { + throw new ERR_INVALID_ARG_TYPE('options.scope', 'InstrumentationScope', scope); + } + + this.#descriptor = new MetricDescriptor(name, unit, description, scope); + this.#observable = observable; - // Before acquiring the channel, check if it already exists. - const exists = hasChannel(this.channelName); - this.#channel = channel(this.channelName); + // Add to global registry, notifying consumers + registry[kAdd](this); + } - // If the channel is new and there are new channel subscribers, - // publish the metric to the new metric channel. - if (!exists && newMetricChannel.hasSubscribers) { - newMetricChannel.publish(this); + get descriptor() { + return this.#descriptor; + } + + get isObservable() { + return this.#observable !== undefined; + } + + /** + * Record a value. Immediately dispatches to all subscribers. + * @param {number|bigint} value - The value to record + * @param {object} [attributes] - Additional attributes for this value + * @param {number} [timestamp] - Optional timestamp (defaults to now) + */ + record(value, attributes = kEmptyObject, timestamp) { + if (typeof value !== 'number' && typeof value !== 'bigint') { + throw new ERR_INVALID_ARG_TYPE('value', ['number', 'bigint'], value); + } + validateObject(attributes, 'attributes'); + + const subscribers = this.#subscribers; + const subCount = subscribers.length; + if (subCount === 0) return; + + timestamp = this.#makeTimestamp(timestamp); + + // Fast iteration - no lookups, only interested parties + for (let i = 0; i < subCount; i++) { + subscribers[i].onValue(value, timestamp, attributes); } } - get type() { - return this.#type; + /** + * Sample an observable metric. Called during collect(). + * Only dispatches to the specified subscriber to maintain consumer isolation. + * Observable callbacks receive a facade with `record()` and `descriptor`: + * + * ```js + * observable: (metric) => { + * metric.record(10, { cpu: 0 }); + * metric.record(20, { cpu: 1 }); + * } + * ``` + * + * Error handling: Observable callback errors are non-fatal. They emit a warning + * and continue with other observables. This follows the principle that a single + * misbehaving observable should not break the entire metrics collection. + * @param {MetricSubscriber} subscriber - The specific subscriber to receive values + * @returns {boolean} True if any values were sampled + */ + sample(subscriber) { + if (!this.#observable || this.#closed) return false; + + const timestamp = this.#makeTimestamp(undefined); + let hasValues = false; + const descriptor = this.#descriptor; + + // Facade passed to callback for multi-value reporting with per-consumer isolation + const facade = { + record(value, attributes = kEmptyObject) { + if (typeof value !== 'number' && typeof value !== 'bigint') { + throw new ERR_INVALID_ARG_TYPE('value', ['number', 'bigint'], value); + } + validateObject(attributes, 'attributes'); + hasValues = true; + subscriber.onValue(value, timestamp, attributes); + }, + get descriptor() { + return descriptor; + }, + }; + + try { + this.#observable(facade); + } catch (err) { + // Observable errors are non-fatal to the collection loop, but should + // still be surfaced via uncaughtException (like diagnostics_channel) + process.nextTick(() => { + triggerUncaughtException(err, false); + }); + return hasValues; + } + + return hasValues; } - get name() { - return this.#name; + /** + * Create a timer that records its duration to this metric when stopped. + * @param {object} [attributes] - Attributes to include with the recorded value + * @returns {Timer} + */ + startTimer(attributes = kEmptyObject) { + validateObject(attributes, 'attributes'); + // Try to reuse a pooled timer + if (timerPool.length > 0) { + const timer = timerPool.pop(); + timer[kReset](this, attributes); + return timer; + } + return new Timer(this, attributes); } - get meta() { - return this.#meta; + /** + * Add a subscriber to this metric (internal). + * @param {MetricSubscriber} subscriber - The subscriber to add + * @returns {Function} Unsubscribe function + */ + [kAddSubscriber](subscriber) { + ArrayPrototypePush(this.#subscribers, subscriber); + return () => this[kRemoveSubscriber](subscriber); } - get channelName() { - return `metrics:${this.type}:${this.name}`; + /** + * Remove a subscriber from this metric (internal). + * @param {MetricSubscriber} subscriber - The subscriber to remove + */ + [kRemoveSubscriber](subscriber) { + const idx = ArrayPrototypeIndexOf(this.#subscribers, subscriber); + if (idx !== -1) { + ArrayPrototypeSplice(this.#subscribers, idx, 1); + } } - get channel() { - return this.#channel; + /** + * Enable timestamp generation for this metric (internal). + * Called by consumers that need timestamps (lastValue, DC consumer). + * Uses reference counting to track how many consumers need timestamps. + */ + [kEnableTimestamp]() { + this.#timestampConsumers++; + if (this.#timestampConsumers === 1) { + this.#makeTimestamp = (ts) => ts ?? performance.now(); + } + } + + /** + * Disable timestamp generation for this metric (internal). + * Called when a consumer that needed timestamps is closed. + * When no consumers need timestamps, reverts to pass-through. + */ + [kDisableTimestamp]() { + this.#timestampConsumers--; + if (this.#timestampConsumers === 0) { + this.#makeTimestamp = (ts) => ts; + } } - get shouldReport() { - return this.#channel.hasSubscribers; + /** + * Check if this metric is closed. + * @returns {boolean} + */ + get isClosed() { + return this.#closed; } /** - * Report a value to the metric. - * @param {number} value The value to report. - * @param {object} [meta] Additional metadata to include with the report. + * Unregister this metric and notify all consumers. + * Use for lifecycle management when a metric is no longer needed. + * Consumers will be notified via onMetricClosed() so they can clean up. */ - report(value, meta) { - // Skip report construction if there are no subscribers. - if (!this.shouldReport) return; - const report = new MetricReport(this.type, this.name, value, - ObjectAssign({}, this.meta, meta)); - this.#channel.publish(report); + close() { + if (this.#closed) return; + this.#closed = true; + + // Notify registry (which notifies consumers) + registry[kRemove](this); + + // Clear subscribers + this.#subscribers = []; + this.#timestampConsumers = 0; + } +} + +// ============================================================================= +// Layer 3: Consumer Infrastructure +// ============================================================================= + +const kAggregatorSum = 'sum'; +const kAggregatorLastValue = 'lastValue'; +const kAggregatorHistogram = 'histogram'; +const kAggregatorSummary = 'summary'; + +/** + * Built-in aggregators + */ +const builtinAggregators = { + [kAggregatorSum]: { + createState(config) { + return { + sum: 0, + count: 0, + monotonic: config.monotonic ?? false, + }; + }, + aggregate(state, value) { + if (state.monotonic && value < 0) { + // Ignore negative values for monotonic sums + return; + } + if (typeof value === 'bigint') { + // Convert sum to bigint if needed + state.sum = BigInt(state.sum) + value; + } else if (typeof state.sum === 'bigint') { + // Sum is already bigint, convert value + state.sum += BigInt(value); + } else { + state.sum += value; + } + state.count++; + }, + finalize(state) { + return { + sum: state.sum, + count: state.count, + }; + }, + resetState(state) { + state.sum = 0; + state.count = 0; + }, + }, + + [kAggregatorLastValue]: { + needsTimestamp: true, + createState() { + return { + value: undefined, + timestamp: undefined, + }; + }, + aggregate(state, value, timestamp) { + state.value = value; + state.timestamp = timestamp; + }, + finalize(state) { + return { + value: state.value, + timestamp: state.timestamp, + }; + }, + resetState(state) { + state.value = undefined; + state.timestamp = undefined; + }, + }, + + [kAggregatorHistogram]: { + createState(config) { + const boundaries = config.boundaries ?? [10, 50, 100, 500, 1000]; + const buckets = new Array(boundaries.length + 1); + for (let i = 0; i < buckets.length; i++) { + buckets[i] = 0; + } + return { + boundaries, + buckets, + sum: 0, + count: 0, + min: Infinity, + max: -Infinity, + }; + }, + aggregate(state, value) { + const numValue = typeof value === 'bigint' ? Number(value) : value; + state.sum += numValue; + state.count++; + if (numValue < state.min) state.min = numValue; + if (numValue > state.max) state.max = numValue; + + // Find bucket using binary search for O(log n) lookup + // Values <= boundary go in that bucket + const { boundaries, buckets } = state; + const len = boundaries.length; + + // Linear search for small arrays (faster due to less overhead) + if (len <= 8) { + let i = 0; + while (i < len && numValue > boundaries[i]) { + i++; + } + buckets[i]++; + return; + } + + // Binary search for larger arrays + let low = 0; + let high = len; + while (low < high) { + const mid = (low + high) >>> 1; + if (numValue > boundaries[mid]) { + low = mid + 1; + } else { + high = mid; + } + } + buckets[low]++; + }, + finalize(state) { + const buckets = []; + for (let i = 0; i < state.boundaries.length; i++) { + ArrayPrototypePush(buckets, { + le: state.boundaries[i], + count: state.buckets[i], + }); + } + ArrayPrototypePush(buckets, { + le: Infinity, + count: state.buckets[state.boundaries.length], + }); + + return { + buckets, + sum: state.sum, + count: state.count, + min: state.count > 0 ? state.min : undefined, + max: state.count > 0 ? state.max : undefined, + }; + }, + resetState(state) { + for (let i = 0; i < state.buckets.length; i++) { + state.buckets[i] = 0; + } + state.sum = 0; + state.count = 0; + state.min = Infinity; + state.max = -Infinity; + }, + }, + + [kAggregatorSummary]: { + createState(config) { + const quantiles = config.quantiles ?? [0.5, 0.9, 0.95, 0.99]; + // Resolution multiplier for fractional value precision. + // RecordableHistogram only supports integers, so we scale values up + // before recording and scale quantiles back down when finalizing. + // Default resolution of 1000 gives microsecond precision for ms values. + const resolution = config.resolution ?? 1000; + return { + quantiles, + resolution, + histogram: createHistogram(), + sum: 0, + count: 0, + // Track min/max separately with full precision (avoids HDRHistogram bucket rounding) + min: Infinity, + max: -Infinity, + }; + }, + aggregate(state, value) { + const numValue = typeof value === 'bigint' ? Number(value) : value; + // Scale up by resolution to preserve fractional precision + // e.g., 0.5ms * 1000 = 500 (stored as integer) + const scaledValue = MathRound(numValue * state.resolution); + // Clamp to valid range for RecordableHistogram (1 to 2^63-1) + if (scaledValue >= 1) { + state.histogram.record(scaledValue); + } + state.sum += numValue; + state.count++; + // Track exact min/max for full precision + if (numValue < state.min) state.min = numValue; + if (numValue > state.max) state.max = numValue; + }, + finalize(state) { + const quantileValues = {}; + const resolution = state.resolution; + for (const q of state.quantiles) { + // Scale back down to original units + quantileValues[q] = state.histogram.percentile(q * 100) / resolution; + } + + return { + quantiles: quantileValues, + sum: state.sum, + count: state.count, + // Use our exact min/max tracking (not HDRHistogram's bucket-rounded values) + min: state.count > 0 ? state.min : undefined, + max: state.count > 0 ? state.max : undefined, + }; + }, + resetState(state) { + state.histogram.reset(); + state.sum = 0; + state.count = 0; + state.min = Infinity; + state.max = -Infinity; + }, + }, +}; + +/** + * Get an aggregator by name or return custom aggregator object. + */ +function getAggregator(aggregation) { + if (typeof aggregation === 'string') { + const agg = builtinAggregators[aggregation]; + if (!agg) { + throw new ERR_INVALID_ARG_VALUE( + 'aggregation', + aggregation, + 'must be one of: sum, lastValue, histogram, summary', + ); + } + return agg; } + // Custom aggregator object + if (typeof aggregation === 'object' && aggregation !== null) { + validateFunction(aggregation.createState, 'aggregation.createState'); + validateFunction(aggregation.aggregate, 'aggregation.aggregate'); + validateFunction(aggregation.finalize, 'aggregation.finalize'); + return aggregation; + } + throw new ERR_INVALID_ARG_TYPE('aggregation', ['string', 'object'], aggregation); } +// ============================================================================= +// Per-Metric Subscriber Model (like diagnostics_channel) +// ============================================================================= + /** - * A timer that measures duration and reports the measured time via a callback. - * @typedef {Timer} Timer - * @property {number} start Start time of timer in ms. - * @property {number|undefined} end End time of timer in ms. If undefined, timer is still running. - * @property {number|undefined} duration Duration of timer in ms. If undefined, timer is still running. + * Reservoir sampler using Algorithm R. + * Maintains a fixed-size random sample of exemplars. + * See: https://en.wikipedia.org/wiki/Reservoir_sampling */ -class Timer { - #report; - #start; - #end; - #duration; +class ReservoirSampler { + #maxExemplars; + #extract; + #reservoir = []; + #count = 0; - /** - * Construct a new timer. - * @param {Function} report The function to call with the duration when stopped. - */ - constructor(report) { - validateFunction(report, 'report'); - this.#report = report; - this.#start = performance.now(); - this.#end = undefined; - this.#duration = undefined; + constructor(maxExemplars, extract) { + validateNumber(maxExemplars, 'maxExemplars', 1); + validateFunction(extract, 'extract'); + this.#maxExemplars = maxExemplars; + this.#extract = extract; } - get start() { - return this.#start; + sample(value, timestamp, attributes) { + // Extract trace context + const context = this.#extract(attributes); + if (!context || !context.traceId || !context.spanId) { + return; // Skip if no valid trace context + } + + const { traceId, spanId, filteredAttributes } = context; + const exemplar = new Exemplar(value, timestamp, traceId, spanId, filteredAttributes); + + this.#count++; + + // Reservoir sampling: Algorithm R + if (this.#reservoir.length < this.#maxExemplars) { + // Fill reservoir + ArrayPrototypePush(this.#reservoir, exemplar); + } else { + // Random replacement with decreasing probability + const j = MathRound(MathRandom() * this.#count); + if (j < this.#maxExemplars) { + this.#reservoir[j] = exemplar; + } + } } - get end() { - return this.#end; + getExemplars() { + return this.#reservoir; } - get duration() { - return this.#duration; + reset() { + this.#reservoir = []; + this.#count = 0; } +} - /** - * Stop the timer and report the duration via the callback. - * @returns {number} The duration in milliseconds. - */ - stop() { - if (this.#end !== undefined) return this.#duration; - this.#end = performance.now(); - this.#duration = this.#end - this.#start; - this.#report(this.#duration); - return this.#duration; +/** + * Boundary sampler for histograms. + * Maintains one exemplar per bucket boundary. + */ +class BoundarySampler { + #boundaries; + #extract; + #exemplars; // SafeMap + + constructor(boundaries, extract) { + if (!ArrayIsArray(boundaries)) { + throw new ERR_INVALID_ARG_TYPE('boundaries', 'Array', boundaries); + } + validateFunction(extract, 'extract'); + this.#boundaries = boundaries; + this.#extract = extract; + this.#exemplars = new SafeMap(); + } + + sample(value, timestamp, attributes) { + // Extract trace context + const context = this.#extract(attributes); + if (!context || !context.traceId || !context.spanId) { + return; // Skip if no valid trace context + } + + const { traceId, spanId, filteredAttributes } = context; + + // Find bucket index + const numValue = typeof value === 'bigint' ? Number(value) : value; + let bucketIndex = 0; + const len = this.#boundaries.length; + + // Linear search for small arrays (faster due to less overhead) + if (len <= 8) { + while (bucketIndex < len && numValue > this.#boundaries[bucketIndex]) { + bucketIndex++; + } + } else { + // Binary search for larger arrays + let low = 0; + let high = len; + while (low < high) { + const mid = (low + high) >>> 1; + if (numValue > this.#boundaries[mid]) { + low = mid + 1; + } else { + high = mid; + } + } + bucketIndex = low; + } + + // Store or replace with 10% probability + if (!this.#exemplars.has(bucketIndex) || MathRandom() < 0.1) { + const exemplar = new Exemplar(value, timestamp, traceId, spanId, filteredAttributes); + this.#exemplars.set(bucketIndex, exemplar); + } + } + + getExemplars() { + return ArrayFrom(this.#exemplars.values()); + } + + reset() { + this.#exemplars.clear(); + } +} + +/** + * Base class for metric subscribers. + * Each subscriber encapsulates the state for one consumer watching one metric. + * Subclasses implement different strategies for grouped vs ungrouped values. + */ +class MetricSubscriber { + #descriptor; + #aggregator; + #temporality; + + constructor(descriptor, aggregator, config) { + this.#descriptor = descriptor; + this.#aggregator = aggregator; + this.#temporality = config.temporality; + } + + get descriptor() { + return this.#descriptor; + } + + get aggregator() { + return this.#aggregator; + } + + get temporality() { + return this.#temporality; } /** - * Support `using` syntax to automatically stop the timer when done. + * Get a snapshot of this subscriber's data. + * @returns {object} Snapshot with descriptor, temporality, and dataPoints */ - [SymbolDispose]() { - this.stop(); + getSnapshot() { + return { + descriptor: this.#descriptor.toJSON(), + temporality: this.#temporality, + dataPoints: this.getDataPoints(), + }; } + + // Subclasses must implement: + // onValue(value, timestamp, attributes) - aggregate a value + // getDataPoints() - return array of finalized data points } /** - * Represents a snapshot of a value in time. Will report the value every time - * reset() is called, or when applyDelta() is called with a non-zero value. - * @typedef {Gauge} Gauge - * @extends Metric - * @property {number} value The current value of the gauge. + * Simple subscriber - all values go to a single state bucket. + * Used when groupByAttributes=false (default, fastest path). + * No attribute key lookup, no Map overhead. */ -class Gauge extends Metric { - #value; +class SimpleMetricSubscriber extends MetricSubscriber { + #state; + #hasData = false; + #exemplarSampler; + + constructor(descriptor, aggregator, config) { + super(descriptor, aggregator, config); + this.#state = aggregator.createState(config); + this.#exemplarSampler = config.exemplar; + } + + onValue(value, timestamp, attributes) { + this.#hasData = true; + this.aggregator.aggregate(this.#state, value, timestamp); + + // Sample for exemplar if sampler is configured + if (this.#exemplarSampler) { + this.#exemplarSampler.sample(value, timestamp, attributes); + } + } + + getDataPoints() { + if (!this.#hasData) { + return []; + } + const data = this.aggregator.finalize(this.#state); + data.attributes = kEmptyObject; + + // Add exemplars if present + if (this.#exemplarSampler) { + const exemplars = this.#exemplarSampler.getExemplars(); + if (exemplars.length > 0) { + data.exemplars = []; + for (let i = 0; i < exemplars.length; i++) { + ArrayPrototypePush(data.exemplars, exemplars[i].toJSON()); + } + } + } + + return [data]; + } /** - * @param {string} name The name of the gauge metric. - * @param {object} [meta] Additional metadata to include with the metric. + * Reset state for delta temporality. + * Called after collect() to reset aggregation state for the next interval. */ - constructor(name, meta) { - super('gauge', name, meta); - this.#value = 0; + reset() { + if (this.temporality === 'delta') { + this.#hasData = false; + if (this.aggregator.resetState) { + this.aggregator.resetState(this.#state); + } + // Reset exemplar sampler for delta temporality + if (this.#exemplarSampler) { + this.#exemplarSampler.reset(); + } + } } +} - get value() { - return this.#value; +// Maximum number of unique attribute combinations per grouped subscriber +// Prevents unbounded memory growth from high-cardinality attributes +const kMaxCardinalityLimit = 2000; + +/** + * Grouped subscriber - values bucketed by attribute key. + * Used when groupByAttributes=true. + * + * Implements cardinality limiting to prevent unbounded memory growth. + * Eviction behavior differs by temporality: + * - Delta: Evict oldest entries (data already exported) + * - Cumulative: Drop new entries (preserve historical data integrity) + */ +class GroupedMetricSubscriber extends MetricSubscriber { + #states; // Map + #config; + #cardinalityLimit; + #cardinalityWarned = false; // Only warn once per subscriber + #droppedCount = 0; // Count of dropped values for cumulative temporality + #exemplarSampler; + + constructor(descriptor, aggregator, config) { + super(descriptor, aggregator, config); + this.#states = new SafeMap(); + this.#config = config; + this.#cardinalityLimit = config.cardinalityLimit ?? kMaxCardinalityLimit; + this.#exemplarSampler = config.exemplar; + } + + onValue(value, timestamp, attributes) { + const attrKey = this.#getAttributeKey(attributes); + let entry = this.#states.get(attrKey); + if (!entry) { + // Enforce cardinality limit + if (this.#states.size >= this.#cardinalityLimit) { + // Warn on first limit hit + if (!this.#cardinalityWarned) { + this.#cardinalityWarned = true; + const behavior = this.temporality === 'cumulative' ? + 'New attribute combinations are being dropped' : + 'Oldest attribute combinations are being evicted'; + process.emitWarning( + `Metric '${this.descriptor.name}' reached cardinality limit of ${this.#cardinalityLimit}. ` + + `${behavior}. ` + + 'Consider using groupBy or normalizeAttributes to reduce cardinality.', + 'MetricsWarning', + ); + } + + // Different eviction strategies based on temporality + if (this.temporality === 'cumulative') { + // For cumulative: DROP the new value to preserve historical integrity + // Evicting old cumulative data would cause incorrect sums when the key reappears + this.#droppedCount++; + return; + } + // For delta: EVICT oldest entry (its data was already exported) + // Map iterates in insertion order, so first key is oldest + const { value: oldestKey } = this.#states.keys().next(); + this.#states.delete(oldestKey); + } + entry = { + state: this.aggregator.createState(this.#config), + attributes: this.#normalizeAttributes(attributes), + hasData: false, + }; + this.#states.set(attrKey, entry); + } + entry.hasData = true; + this.aggregator.aggregate(entry.state, value, timestamp); + + // Sample for exemplar if sampler is configured + if (this.#exemplarSampler) { + this.#exemplarSampler.sample(value, timestamp, attributes); + } } /** - * Set the gauge value. - * @param {number} value The value to set the gauge to. - * @param {object} [meta] Additional metadata to include with the report. + * Get the count of dropped values due to cardinality limiting. + * Only applicable for cumulative temporality. + * @returns {number} */ - reset(value = 0, meta) { - this.#value = value; - this.report(value, meta); + get droppedCount() { + return this.#droppedCount; + } + + getDataPoints() { + const dataPoints = []; + for (const { 1: entry } of this.#states) { + if (!entry.hasData) continue; + const data = this.aggregator.finalize(entry.state); + data.attributes = entry.attributes; + + // Add exemplars if present (shared across all attribute groups) + if (this.#exemplarSampler) { + const exemplars = this.#exemplarSampler.getExemplars(); + if (exemplars.length > 0) { + data.exemplars = []; + for (let i = 0; i < exemplars.length; i++) { + ArrayPrototypePush(data.exemplars, exemplars[i].toJSON()); + } + } + } + + ArrayPrototypePush(dataPoints, data); + } + return dataPoints; } /** - * Create a timer that will set this gauge to its duration when stopped. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Timer} A new timer instance. + * Reset state for delta temporality. + * Called after collect() to reset aggregation state for the next interval. */ - createTimer(meta) { - return new Timer((duration) => { - this.reset(duration, meta); - }); + reset() { + if (this.temporality === 'delta') { + for (const { 1: entry } of this.#states) { + entry.hasData = false; + if (this.aggregator.resetState) { + this.aggregator.resetState(entry.state); + } + } + // Reset exemplar sampler for delta temporality + if (this.#exemplarSampler) { + this.#exemplarSampler.reset(); + } + } + } + + #getAttributeKey(attributes) { + // Custom key function + if (this.#config.attributeKey) { + return this.#config.attributeKey(attributes); + } + + // If normalizeAttributes is configured, use normalized attrs for key + if (this.#config.normalizeAttributes) { + const normalized = this.#config.normalizeAttributes(attributes); + return attributeIdentity.getId(normalized); + } + + // Group by specific attributes + if (this.#config.groupBy) { + const grouped = {}; + for (const key of this.#config.groupBy) { + if (key in attributes) { + grouped[key] = attributes[key]; + } + } + return attributeIdentity.getId(grouped); + } + + // Default: use all attributes + return attributeIdentity.getId(attributes); + } + + #normalizeAttributes(attributes) { + if (this.#config.normalizeAttributes) { + return this.#config.normalizeAttributes(attributes); + } + if (this.#config.groupBy) { + const normalized = {}; + for (const key of this.#config.groupBy) { + if (key in attributes) { + normalized[key] = attributes[key]; + } + } + return normalized; + } + // Shallow copy to prevent external mutation of stored attributes + return ObjectAssign({}, attributes); } } /** - * An increasing or decreasing value. - * @typedef {Counter} Counter - * @extends Metric - * @property {number} value The current value of the counter. + * Factory function to create the appropriate subscriber type. + * @param {MetricDescriptor} descriptor - The metric descriptor + * @param {object} aggregator - The aggregator to use + * @param {object} config - The consumer's config for this metric + * @returns {MetricSubscriber} */ -class Counter extends Metric { - #value; +function createMetricSubscriber(descriptor, aggregator, config) { + if (config.groupByAttributes) { + return new GroupedMetricSubscriber(descriptor, aggregator, config); + } + return new SimpleMetricSubscriber(descriptor, aggregator, config); +} + +/** + * A consumer that aggregates metric values using the subscriber model. + * + * Instead of receiving all values via onValue() and doing identity lookups, + * each Consumer subscribes directly to the metrics it cares about (like + * diagnostics_channel). This eliminates per-value "is this metric relevant?" + * checks and moves that decision to subscription time. + */ +class Consumer { + #config; + #subscribers = []; // Array of MetricSubscriber for iteration + #observableSubscribers = []; // Array of { metric, subscriber } for observables + #unsubscribeFns = []; // Cleanup functions from metric._addSubscriber() + #timestampMetrics = []; // Metrics where we enabled timestamps (for cleanup) + #subscribedMetrics; // Set of metric names we've subscribed to + #pendingMetrics; // Set of metric names we're waiting for + #isWildcard; // Whether this consumer wants all metrics + #groupByAttributes; // Whether to differentiate values by attributes + #closed; + #lastCollectTime; // Timestamp of last collect() for delta startTime + #autoCollectTimer; // Timer for autoCollect() /** - * @param {string} name The name of the counter metric. - * @param {object} [meta] Additional metadata to include with the metric. + * @param {object} config + * @param {string} [config.defaultAggregation='sum'] + * @param {string} [config.defaultTemporality='cumulative'] + * @param {boolean} [config.groupByAttributes=false] - Enable attribute differentiation + * @param {object} [config.metrics] - Per-metric configuration */ - constructor(name, meta) { - super('counter', name, meta); - this.#value = 0; + constructor(config = kEmptyObject) { + validateObject(config, 'config'); + const metrics = config.metrics ?? kEmptyObject; + const metricNames = ObjectKeys(metrics); + + // Validate exemplar samplers + for (const metricName of metricNames) { + const metricConfig = metrics[metricName]; + if (metricConfig?.exemplar) { + const sampler = metricConfig.exemplar; + if (typeof sampler !== 'object' || sampler === null) { + throw new ERR_INVALID_ARG_TYPE( + `config.metrics['${metricName}'].exemplar`, + 'object', + sampler, + ); + } + validateFunction(sampler.sample, `config.metrics['${metricName}'].exemplar.sample`); + validateFunction(sampler.getExemplars, `config.metrics['${metricName}'].exemplar.getExemplars`); + validateFunction(sampler.reset, `config.metrics['${metricName}'].exemplar.reset`); + } + } + + this.#config = { + defaultAggregation: config.defaultAggregation ?? kAggregatorSum, + defaultTemporality: config.defaultTemporality ?? 'cumulative', + metrics, + }; + this.#groupByAttributes = config.groupByAttributes ?? false; + this.#closed = false; + this.#lastCollectTime = performance.now(); // Start time for first delta + this.#autoCollectTimer = null; + + // Track subscriptions + this.#subscribedMetrics = new SafeSet(); + + // Wildcard mode: no specific metrics = subscribe to ALL metrics + this.#isWildcard = metricNames.length === 0; + + if (this.#isWildcard) { + // Subscribe to all existing metrics + this.#pendingMetrics = null; + for (const metric of registry.list()) { + this.#subscribeToMetric(metric); + } + } else { + // Subscribe to specific metrics that exist, track pending ones + this.#pendingMetrics = new SafeSet(metricNames); + for (const metricName of metricNames) { + const metric = registry.get(metricName); + if (metric) { + this.#subscribeToMetric(metric); + this.#pendingMetrics.delete(metricName); + } + } + } + + // Register with registry to be notified of new metrics + registry.addConsumer(this); } - get value() { - return this.#value; + /** + * Called by registry when a new metric is created. + * @param {Metric} metric - The newly created metric + */ + onMetricCreated(metric) { + if (this.#closed) return; + + const name = metric.descriptor.name; + + // Wildcard: subscribe to all new metrics + if (this.#isWildcard) { + this.#subscribeToMetric(metric); + return; + } + + // Specific metrics: only subscribe if we're waiting for this one + if (this.#pendingMetrics && this.#pendingMetrics.has(name)) { + this.#subscribeToMetric(metric); + this.#pendingMetrics.delete(name); + } } /** - * Increment the counter. Negative values invert to positive. - * @param {number} [n] The amount to increment the counter by. Defaults to 1. - * @param {object} [meta] Additional metadata to include with the report. + * Called by registry when a metric is closed. + * Cleans up subscriptions and state for the closed metric. + * @param {Metric} metric - The closed metric */ - increment(n = 1, meta) { - if (typeof n === 'object') { - meta = n; - n = 1; + onMetricClosed(metric) { + if (this.#closed) return; + + const name = metric.descriptor.name; + + // Remove from subscribed metrics + this.#subscribedMetrics.delete(name); + + // Remove from observable subscribers + for (let i = this.#observableSubscribers.length - 1; i >= 0; i--) { + if (this.#observableSubscribers[i].metric === metric) { + ArrayPrototypeSplice(this.#observableSubscribers, i, 1); + } } - this.#value += n; - this.report(n, meta); + // Remove subscriber for this metric + // Note: The unsubscribe fn was already called when metric cleared its subscribers + for (let i = this.#subscribers.length - 1; i >= 0; i--) { + if (this.#subscribers[i].descriptor.name === name) { + ArrayPrototypeSplice(this.#subscribers, i, 1); + } + } + + // Remove from timestamp metrics + const tsIdx = ArrayPrototypeIndexOf(this.#timestampMetrics, metric); + if (tsIdx !== -1) { + ArrayPrototypeSplice(this.#timestampMetrics, tsIdx, 1); + } } /** - * Decrement the counter. Negative values invert to positive. - * @param {number} [n] The amount to decrement the counter by. Defaults to 1. - * @param {object} [meta] Additional metadata to include with the report. + * Collect all metrics. Samples observables and returns aggregated state. + * Returns an array of metric snapshots. + * @returns {Array} Array of metric snapshots */ - decrement(n = 1, meta) { - if (typeof n === 'object') { - meta = n; - n = 1; + collect() { + if (this.#closed) return []; + + // Sample observable metrics - each subscriber samples only for itself + // This maintains consumer isolation (no cross-consumer value leakage) + const observableSubscribers = this.#observableSubscribers; + for (let i = 0; i < observableSubscribers.length; i++) { + const { metric, subscriber } = observableSubscribers[i]; + metric.sample(subscriber); + } + + // Capture timestamps for this collection period + const startTime = this.#lastCollectTime; + const timestamp = performance.now(); + this.#lastCollectTime = timestamp; + + // Build snapshot - just iterate our subscribers + const metrics = []; + const subscribers = this.#subscribers; + for (let i = 0; i < subscribers.length; i++) { + const metricSnapshot = subscribers[i].getSnapshot(); + // Only include metrics that have data points + if (metricSnapshot.dataPoints.length > 0) { + metricSnapshot.timestamp = timestamp; + // For delta temporality, include startTime to define the time window + if (metricSnapshot.temporality === 'delta') { + metricSnapshot.startTime = startTime; + } + ArrayPrototypePush(metrics, metricSnapshot); + } + } + + // Reset delta temporality subscribers after snapshot + for (let i = 0; i < subscribers.length; i++) { + subscribers[i].reset(); } - this.#value -= n; - this.report(-n, meta); + return metrics; } /** - * Create a timer that will increment this counter with its duration when stopped. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Timer} A new timer instance. + * Start automatic periodic collection. + * @param {number} interval - Collection interval in milliseconds + * @param {Function} callback - Called with each snapshot + * @returns {Function} Stop function to cancel auto-collection */ - createTimer(meta) { - return new Timer((duration) => { - this.increment(duration, meta); - }); + autoCollect(interval, callback) { + validateNumber(interval, 'interval', 1); + validateFunction(callback, 'callback'); + + if (this.#closed) { + throw new ERR_INVALID_STATE('Consumer is closed'); + } + if (this.#autoCollectTimer) { + throw new ERR_INVALID_STATE('autoCollect is already active'); + } + + this.#autoCollectTimer = setInterval(() => { + try { + const snapshot = this.collect(); + callback(snapshot); + } catch (err) { + triggerUncaughtException(err, false); + } + }, interval); + + // Don't keep process alive just for metrics collection + this.#autoCollectTimer.unref(); + + // Return stop function + return () => { + if (this.#autoCollectTimer) { + clearInterval(this.#autoCollectTimer); + this.#autoCollectTimer = null; + } + }; + } + + /** + * Close this consumer and unregister from the registry. + */ + close() { + if (this.#closed) return; + this.#closed = true; + + // Stop auto-collection if active + if (this.#autoCollectTimer) { + clearInterval(this.#autoCollectTimer); + this.#autoCollectTimer = null; + } + + // Unsubscribe from all metrics + const unsubscribeFns = this.#unsubscribeFns; + for (let i = 0; i < unsubscribeFns.length; i++) { + unsubscribeFns[i](); + } + + // Disable timestamp generation for metrics where we enabled it + const timestampMetrics = this.#timestampMetrics; + for (let i = 0; i < timestampMetrics.length; i++) { + timestampMetrics[i][kDisableTimestamp](); + } + + this.#subscribers = []; + this.#observableSubscribers = []; + this.#unsubscribeFns = []; + this.#timestampMetrics = []; + this.#subscribedMetrics.clear(); + if (this.#pendingMetrics) { + this.#pendingMetrics.clear(); + } + + registry.removeConsumer(this); + } + + #subscribeToMetric(metric) { + const name = metric.descriptor.name; + + // Avoid duplicate subscriptions + if (this.#subscribedMetrics.has(name)) { + return; + } + this.#subscribedMetrics.add(name); + + const metricConfig = this.#getMetricConfig(metric.descriptor); + const aggregator = getAggregator(metricConfig.aggregation); + + // Enable timestamp generation if this aggregator needs it + if (aggregator.needsTimestamp) { + metric[kEnableTimestamp](); + ArrayPrototypePush(this.#timestampMetrics, metric); + } + + // Factory creates the right subscriber type (Simple or Grouped) + const subscriber = createMetricSubscriber( + metric.descriptor, + aggregator, + metricConfig, + ); + + ArrayPrototypePush(this.#subscribers, subscriber); + const unsubscribe = metric[kAddSubscriber](subscriber); + ArrayPrototypePush(this.#unsubscribeFns, unsubscribe); + + // Track observable metrics separately for isolated sampling during collect() + if (metric.isObservable) { + ArrayPrototypePush(this.#observableSubscribers, { metric, subscriber }); + } + } + + #getMetricConfig(descriptor) { + const perMetric = this.#config.metrics[descriptor.name]; + return { + aggregation: perMetric?.aggregation ?? this.#config.defaultAggregation, + temporality: perMetric?.temporality ?? this.#config.defaultTemporality, + monotonic: perMetric?.monotonic ?? false, + boundaries: perMetric?.boundaries, + quantiles: perMetric?.quantiles, + groupBy: perMetric?.groupBy, + attributeKey: perMetric?.attributeKey, + normalizeAttributes: perMetric?.normalizeAttributes, + cardinalityLimit: perMetric?.cardinalityLimit, + groupByAttributes: this.#groupByAttributes, + exemplar: perMetric?.exemplar, + }; } } +// ============================================================================= +// Global Registry +// ============================================================================= + +const kMetrics = Symbol('metrics'); +const kObservables = Symbol('observables'); +const kConsumers = Symbol('consumers'); + /** - * A gauge which updates its value by calling a function when sampled. - * @typedef {PullGauge} PullGauge - * @extends Metric - * @property {number} value The current value of the pull gauge. + * The global metric registry (MeterProvider equivalent in OTel terms). + * Manages all metrics and their lifecycle. */ -class PullGauge extends Metric { - #puller; - #value; +class MetricRegistry { + [kMetrics] = new SafeMap(); + [kObservables] = []; // Direct array for fast iteration + [kConsumers] = []; /** - * Construct a new pull gauge. - * @param {string} name The name of the pull gauge metric. - * @param {Function} puller The function to call to get the gauge value. - * @param {object} [meta] Additional metadata to include with the metric. + * Check if a metric with this name already exists (for singleton pattern). + * @param {string} name - The metric name + * @returns {Metric|undefined} The existing metric if found */ - constructor(name, puller, meta) { - super('pullGauge', name, meta); - validateFunction(puller, 'puller'); - this.#puller = puller; - this.#value = 0; + get(name) { + return this[kMetrics].get(name); } - get value() { - return this.#value; + /** + * Add a metric to the registry (internal, called by Metric constructor). + * @param {Metric} metric - The metric to add + */ + [kAdd](metric) { + const name = metric.descriptor.name; + this[kMetrics].set(name, metric); + + // Track observables in a direct array (no generator overhead) + if (metric.isObservable) { + ArrayPrototypePush(this[kObservables], metric); + } + + // Notify existing consumers that a new metric was created + // They can decide if they want to subscribe to it + const consumers = this[kConsumers]; + for (let i = 0; i < consumers.length; i++) { + const consumer = consumers[i]; + if (consumer.onMetricCreated) { + consumer.onMetricCreated(metric); + } + } } /** - * Sample the gauge by calling the function and reporting the value. - * @param {object} [meta] Additional metadata to include with the report. + * Remove a metric from the registry and notify consumers. + * @param {Metric} metric - The metric to remove */ - sample(meta) { - const value = this.#puller(); - this.#value = value; - this.report(value, meta); - return value; + [kRemove](metric) { + this[kMetrics].delete(metric.descriptor.name); + + // Remove from observables list if it was observable + if (metric.isObservable) { + const idx = ArrayPrototypeIndexOf(this[kObservables], metric); + if (idx !== -1) { + ArrayPrototypeSplice(this[kObservables], idx, 1); + } + } + + // Notify consumers that a metric was closed + // They can clean up their subscriptions and state + const consumers = this[kConsumers]; + for (let i = 0; i < consumers.length; i++) { + const consumer = consumers[i]; + if (consumer.onMetricClosed) { + consumer.onMetricClosed(metric); + } + } + } + + addConsumer(consumer) { + ArrayPrototypePush(this[kConsumers], consumer); + } + + removeConsumer(consumer) { + const idx = ArrayPrototypeIndexOf(this[kConsumers], consumer); + if (idx !== -1) { + ArrayPrototypeSplice(this[kConsumers], idx, 1); + } + } + + /** + * Get all observable metrics (direct array, no generator). + * @returns {Array} + */ + observables() { + return this[kObservables]; + } + + list() { + return [...this[kMetrics].values()]; } } +const registry = new MetricRegistry(); + +// ============================================================================= +// Public API +// ============================================================================= + /** - * Create a counter metric. - * @param {string} name The name of the counter. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Counter} The counter metric. + * Create a new metric or return existing one with the same name (singleton pattern). + * @param {string} name - The metric name + * @param {object} [options] + * @param {string} [options.unit] - The unit of measurement + * @param {string} [options.description] - Human-readable description + * @param {Function} [options.observable] - Callback for observable metrics + * @returns {Metric} */ -function createCounter(name, meta) { - return new Counter(name, meta); +function create(name, options = kEmptyObject) { + // Check for existing metric first (singleton pattern) + const existing = registry.get(name); + if (existing) { + // Warn if options differ from existing metric + const desc = existing.descriptor; + if ((options.unit !== undefined && options.unit !== desc.unit) || + (options.description !== undefined && options.description !== desc.description) || + (options.observable !== undefined && existing.isObservable !== (options.observable !== undefined))) { + process.emitWarning( + `Metric '${name}' already exists with different options. ` + + 'Returning existing metric. Options from this call are ignored.', + 'MetricsWarning', + ); + } + return existing; + } + return new Metric(name, options); } /** - * Create a gauge metric. - * @param {string} name The name of the gauge. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Gauge} The gauge metric. + * Create a new consumer. + * + * Config can be passed in two formats: + * 1. Direct metric configs at top level: + * { 'metric.name': { aggregation: 'sum' } } + * 2. Nested under 'metrics' key (with optional defaults): + * { defaultAggregation: 'sum', metrics: { 'metric.name': { ... } } } + * @param {object} [config] + * @returns {Consumer} */ -function createGauge(name, meta) { - return new Gauge(name, meta); +function createConsumer(config = kEmptyObject) { + // If config has 'metrics' key, use it directly + if (config.metrics) { + return new Consumer(config); + } + + // Otherwise, separate reserved keys from metric configs + const { defaultAggregation, defaultTemporality, groupByAttributes, ...metrics } = config; + const normalized = { + defaultAggregation, + defaultTemporality, + groupByAttributes, + metrics, + }; + + return new Consumer(normalized); } /** - * Create a raw metric. - * @param {string} type The type of metric to create (e.g., 'gauge', 'counter'). - * @param {string} name The name of the metric. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {Metric} The raw metric. + * List all registered metrics. + * @returns {Array} */ -function createMetric(type, name, meta) { - return new Metric(type, name, meta); +function list() { + return registry.list(); } /** - * Create a pull gauge metric. - * @param {string} name The name of the pull gauge. - * @param {Function} puller The function to call to get the gauge value. - * @param {object} [meta] Additional metadata to include with the report. - * @returns {PullGauge} The pull gauge metric. + * Get a metric by name. + * @param {string} name + * @returns {Metric|undefined} */ -function createPullGauge(name, puller, meta) { - return new PullGauge(name, puller, meta); +function get(name) { + return registry.get(name); } +// ============================================================================= +// Optional: DiagnosticsChannel Consumer +// ============================================================================= + +/** + * Special subscriber that publishes values to diagnostics_channel. + * Unlike regular subscribers, this doesn't aggregate - just forwards. + */ +class DCMetricSubscriber { + #descriptor; + + constructor(descriptor) { + this.#descriptor = descriptor; + } + + onValue(value, timestamp, attributes) { + const ch = this.#descriptor.channel; + if (ch.hasSubscribers) { + // Create entry object only when there are subscribers + ch.publish({ + descriptor: this.#descriptor, + value, + attributes, + timestamp, + }); + } + } +} + +let dcConsumer = null; + +/** + * Create a singleton consumer that forwards all values to diagnostics_channel. + * Uses the subscriber model to receive values directly from metrics. + * @returns {object} + */ +function createDiagnosticsChannelConsumer() { + if (dcConsumer) return dcConsumer; + + const unsubscribeFns = []; + const timestampMetrics = []; // Track metrics where we enabled timestamps + const subscribedMetrics = new SafeSet(); + const observableSubscribers = []; // Track { metric, subscriber } for observables + + function subscribeToMetric(metric) { + const name = metric.descriptor.name; + if (subscribedMetrics.has(name)) return; + subscribedMetrics.add(name); + + // DC consumer needs timestamps + metric[kEnableTimestamp](); + ArrayPrototypePush(timestampMetrics, metric); + + const subscriber = new DCMetricSubscriber(metric.descriptor); + const unsubscribe = metric[kAddSubscriber](subscriber); + ArrayPrototypePush(unsubscribeFns, unsubscribe); + + // Track observable metrics for isolated sampling + if (metric.isObservable) { + ArrayPrototypePush(observableSubscribers, { metric, subscriber }); + } + } + + // Subscribe to all existing metrics + for (const metric of registry.list()) { + subscribeToMetric(metric); + } + + dcConsumer = { + // Called by registry when a new metric is created + onMetricCreated(metric) { + subscribeToMetric(metric); + }, + // Called by registry when a metric is closed + onMetricClosed(metric) { + const name = metric.descriptor.name; + subscribedMetrics.delete(name); + + // Remove from observable subscribers + for (let i = observableSubscribers.length - 1; i >= 0; i--) { + if (observableSubscribers[i].metric === metric) { + ArrayPrototypeSplice(observableSubscribers, i, 1); + } + } + + // Remove from timestamp metrics + const tsIdx = ArrayPrototypeIndexOf(timestampMetrics, metric); + if (tsIdx !== -1) { + ArrayPrototypeSplice(timestampMetrics, tsIdx, 1); + } + }, + collect() { + // Sample observables - each subscriber samples only for itself + for (let i = 0; i < observableSubscribers.length; i++) { + const { metric, subscriber } = observableSubscribers[i]; + metric.sample(subscriber); + } + return null; + }, + close() { + // Unsubscribe from all metrics + for (let i = 0; i < unsubscribeFns.length; i++) { + unsubscribeFns[i](); + } + // Disable timestamp generation for metrics where we enabled it + for (let i = 0; i < timestampMetrics.length; i++) { + timestampMetrics[i][kDisableTimestamp](); + } + unsubscribeFns.length = 0; + timestampMetrics.length = 0; + observableSubscribers.length = 0; + subscribedMetrics.clear(); + registry.removeConsumer(this); + dcConsumer = null; + }, + }; + + registry.addConsumer(dcConsumer); + return dcConsumer; +} + +// ============================================================================= +// Exports +// ============================================================================= + module.exports = { - MetricReport, + // Classes + MetricDescriptor, Metric, - Gauge, - Counter, Timer, - PullGauge, + Consumer, + InstrumentationScope, + Exemplar, + ReservoirSampler, + BoundarySampler, - createCounter, - createGauge, - createMetric, - createPullGauge, + // Factory functions + create, + createConsumer, + createDiagnosticsChannelConsumer, + + // Utilities + list, + get, }; + +// Getter for diagnosticsChannelConsumer singleton (returns null if not created) +ObjectDefineProperty(module.exports, 'diagnosticsChannelConsumer', { + __proto__: null, + configurable: false, + enumerable: true, + get() { + return dcConsumer; + }, +}); diff --git a/lib/internal/util/object_identity.js b/lib/internal/util/object_identity.js new file mode 100644 index 00000000000000..b519289f751afb --- /dev/null +++ b/lib/internal/util/object_identity.js @@ -0,0 +1,304 @@ +'use strict'; + +/** + * Object identity utility using xxHash32 for content-based equality. + * + * xxHash32 algorithm based on the official specification: + * https://github.com/Cyan4973/xxHash/blob/dev/doc/xxhash_spec.md + * + * This is a pure JavaScript implementation suitable for Node.js internals. + * No external dependencies required. + * + * Performance characteristics: + * - Empty object: O(1) + * - 1 key: O(1) + * - 2-3 keys: O(k) with inline sort + * - 4+ keys: O(k log k) with sorted keys cache + * + * Collision probability at 2000 items (default cardinality limit): ~0.05% + */ + +const { + ArrayPrototypePush, + ArrayPrototypeShift, + ArrayPrototypeSplice, + MathImul, + ObjectKeys, + SafeMap, + String, + StringPrototypeCharCodeAt, +} = primordials; + +const { kEmptyObject } = require('internal/util'); + +// xxHash32 prime constants from specification +const PRIME32_1 = 0x9E3779B1; +const PRIME32_2 = 0x85EBCA77; +const PRIME32_3 = 0xC2B2AE3D; +const PRIME32_5 = 0x165667B1; + +// Default seed +const SEED = 0; + +/** + * 32-bit rotate left operation. + * @param {number} value - 32-bit unsigned integer + * @param {number} amount - Rotation amount (0-31) + * @returns {number} Rotated value + */ +function rotl32(value, amount) { + return ((value << amount) | (value >>> (32 - amount))) >>> 0; +} + +/** + * xxHash32 implementation for strings. + * Optimized for attribute key-value pairs (typically ASCII/UTF-8 strings). + * @param {string} str - String to hash + * @param {number} seed - Hash seed (default: 0) + * @returns {number} 32-bit hash value + */ +function xxHash32(str, seed = SEED) { + const len = str.length; + let h32; + let index = 0; + + if (len >= 16) { + // For longer strings, use the full algorithm with accumulators + let v1 = (seed + PRIME32_1 + PRIME32_2) >>> 0; + let v2 = (seed + PRIME32_2) >>> 0; + let v3 = (seed + 0) >>> 0; + let v4 = (seed - PRIME32_1) >>> 0; + + // Process 16-byte blocks + const limit = len - 16; + while (index <= limit) { + // Read 4 bytes (1 char = 2 bytes in UTF-16, but we treat as byte stream) + // For attribute strings (typically ASCII), this is efficient + const k1 = (StringPrototypeCharCodeAt(str, index) | + (StringPrototypeCharCodeAt(str, index + 1) << 8) | + (StringPrototypeCharCodeAt(str, index + 2) << 16) | + (StringPrototypeCharCodeAt(str, index + 3) << 24)) >>> 0; + index += 4; + + v1 = (v1 + MathImul(k1, PRIME32_2)) >>> 0; + v1 = rotl32(v1, 13); + v1 = MathImul(v1, PRIME32_1) >>> 0; + + const k2 = (StringPrototypeCharCodeAt(str, index) | + (StringPrototypeCharCodeAt(str, index + 1) << 8) | + (StringPrototypeCharCodeAt(str, index + 2) << 16) | + (StringPrototypeCharCodeAt(str, index + 3) << 24)) >>> 0; + index += 4; + + v2 = (v2 + MathImul(k2, PRIME32_2)) >>> 0; + v2 = rotl32(v2, 13); + v2 = MathImul(v2, PRIME32_1) >>> 0; + + const k3 = (StringPrototypeCharCodeAt(str, index) | + (StringPrototypeCharCodeAt(str, index + 1) << 8) | + (StringPrototypeCharCodeAt(str, index + 2) << 16) | + (StringPrototypeCharCodeAt(str, index + 3) << 24)) >>> 0; + index += 4; + + v3 = (v3 + MathImul(k3, PRIME32_2)) >>> 0; + v3 = rotl32(v3, 13); + v3 = MathImul(v3, PRIME32_1) >>> 0; + + const k4 = (StringPrototypeCharCodeAt(str, index) | + (StringPrototypeCharCodeAt(str, index + 1) << 8) | + (StringPrototypeCharCodeAt(str, index + 2) << 16) | + (StringPrototypeCharCodeAt(str, index + 3) << 24)) >>> 0; + index += 4; + + v4 = (v4 + MathImul(k4, PRIME32_2)) >>> 0; + v4 = rotl32(v4, 13); + v4 = MathImul(v4, PRIME32_1) >>> 0; + } + + // Merge accumulators + h32 = rotl32(v1, 1) + rotl32(v2, 7) + rotl32(v3, 12) + rotl32(v4, 18); + h32 = h32 >>> 0; + } else { + // Short string path + h32 = (seed + PRIME32_5) >>> 0; + } + + h32 = (h32 + len) >>> 0; + + // Process remaining bytes + while (index < len) { + const k1 = StringPrototypeCharCodeAt(str, index); + index++; + + h32 = (h32 + MathImul(k1, PRIME32_5)) >>> 0; + h32 = rotl32(h32, 11); + h32 = MathImul(h32, PRIME32_1) >>> 0; + } + + // Final avalanche mixing + h32 ^= h32 >>> 15; + h32 = MathImul(h32, PRIME32_2) >>> 0; + h32 ^= h32 >>> 13; + h32 = MathImul(h32, PRIME32_3) >>> 0; + h32 ^= h32 >>> 16; + + return h32 >>> 0; +} + +/** + * Get sorted keys for an object, using cache when possible. + * @param {Array} keys - Unsorted keys + * @param {Map} cache - Sorted keys cache + * @param {Array} cacheOrder - LRU order tracking + * @param {number} maxCacheSize - Maximum cache size + * @returns {Array} Sorted keys + */ +function getSortedKeys(keys, cache, cacheOrder, maxCacheSize) { + const len = keys.length; + + // Inline sort for 2-3 keys (faster than array sort overhead) + if (len === 2) { + return keys[0] < keys[1] ? keys : [keys[1], keys[0]]; + } + if (len === 3) { + // Inline 3-element sort network + let a = keys[0]; let b = keys[1]; let c = keys[2]; + if (a > b) ({ 0: a, 1: b } = [b, a]); + if (b > c) ({ 0: b, 1: c } = [c, b]); + if (a > b) ({ 0: a, 1: b } = [b, a]); + return [a, b, c]; + } + + // For 4+ keys, use cache + const shape = keys.join(','); + let sorted = cache.get(shape); + + if (sorted !== undefined) { + // Cache hit - update LRU order + const idx = cacheOrder.indexOf(shape); + if (idx !== -1) { + // Move to end (most recently used) + ArrayPrototypeSplice(cacheOrder, idx, 1); + ArrayPrototypePush(cacheOrder, shape); + } + return sorted; + } + + // Cache miss - sort and store + sorted = [...keys].sort(); + + // Evict least recently used if at capacity + if (cache.size >= maxCacheSize) { + const evictShape = ArrayPrototypeShift(cacheOrder); + cache.delete(evictShape); + } + + cache.set(shape, sorted); + ArrayPrototypePush(cacheOrder, shape); + + return sorted; +} + +/** + * ObjectIdentity provides deterministic numeric hashing for plain objects. + * Uses xxHash32 algorithm following the industry standard approach used by + * OpenTelemetry and Prometheus. + * + * Each instance maintains an isolated LRU cache for sorted keys arrays. + */ +class ObjectIdentity { + #sortedKeysCache; + #sortedKeysCacheOrder; + #maxSortedKeysCacheSize; + + /** + * Create a new ObjectIdentity instance with configurable cache size. + * @param {object} [options] + * @param {number} [options.sortedKeysCacheSize=1000] - Max sorted keys cache entries + */ + constructor(options = kEmptyObject) { + this.#sortedKeysCache = new SafeMap(); + this.#sortedKeysCacheOrder = []; + this.#maxSortedKeysCacheSize = options.sortedKeysCacheSize ?? 1000; + } + + /** + * Get a numeric hash for an object based on its content. + * + * Objects with the same key-value pairs produce the same hash: + * getId({ a: 1, b: 2 }) === getId({ b: 2, a: 1 }) + * + * Uses xxHash32 algorithm for fast, well-distributed hashing. + * Hash collisions are possible but extremely rare at typical cardinalities + * (~0.05% at 2000 unique attribute combinations). + * @param {object} obj - Object to hash + * @returns {number} 32-bit unsigned integer hash (0 for empty object) + * @example + * const oid = new ObjectIdentity(); + * const hash1 = oid.getId({ method: 'GET', status: 200 }); + * const hash2 = oid.getId({ status: 200, method: 'GET' }); + * // hash1 === hash2 (order doesn't matter) + */ + getId(obj) { + // Fast path: empty object constant + if (obj === kEmptyObject) { + return 0; + } + + const keys = ObjectKeys(obj); + const len = keys.length; + + if (len === 0) { + return 0; + } + + // For single key, hash directly (no sorting needed) + if (len === 1) { + const key = keys[0]; + const value = String(obj[key]); + // Hash: key + separator + value + return xxHash32(key + '\x00' + value); + } + + // Multiple keys: sort for canonical order, then hash + const sortedKeys = getSortedKeys( + keys, + this.#sortedKeysCache, + this.#sortedKeysCacheOrder, + this.#maxSortedKeysCacheSize, + ); + + // Build canonical string representation and hash it + // Format: key1\x00value1\x00key2\x00value2... + let canonical = sortedKeys[0] + '\x00' + String(obj[sortedKeys[0]]); + for (let i = 1; i < len; i++) { + const key = sortedKeys[i]; + canonical += '\x00' + key + '\x00' + String(obj[key]); + } + + return xxHash32(canonical); + } + + /** + * Clear sorted keys cache. Useful for testing or memory reclamation. + */ + clearCache() { + this.#sortedKeysCache.clear(); + this.#sortedKeysCacheOrder.length = 0; + } + + /** + * Get current cache statistics. + * @returns {object} Cache stats + */ + getCacheStats() { + return { + size: this.#sortedKeysCache.size, + max: this.#maxSortedKeysCacheSize, + }; + } +} + +module.exports = { + ObjectIdentity, +}; diff --git a/test/parallel/test-metrics-aggregator-histogram.js b/test/parallel/test-metrics-aggregator-histogram.js new file mode 100644 index 00000000000000..9c96f78927d338 --- /dev/null +++ b/test/parallel/test-metrics-aggregator-histogram.js @@ -0,0 +1,77 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test histogram aggregator with custom boundaries +const m = metrics.create('test.histogram', { unit: 'ms' }); + +const consumer = metrics.createConsumer({ + 'test.histogram': { + aggregation: 'histogram', + boundaries: [10, 50, 100, 500], + }, +}); + +// Record values across different buckets +m.record(5); // bucket: <= 10 +m.record(25); // bucket: <= 50 +m.record(75); // bucket: <= 100 +m.record(200); // bucket: <= 500 +m.record(750); // bucket: > 500 (Infinity) + +const result = consumer.collect(); +const hist = result[0].dataPoints[0]; + +assert.strictEqual(hist.count, 5); +assert.strictEqual(hist.sum, 1055); +assert.strictEqual(hist.min, 5); +assert.strictEqual(hist.max, 750); + +// Check bucket counts +assert.strictEqual(hist.buckets.length, 5); // 4 boundaries + 1 overflow +assert.strictEqual(hist.buckets[0].le, 10); +assert.strictEqual(hist.buckets[0].count, 1); +assert.strictEqual(hist.buckets[1].le, 50); +assert.strictEqual(hist.buckets[1].count, 1); +assert.strictEqual(hist.buckets[2].le, 100); +assert.strictEqual(hist.buckets[2].count, 1); +assert.strictEqual(hist.buckets[3].le, 500); +assert.strictEqual(hist.buckets[3].count, 1); +assert.strictEqual(hist.buckets[4].count, 1); // Infinity bucket + +consumer.close(); + +// Test histogram with default boundaries +const m2 = metrics.create('test.histogram.default'); + +const consumer2 = metrics.createConsumer({ + 'test.histogram.default': { aggregation: 'histogram' }, +}); + +m2.record(5); +m2.record(25); +m2.record(75); + +const result2 = consumer2.collect(); +const hist2 = result2[0].dataPoints[0]; +assert.strictEqual(hist2.count, 3); +assert.ok(hist2.buckets.length > 0); + +consumer2.close(); + +// Test empty histogram (no recordings) +metrics.create('test.histogram.empty'); + +const consumer3 = metrics.createConsumer({ + 'test.histogram.empty': { aggregation: 'histogram' }, +}); + +// Don't record anything, then collect +// This should not create a data point since there's no data +const result3 = consumer3.collect(); +assert.strictEqual(result3.length, 0); + +consumer3.close(); diff --git a/test/parallel/test-metrics-aggregator-lastvalue.js b/test/parallel/test-metrics-aggregator-lastvalue.js new file mode 100644 index 00000000000000..88455652d8bb96 --- /dev/null +++ b/test/parallel/test-metrics-aggregator-lastvalue.js @@ -0,0 +1,47 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test lastValue aggregator +const m = metrics.create('test.lastvalue'); + +const consumer = metrics.createConsumer({ + 'test.lastvalue': { aggregation: 'lastValue' }, +}); + +m.record(10); +let result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 10); +assert.ok(result[0].dataPoints[0].timestamp > 0); + +m.record(42); +result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 42); + +m.record(0); +result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 0); + +consumer.close(); + +// Test lastValue with observable +let gaugeValue = 100; +metrics.create('test.lastvalue.observable', { + observable: (metric) => { metric.record(gaugeValue); }, +}); + +const consumer2 = metrics.createConsumer({ + 'test.lastvalue.observable': { aggregation: 'lastValue' }, +}); + +result = consumer2.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 100); + +gaugeValue = 200; +result = consumer2.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 200); + +consumer2.close(); diff --git a/test/parallel/test-metrics-aggregator-sum.js b/test/parallel/test-metrics-aggregator-sum.js new file mode 100644 index 00000000000000..d3f336f53d6ff1 --- /dev/null +++ b/test/parallel/test-metrics-aggregator-sum.js @@ -0,0 +1,76 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test sum aggregator +const m = metrics.create('test.sum'); + +const consumer = metrics.createConsumer({ + 'test.sum': { aggregation: 'sum' }, +}); + +m.record(10); +m.record(20); +m.record(30); + +let result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].sum, 60); +assert.strictEqual(result[0].dataPoints[0].count, 3); + +consumer.close(); + +// Test monotonic sum (ignores negative values) +const m2 = metrics.create('test.sum.monotonic'); + +const consumer2 = metrics.createConsumer({ + 'test.sum.monotonic': { aggregation: 'sum', monotonic: true }, +}); + +m2.record(10); +m2.record(-5); // Should be ignored +m2.record(20); +m2.record(-10); // Should be ignored + +result = consumer2.collect(); +assert.strictEqual(result[0].dataPoints[0].sum, 30); // 10 + 20 +assert.strictEqual(result[0].dataPoints[0].count, 2); // Only positive values counted + +consumer2.close(); + +// Test non-monotonic sum (includes negative values) +const m3 = metrics.create('test.sum.nonmonotonic'); + +const consumer3 = metrics.createConsumer({ + 'test.sum.nonmonotonic': { aggregation: 'sum', monotonic: false }, +}); + +m3.record(10); +m3.record(-5); +m3.record(20); + +result = consumer3.collect(); +assert.strictEqual(result[0].dataPoints[0].sum, 25); // 10 - 5 + 20 +assert.strictEqual(result[0].dataPoints[0].count, 3); + +consumer3.close(); + +// Test cumulative temporality (default) +const m4 = metrics.create('test.sum.cumulative'); + +const consumer4 = metrics.createConsumer({ + 'test.sum.cumulative': { aggregation: 'sum', temporality: 'cumulative' }, +}); + +m4.record(10); +result = consumer4.collect(); +assert.strictEqual(result[0].dataPoints[0].sum, 10); +assert.strictEqual(result[0].temporality, 'cumulative'); + +m4.record(20); +result = consumer4.collect(); +assert.strictEqual(result[0].dataPoints[0].sum, 30); // Cumulative + +consumer4.close(); diff --git a/test/parallel/test-metrics-aggregator-summary.js b/test/parallel/test-metrics-aggregator-summary.js new file mode 100644 index 00000000000000..924d8dffba3479 --- /dev/null +++ b/test/parallel/test-metrics-aggregator-summary.js @@ -0,0 +1,58 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test summary aggregator with custom quantiles +const m = metrics.create('test.summary', { unit: 'ms' }); + +const consumer = metrics.createConsumer({ + 'test.summary': { + aggregation: 'summary', + quantiles: [0.5, 0.9, 0.99], + }, +}); + +// Record 100 values (1 to 100) +for (let i = 1; i <= 100; i++) { + m.record(i); +} + +const result = consumer.collect(); +const summary = result[0].dataPoints[0]; + +assert.strictEqual(summary.count, 100); +assert.strictEqual(summary.sum, 5050); // Sum of 1 to 100 +assert.strictEqual(summary.min, 1); +assert.strictEqual(summary.max, 100); + +// Check quantiles (approximate values due to histogram-based calculation) +assert.ok(summary.quantiles['0.5'] >= 45 && summary.quantiles['0.5'] <= 55); +assert.ok(summary.quantiles['0.9'] >= 85 && summary.quantiles['0.9'] <= 95); +assert.ok(summary.quantiles['0.99'] >= 95 && summary.quantiles['0.99'] <= 100); + +consumer.close(); + +// Test summary with default quantiles +const m2 = metrics.create('test.summary.default'); + +const consumer2 = metrics.createConsumer({ + 'test.summary.default': { aggregation: 'summary' }, +}); + +for (let i = 1; i <= 50; i++) { + m2.record(i); +} + +const result2 = consumer2.collect(); +const summary2 = result2[0].dataPoints[0]; + +// Default quantiles should include 0.5, 0.9, 0.95, 0.99 +assert.ok('0.5' in summary2.quantiles); +assert.ok('0.9' in summary2.quantiles); +assert.ok('0.95' in summary2.quantiles); +assert.ok('0.99' in summary2.quantiles); + +consumer2.close(); diff --git a/test/parallel/test-metrics-auto-collect.js b/test/parallel/test-metrics-auto-collect.js new file mode 100644 index 00000000000000..dcf4401a722712 --- /dev/null +++ b/test/parallel/test-metrics-auto-collect.js @@ -0,0 +1,114 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test autoCollect basic functionality +{ + const m = metrics.create('test.autocollect.basic'); + const consumer = metrics.createConsumer({ + 'test.autocollect.basic': { aggregation: 'sum', temporality: 'delta' }, + }); + + let callCount = 0; + const stop = consumer.autoCollect(20, common.mustCallAtLeast((result) => { + callCount++; + if (callCount === 1) { + // First result should have our recorded value + assert.strictEqual(result.length, 1); + assert.strictEqual(result[0].dataPoints[0].sum, 42); + } else if (callCount === 2) { + // Second result should be empty (no new values, delta reset) + assert.strictEqual(result.length, 0); + stop(); + consumer.close(); + } + }, 2)); + + // Record a value before first collection + m.record(42); +} + +// Test autoCollect throws if already active +{ + const consumer = metrics.createConsumer(); + const stop = consumer.autoCollect(1000, () => {}); + + assert.throws(() => { + consumer.autoCollect(1000, () => {}); + }, { + code: 'ERR_INVALID_STATE', + message: /autoCollect is already active/, + }); + + stop(); + consumer.close(); +} + +// Test autoCollect throws if consumer is closed +{ + const consumer = metrics.createConsumer(); + consumer.close(); + + assert.throws(() => { + consumer.autoCollect(1000, () => {}); + }, { + code: 'ERR_INVALID_STATE', + message: /Consumer is closed/, + }); +} + +// Test stop function can be called multiple times safely +{ + const consumer = metrics.createConsumer(); + const stop = consumer.autoCollect(1000, () => {}); + + stop(); + stop(); // Should not throw + consumer.close(); +} + +// Test close() stops autoCollect +{ + const consumer = metrics.createConsumer(); + let callCount = 0; + consumer.autoCollect(10, () => { + callCount++; + }); + + // Close immediately - should stop the timer + consumer.close(); + + // Wait a bit to ensure no more calls happen + setTimeout(common.mustCall(() => { + assert.strictEqual(callCount, 0); + }), 50); +} + +// Test validation +{ + const consumer = metrics.createConsumer(); + + assert.throws(() => { + consumer.autoCollect('not a number', () => {}); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + consumer.autoCollect(100, 'not a function'); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + // Interval must be >= 1 + consumer.autoCollect(0, () => {}); + }, { + code: 'ERR_OUT_OF_RANGE', + }); + + consumer.close(); +} diff --git a/test/parallel/test-metrics-bigint.js b/test/parallel/test-metrics-bigint.js new file mode 100644 index 00000000000000..2284b209182cc8 --- /dev/null +++ b/test/parallel/test-metrics-bigint.js @@ -0,0 +1,41 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test bigint value support +const m = metrics.create('test.bigint'); + +const consumer = metrics.createConsumer({ + 'test.bigint': { aggregation: 'sum' }, +}); + +// Record bigint values +m.record(BigInt(Number.MAX_SAFE_INTEGER)); +m.record(BigInt(Number.MAX_SAFE_INTEGER)); +m.record(1n); + +const result = consumer.collect(); +const expected = BigInt(Number.MAX_SAFE_INTEGER) * 2n + 1n; +assert.strictEqual(result[0].dataPoints[0].sum, expected); +assert.strictEqual(result[0].dataPoints[0].count, 3); + +consumer.close(); + +// Test mixed number and bigint (bigint converts sum to bigint) +const m2 = metrics.create('test.mixed'); + +const consumer2 = metrics.createConsumer({ + 'test.mixed': { aggregation: 'sum' }, +}); + +m2.record(10); +m2.record(20n); +m2.record(30); + +const result2 = consumer2.collect(); +assert.strictEqual(result2[0].dataPoints[0].sum, 60n); + +consumer2.close(); diff --git a/test/parallel/test-metrics-cardinality-limit.js b/test/parallel/test-metrics-cardinality-limit.js new file mode 100644 index 00000000000000..1682306cd4f958 --- /dev/null +++ b/test/parallel/test-metrics-cardinality-limit.js @@ -0,0 +1,47 @@ +'use strict'; + +// Test that cardinality limits prevent unbounded memory growth +// when using groupByAttributes with high-cardinality attribute values. + +const common = require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Expect a warning when cardinality limit is exceeded +process.on('warning', common.mustCall((warning) => { + assert.strictEqual(warning.name, 'MetricsWarning'); + assert.match(warning.message, /test\.cardinality\.limit/); + assert.match(warning.message, /cardinality limit of 5/); +}, 1)); + +const m = metrics.create('test.cardinality.limit'); + +// Create a consumer with a small cardinality limit for testing +// Use delta temporality since cumulative now drops new values instead of evicting old ones +const consumer = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.cardinality.limit': { + aggregation: 'sum', + temporality: 'delta', + cardinalityLimit: 5, + }, +}); + +// Record values with 10 different attribute combinations +// Only the last 5 should be retained due to cardinality limit +for (let i = 0; i < 10; i++) { + m.record(1, { id: `item-${i}` }); +} + +const result = consumer.collect(); +assert.strictEqual(result.length, 1); + +// Should only have 5 data points due to cardinality limit +assert.strictEqual(result[0].dataPoints.length, 5); + +// The retained data points should be the most recent ones (item-5 through item-9) +const ids = result[0].dataPoints.map((dp) => dp.attributes.id).sort(); +assert.deepStrictEqual(ids, ['item-5', 'item-6', 'item-7', 'item-8', 'item-9']); + +consumer.close(); diff --git a/test/parallel/test-metrics-consumer-attributes.js b/test/parallel/test-metrics-consumer-attributes.js new file mode 100644 index 00000000000000..670c6d45b65552 --- /dev/null +++ b/test/parallel/test-metrics-consumer-attributes.js @@ -0,0 +1,79 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test groupBy attribute filtering +const m = metrics.create('test.attrs.groupby'); + +const consumer = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.attrs.groupby': { + aggregation: 'sum', + groupBy: ['method'], + }, +}); + +m.record(1, { method: 'GET', path: '/api', user: 'alice' }); +m.record(2, { method: 'GET', path: '/users', user: 'bob' }); +m.record(3, { method: 'POST', path: '/api', user: 'alice' }); + +const result = consumer.collect(); +assert.strictEqual(result[0].dataPoints.length, 2); + +const getData = result[0].dataPoints.find((dp) => dp.attributes.method === 'GET'); +const postData = result[0].dataPoints.find((dp) => dp.attributes.method === 'POST'); + +// Only 'method' should be in attributes (groupBy filters) +assert.deepStrictEqual(getData.attributes, { method: 'GET' }); +assert.deepStrictEqual(postData.attributes, { method: 'POST' }); +assert.strictEqual(getData.sum, 3); // 1 + 2 +assert.strictEqual(postData.sum, 3); + +consumer.close(); + +// Test custom attributeKey function +const m2 = metrics.create('test.attrs.custom'); + +const consumer2 = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.attrs.custom': { + aggregation: 'sum', + attributeKey: (attrs) => attrs.region || 'unknown', + }, +}); + +m2.record(1, { region: 'us-east', zone: 'a' }); +m2.record(2, { region: 'us-east', zone: 'b' }); +m2.record(3, { region: 'eu-west', zone: 'a' }); + +const result2 = consumer2.collect(); +assert.strictEqual(result2[0].dataPoints.length, 2); + +consumer2.close(); + +// Test normalizeAttributes function +const m3 = metrics.create('test.attrs.normalize'); + +const consumer3 = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.attrs.normalize': { + aggregation: 'sum', + normalizeAttributes: (attrs) => ({ + method: attrs.method?.toUpperCase(), + }), + }, +}); + +m3.record(1, { method: 'get', path: '/api' }); +m3.record(2, { method: 'GET', path: '/users' }); + +const result3 = consumer3.collect(); +// Both should be grouped under 'GET' +const dp = result3[0].dataPoints[0]; +assert.strictEqual(dp.attributes.method, 'GET'); +assert.strictEqual(dp.sum, 3); + +consumer3.close(); diff --git a/test/parallel/test-metrics-consumer-close.js b/test/parallel/test-metrics-consumer-close.js new file mode 100644 index 00000000000000..2c49c6ad6c9dec --- /dev/null +++ b/test/parallel/test-metrics-consumer-close.js @@ -0,0 +1,43 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test consumer close() unregisters from registry +const m = metrics.create('test.close'); + +const consumer = metrics.createConsumer({ + 'test.close': { aggregation: 'sum' }, +}); + +// Record value before close +m.record(10); + +// Close consumer +consumer.close(); + +// Record more values - consumer should not receive them +m.record(20); +m.record(30); + +// Collect after close should return empty result +const result = consumer.collect(); +assert.deepStrictEqual(result, []); + +// Test double close is safe +consumer.close(); // Should not throw + +// Test closed consumer doesn't receive new values +const consumer2 = metrics.createConsumer({ + 'test.close': { aggregation: 'sum' }, +}); + +// This consumer should only see values recorded after it was created +const result2 = consumer2.collect(); +// Note: the sum might include previous values if metric state persists, +// but the point is the closed consumer doesn't receive new values +assert.ok(Array.isArray(result2)); + +consumer2.close(); diff --git a/test/parallel/test-metrics-consumer.js b/test/parallel/test-metrics-consumer.js new file mode 100644 index 00000000000000..546035e0faa88e --- /dev/null +++ b/test/parallel/test-metrics-consumer.js @@ -0,0 +1,108 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test basic consumer +const m = metrics.create('test.consumer'); + +const consumer = metrics.createConsumer({ + 'test.consumer': { aggregation: 'sum' }, +}); + +// Record values +m.record(1); +m.record(2); +m.record(3); + +// Collect - now returns metrics array directly +const result = consumer.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].descriptor.name, 'test.consumer'); +assert.strictEqual(result[0].temporality, 'cumulative'); +assert.strictEqual(result[0].dataPoints[0].sum, 6); +assert.ok(result[0].timestamp > 0); + +consumer.close(); + +// Test consumer only tracks configured metrics +const m1 = metrics.create('test.consumer.a'); +const m2 = metrics.create('test.consumer.b'); + +const consumer2 = metrics.createConsumer({ + 'test.consumer.a': { aggregation: 'sum' }, +}); + +m1.record(10); +m2.record(20); // Should be ignored by consumer2 + +const result2 = consumer2.collect(); +assert.strictEqual(result2.length, 1); +assert.strictEqual(result2[0].descriptor.name, 'test.consumer.a'); +assert.strictEqual(result2[0].dataPoints[0].sum, 10); + +consumer2.close(); + +// Test consumer with default aggregation (tracks all metrics) +const m3 = metrics.create('test.consumer.all'); + +const consumer3 = metrics.createConsumer({ + defaultAggregation: 'lastValue', +}); + +m3.record(42); + +const result3 = consumer3.collect(); +assert.ok(result3.some((s) => s.descriptor.name === 'test.consumer.all')); + +consumer3.close(); + +// Test temporality option - delta resets between collects +const m4 = metrics.create('test.consumer.delta'); + +const consumer4 = metrics.createConsumer({ + 'test.consumer.delta': { aggregation: 'sum', temporality: 'delta' }, +}); + +m4.record(10); + +let result4 = consumer4.collect(); +assert.strictEqual(result4[0].temporality, 'delta'); +assert.strictEqual(result4[0].dataPoints[0].sum, 10); +assert.strictEqual(result4[0].dataPoints[0].count, 1); + +// Record more values and collect again - should NOT be cumulative +m4.record(20); +m4.record(5); + +result4 = consumer4.collect(); +assert.strictEqual(result4[0].dataPoints[0].sum, 25); // 20 + 5, NOT 10 + 20 + 5 = 35 +assert.strictEqual(result4[0].dataPoints[0].count, 2); + +// After another collect with no values, should have no data points +result4 = consumer4.collect(); +assert.strictEqual(result4.length, 0); // No data points after reset + +consumer4.close(); + +// Test cumulative temporality does NOT reset +const m5 = metrics.create('test.consumer.cumulative'); + +const consumer5 = metrics.createConsumer({ + 'test.consumer.cumulative': { aggregation: 'sum', temporality: 'cumulative' }, +}); + +m5.record(10); + +let result5 = consumer5.collect(); +assert.strictEqual(result5[0].temporality, 'cumulative'); +assert.strictEqual(result5[0].dataPoints[0].sum, 10); + +m5.record(20); + +result5 = consumer5.collect(); +assert.strictEqual(result5[0].dataPoints[0].sum, 30); // Cumulative: 10 + 20 + +consumer5.close(); diff --git a/test/parallel/test-metrics-counter.js b/test/parallel/test-metrics-counter.js deleted file mode 100644 index 227055df3959a7..00000000000000 --- a/test/parallel/test-metrics-counter.js +++ /dev/null @@ -1,41 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { createCounter, Counter, Metric, MetricReport } = metrics; - -const testCounter = createCounter('test', { base: 'test' }); -assert.ok(testCounter instanceof Counter); -assert.ok(testCounter instanceof Metric); -assert.strictEqual(testCounter.value, 0); - -assert.strictEqual(testCounter.type, 'counter'); -assert.strictEqual(testCounter.name, 'test'); -assert.deepStrictEqual(testCounter.meta, { base: 'test' }); -assert.strictEqual(testCounter.channelName, 'metrics:counter:test'); - -const messages = [ - [1, { base: 'test' }], - [123, { base: 'test', meta: 'extra' }], - [-1, { base: 'test' }], - [-123, { base: 'test', meta: 'extra' }], -]; - -subscribe(testCounter.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'counter'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.strictEqual(report.value, value); - assert.deepStrictEqual(report.meta, meta); -}, 4)); - -testCounter.increment(); -testCounter.increment(123, { meta: 'extra' }); -testCounter.decrement(); -testCounter.decrement(123, { meta: 'extra' }); diff --git a/test/parallel/test-metrics-create.js b/test/parallel/test-metrics-create.js new file mode 100644 index 00000000000000..0d3061bb0fad81 --- /dev/null +++ b/test/parallel/test-metrics-create.js @@ -0,0 +1,40 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test basic metric creation +const m1 = metrics.create('test.create'); +assert.strictEqual(m1.descriptor.name, 'test.create'); +assert.strictEqual(m1.isObservable, false); + +// Test with all options +const m2 = metrics.create('test.full', { + unit: 'ms', + description: 'Full options test', +}); +assert.strictEqual(m2.descriptor.name, 'test.full'); +assert.strictEqual(m2.descriptor.unit, 'ms'); +assert.strictEqual(m2.descriptor.description, 'Full options test'); + +// Test metrics.list() +const list = metrics.list(); +assert.ok(Array.isArray(list)); +assert.ok(list.length >= 2); +assert.ok(list.some((m) => m.descriptor.name === 'test.create')); +assert.ok(list.some((m) => m.descriptor.name === 'test.full')); + +// Test metrics.get() +const retrieved = metrics.get('test.create'); +assert.strictEqual(retrieved, m1); +assert.strictEqual(metrics.get('nonexistent'), undefined); + +// Test validation +assert.throws(() => metrics.create(), { + code: 'ERR_INVALID_ARG_TYPE', +}); +assert.throws(() => metrics.create(123), { + code: 'ERR_INVALID_ARG_TYPE', +}); diff --git a/test/parallel/test-metrics-custom-aggregator.js b/test/parallel/test-metrics-custom-aggregator.js new file mode 100644 index 00000000000000..752245bd15c90f --- /dev/null +++ b/test/parallel/test-metrics-custom-aggregator.js @@ -0,0 +1,109 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test custom aggregator with non-standard state properties +// This tests the fix for custom aggregators that don't use 'count' or 'value' +const m = metrics.create('test.custom.aggregator'); + +const customAggregator = { + createState() { + return { + total: 0, + items: [], + }; + }, + aggregate(state, value) { + state.total += value; + state.items.push(value); + }, + finalize(state) { + return { + total: state.total, + itemCount: state.items.length, + average: state.items.length > 0 ? state.total / state.items.length : 0, + }; + }, + resetState(state) { + state.total = 0; + state.items = []; + }, +}; + +const consumer = metrics.createConsumer({ + 'test.custom.aggregator': { + aggregation: customAggregator, + temporality: 'delta', + }, +}); + +// Before recording, should have no data points +let result = consumer.collect(); +assert.strictEqual(result.length, 0); + +// Record some values +m.record(10); +m.record(20); +m.record(30); + +// Collect and verify custom aggregator worked +result = consumer.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].dataPoints[0].total, 60); +assert.strictEqual(result[0].dataPoints[0].itemCount, 3); +assert.strictEqual(result[0].dataPoints[0].average, 20); + +// After delta reset, should have no data points again +result = consumer.collect(); +assert.strictEqual(result.length, 0); + +// Record more and verify reset worked +m.record(5); +result = consumer.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].dataPoints[0].total, 5); +assert.strictEqual(result[0].dataPoints[0].itemCount, 1); + +consumer.close(); + +// Test custom aggregator with groupByAttributes +const m2 = metrics.create('test.custom.aggregator.grouped'); + +const consumer2 = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.custom.aggregator.grouped': { + aggregation: customAggregator, + temporality: 'delta', + groupBy: ['region'], + }, +}); + +// Before recording, should have no data +result = consumer2.collect(); +assert.strictEqual(result.length, 0); + +// Record values with different attributes +m2.record(10, { region: 'us-east' }); +m2.record(20, { region: 'us-east' }); +m2.record(30, { region: 'eu-west' }); + +result = consumer2.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].dataPoints.length, 2); + +const usEast = result[0].dataPoints.find((dp) => dp.attributes.region === 'us-east'); +const euWest = result[0].dataPoints.find((dp) => dp.attributes.region === 'eu-west'); + +assert.strictEqual(usEast.total, 30); +assert.strictEqual(usEast.itemCount, 2); +assert.strictEqual(euWest.total, 30); +assert.strictEqual(euWest.itemCount, 1); + +// After delta reset, should have no data +result = consumer2.collect(); +assert.strictEqual(result.length, 0); + +consumer2.close(); diff --git a/test/parallel/test-metrics-dc-consumer.js b/test/parallel/test-metrics-dc-consumer.js new file mode 100644 index 00000000000000..d1db4188c679ee --- /dev/null +++ b/test/parallel/test-metrics-dc-consumer.js @@ -0,0 +1,71 @@ +'use strict'; + +const common = require('../common'); + +const assert = require('assert'); +const dc = require('diagnostics_channel'); +const { metrics } = require('perf_hooks'); + +// Test DiagnosticsChannel consumer +const m = metrics.create('test.dc', { unit: 'count' }); + +const received = []; +dc.subscribe('metrics:test.dc', common.mustCall((msg) => { + received.push(msg); +}, 3)); + +// Create DC consumer +const dcConsumer = metrics.createDiagnosticsChannelConsumer(); + +// Record values +m.record(42, { tag: 'a' }); +m.record(100, { tag: 'b' }); +m.record(7); + +// Verify DC received the values +assert.strictEqual(received.length, 3); + +assert.strictEqual(received[0].value, 42); +assert.deepStrictEqual(received[0].attributes, { tag: 'a' }); +assert.strictEqual(received[0].descriptor.name, 'test.dc'); + +assert.strictEqual(received[1].value, 100); +assert.deepStrictEqual(received[1].attributes, { tag: 'b' }); + +assert.strictEqual(received[2].value, 7); +assert.strictEqual(Object.keys(received[2].attributes).length, 0); + +// Test DC consumer is singleton +const dcConsumer2 = metrics.createDiagnosticsChannelConsumer(); +assert.strictEqual(dcConsumer, dcConsumer2); + +// Close DC consumer +dcConsumer.close(); + +// Test DC consumer with observable metrics +let gaugeValue = 50; +metrics.create('test.dc.observable', { + observable: (metric) => { metric.record(gaugeValue); }, +}); + +const received2 = []; +dc.subscribe('metrics:test.dc.observable', (msg) => { + received2.push(msg); +}); + +// Create new DC consumer +const dcConsumer3 = metrics.createDiagnosticsChannelConsumer(); + +// Observables are sampled on collect +dcConsumer3.collect(); + +assert.strictEqual(received2.length, 1); +assert.strictEqual(received2[0].value, 50); + +gaugeValue = 75; +dcConsumer3.collect(); + +assert.strictEqual(received2.length, 2); +assert.strictEqual(received2[1].value, 75); + +dcConsumer3.close(); diff --git a/test/parallel/test-metrics-delta-timestamps.js b/test/parallel/test-metrics-delta-timestamps.js new file mode 100644 index 00000000000000..1e29f966381063 --- /dev/null +++ b/test/parallel/test-metrics-delta-timestamps.js @@ -0,0 +1,58 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test delta temporality includes startTime and timestamp +const m = metrics.create('test.delta.timestamps'); + +const consumer = metrics.createConsumer({ + 'test.delta.timestamps': { aggregation: 'sum', temporality: 'delta' }, +}); + +// Record some values +m.record(10); +m.record(20); + +// First collect +const result1 = consumer.collect(); +assert.strictEqual(result1.length, 1); +assert.strictEqual(result1[0].temporality, 'delta'); +assert.ok(typeof result1[0].startTime === 'number', 'startTime should be a number'); +assert.ok(typeof result1[0].timestamp === 'number', 'timestamp should be a number'); +assert.ok(result1[0].startTime < result1[0].timestamp, 'startTime should be before timestamp'); +assert.strictEqual(result1[0].dataPoints[0].sum, 30); + +const firstEndTime = result1[0].timestamp; + +// Record more values +m.record(5); + +// Second collect +const result2 = consumer.collect(); +assert.strictEqual(result2.length, 1); +// startTime of second collect should equal timestamp of first collect +assert.strictEqual(result2[0].startTime, firstEndTime); +assert.ok(result2[0].timestamp > result2[0].startTime); +assert.strictEqual(result2[0].dataPoints[0].sum, 5); + +consumer.close(); + +// Test cumulative temporality does NOT include startTime +const m2 = metrics.create('test.cumulative.timestamps'); + +const consumer2 = metrics.createConsumer({ + 'test.cumulative.timestamps': { aggregation: 'sum', temporality: 'cumulative' }, +}); + +m2.record(10); + +const result3 = consumer2.collect(); +assert.strictEqual(result3[0].temporality, 'cumulative'); +assert.ok(typeof result3[0].timestamp === 'number'); +// Cumulative temporality should not have startTime +assert.strictEqual(result3[0].startTime, undefined); + +consumer2.close(); diff --git a/test/parallel/test-metrics-descriptor.js b/test/parallel/test-metrics-descriptor.js new file mode 100644 index 00000000000000..b2e614a07166f9 --- /dev/null +++ b/test/parallel/test-metrics-descriptor.js @@ -0,0 +1,61 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test MetricDescriptor via metric creation +const m = metrics.create('test.descriptor', { + unit: 'bytes', + description: 'A test metric', +}); + +// Test descriptor getters +assert.strictEqual(m.descriptor.name, 'test.descriptor'); +assert.strictEqual(m.descriptor.unit, 'bytes'); +assert.strictEqual(m.descriptor.description, 'A test metric'); +assert.strictEqual(m.descriptor.scope, undefined); + +// Test toJSON +const json = m.descriptor.toJSON(); +assert.deepStrictEqual(json, { + name: 'test.descriptor', + unit: 'bytes', + description: 'A test metric', + scope: undefined, +}); + +// Test with minimal options +const m2 = metrics.create('test.minimal'); +assert.strictEqual(m2.descriptor.name, 'test.minimal'); +assert.strictEqual(m2.descriptor.unit, undefined); +assert.strictEqual(m2.descriptor.description, undefined); + +// Descriptor toJSON with undefined fields +const json2 = m2.descriptor.toJSON(); +assert.deepStrictEqual(json2, { + name: 'test.minimal', + unit: undefined, + description: undefined, + scope: undefined, +}); + +// Test with InstrumentationScope +const scope = new metrics.InstrumentationScope('my-library', '1.0.0', 'https://example.com/schema'); +const m3 = metrics.create('test.scoped', { + unit: 'count', + scope, +}); + +assert.strictEqual(m3.descriptor.scope, scope); +assert.strictEqual(m3.descriptor.scope.name, 'my-library'); +assert.strictEqual(m3.descriptor.scope.version, '1.0.0'); +assert.strictEqual(m3.descriptor.scope.schemaUrl, 'https://example.com/schema'); + +const json3 = m3.descriptor.toJSON(); +assert.deepStrictEqual(json3.scope, { + name: 'my-library', + version: '1.0.0', + schemaUrl: 'https://example.com/schema', +}); diff --git a/test/parallel/test-metrics-exemplar-asynclocalstorage.js b/test/parallel/test-metrics-exemplar-asynclocalstorage.js new file mode 100644 index 00000000000000..a58149eef416e3 --- /dev/null +++ b/test/parallel/test-metrics-exemplar-asynclocalstorage.js @@ -0,0 +1,224 @@ +// Flags: --expose-internals +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { AsyncLocalStorage } = require('node:async_hooks'); +const { create, createConsumer, ReservoirSampler } = require('internal/perf/metrics'); + +// This test demonstrates how to use AsyncLocalStorage to automatically +// propagate trace context to exemplar samples + +const traceStorage = new AsyncLocalStorage(); + +// Extract function that reads from AsyncLocalStorage +function extractTraceContext(attributes) { + const store = traceStorage.getStore(); + if (!store || !store.traceId || !store.spanId) { + return null; + } + return { + traceId: store.traceId, + spanId: store.spanId, + filteredAttributes: attributes, + }; +} + +// Test: Trace context propagates through AsyncLocalStorage +{ + const metric = create('test.als.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(5, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.als.exemplar': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + // Simulate a request with trace context + traceStorage.run({ traceId: 'trace-123', spanId: 'span-456' }, () => { + metric.record(100, { endpoint: '/api/users' }); + metric.record(200, { endpoint: '/api/posts' }); + }); + + // Another request with different trace context + traceStorage.run({ traceId: 'trace-789', spanId: 'span-abc' }, () => { + metric.record(150, { endpoint: '/api/users' }); + }); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.strictEqual(dataPoints[0].sum, 450); + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 3); + + // Check trace contexts + const traceIds = dataPoints[0].exemplars.map((e) => e.traceId); + assert.ok(traceIds.includes('trace-123')); + assert.ok(traceIds.includes('trace-789')); + + // Check filtered attributes + assert.strictEqual(dataPoints[0].exemplars[0].filteredAttributes.endpoint, '/api/users'); + + consumer.close(); + metric.close(); +} + +// Test: No trace context results in no exemplars +{ + const metric = create('test.als.no.context', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.als.no.context': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + // Record without AsyncLocalStorage context + metric.record(100); + metric.record(200); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.strictEqual(dataPoints[0].sum, 300); + // No exemplars should be collected + assert.strictEqual(dataPoints[0].exemplars, undefined); + + consumer.close(); + metric.close(); +} + +// Test: Nested contexts +{ + const metric = create('test.als.nested', { unit: 'ms' }); + + const sampler = new ReservoirSampler(5, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.als.nested': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + traceStorage.run({ traceId: 'trace-parent', spanId: 'span-parent' }, () => { + metric.record(100); + + // Nested span + traceStorage.run({ traceId: 'trace-parent', spanId: 'span-child' }, () => { + metric.record(200); + }); + + metric.record(150); + }); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.strictEqual(dataPoints[0].sum, 450); + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 3); + + // Should have both parent and child span IDs + const spanIds = dataPoints[0].exemplars.map((e) => e.spanId); + assert.ok(spanIds.includes('span-parent')); + assert.ok(spanIds.includes('span-child')); + + consumer.close(); + metric.close(); +} + +// Test: Timer with AsyncLocalStorage +{ + const metric = create('test.als.timer', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.als.timer': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + traceStorage.run({ traceId: 'trace-timer', spanId: 'span-timer' }, () => { + const timer = metric.startTimer(); + // Simulate some work + timer.stop(); + }); + + const snapshot = consumer.collect(); + + if (snapshot.length > 0) { + const dataPoints = snapshot[0].dataPoints; + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 1); + assert.strictEqual(dataPoints[0].exemplars[0].traceId, 'trace-timer'); + assert.strictEqual(dataPoints[0].exemplars[0].spanId, 'span-timer'); + } + + consumer.close(); + metric.close(); +} + +// Test: Concurrent requests with different contexts +{ + const metric = create('test.als.concurrent', { unit: 'ms' }); + + const sampler = new ReservoirSampler(10, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.als.concurrent': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + const promises = []; + + // Simulate 5 concurrent requests + for (let i = 0; i < 5; i++) { + const promise = new Promise((resolve) => { + traceStorage.run({ traceId: `trace-${i}`, spanId: `span-${i}` }, () => { + setImmediate(() => { + metric.record(100 + i * 10); + resolve(); + }); + }); + }); + promises.push(promise); + } + + Promise.all(promises).then(() => { + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.strictEqual(dataPoints[0].sum, 600); + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 5); + + // Check all trace IDs are unique + const traceIds = dataPoints[0].exemplars.map((e) => e.traceId); + const uniqueTraceIds = new Set(traceIds); + assert.strictEqual(uniqueTraceIds.size, 5); + + consumer.close(); + metric.close(); + }); +} diff --git a/test/parallel/test-metrics-exemplar-boundary.js b/test/parallel/test-metrics-exemplar-boundary.js new file mode 100644 index 00000000000000..d07a6bf8514df1 --- /dev/null +++ b/test/parallel/test-metrics-exemplar-boundary.js @@ -0,0 +1,168 @@ +// Flags: --expose-internals +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { create, createConsumer, BoundarySampler } = require('internal/perf/metrics'); + +// This test ensures that BoundarySampler correctly maintains one exemplar +// per histogram bucket + +// Simple extract function that returns trace context +function extractTraceContext(attributes) { + if (!attributes.traceId || !attributes.spanId) { + return null; + } + return { + traceId: attributes.traceId, + spanId: attributes.spanId, + filteredAttributes: {}, + }; +} + +// Test: One exemplar per bucket +{ + const boundaries = [10, 50, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + // Add samples in different buckets + sampler.sample(5, 1000, { traceId: 'trace1', spanId: 'span1' }); // Bucket 0 (≤10) + sampler.sample(30, 2000, { traceId: 'trace2', spanId: 'span2' }); // Bucket 1 (≤50) + sampler.sample(75, 3000, { traceId: 'trace3', spanId: 'span3' }); // Bucket 2 (≤100) + sampler.sample(150, 4000, { traceId: 'trace4', spanId: 'span4' }); // Bucket 3 (>100) + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 4); + + // Check values are from different buckets + const values = exemplars.map((e) => e.value).sort((a, b) => a - b); + assert.deepStrictEqual(values, [5, 30, 75, 150]); +} + +// Test: Replacement in same bucket (10% probability) +{ + const boundaries = [10]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + // Add many samples to same bucket + for (let i = 0; i < 100; i++) { + sampler.sample(5, 1000 + i, { + traceId: `trace${i}`, + spanId: `span${i}`, + }); + } + + const exemplars = sampler.getExemplars(); + // Should still have only 1-2 exemplars (one per bucket, possibly overflow) + assert.ok(exemplars.length <= 2); +} + +// Test: Overflow bucket +{ + const boundaries = [10, 50, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + sampler.sample(200, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(300, 2000, { traceId: 'trace2', spanId: 'span2' }); + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 1); + // Should have exemplar from overflow bucket + assert.ok(exemplars[0].value >= 200); +} + +// Test: Boundary edge cases +{ + const boundaries = [10, 50, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + sampler.sample(10, 1000, { traceId: 'trace1', spanId: 'span1' }); // Exactly on boundary + sampler.sample(50, 2000, { traceId: 'trace2', spanId: 'span2' }); // Exactly on boundary + sampler.sample(100, 3000, { traceId: 'trace3', spanId: 'span3' }); // Exactly on boundary + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 3); +} + +// Test: Reset clears exemplars +{ + const boundaries = [10, 50]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + sampler.sample(5, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(30, 2000, { traceId: 'trace2', spanId: 'span2' }); + + assert.strictEqual(sampler.getExemplars().length, 2); + + sampler.reset(); + + assert.strictEqual(sampler.getExemplars().length, 0); +} + +// Test: Skip samples without trace context +{ + const boundaries = [10, 50]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + sampler.sample(5, 1000, {}); // No trace context + sampler.sample(30, 2000, { traceId: 'trace1' }); // Missing spanId + + assert.strictEqual(sampler.getExemplars().length, 0); +} + +// Test: Integration with histogram consumer +{ + const metric = create('test.boundary', { unit: 'ms' }); + + const boundaries = [10, 50, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + const consumer = createConsumer({ + defaultAggregation: 'histogram', + defaultTemporality: 'cumulative', + metrics: { + 'test.boundary': { + boundaries, + exemplar: sampler, + }, + }, + }); + + // Record values in different buckets + metric.record(5, { traceId: 'trace1', spanId: 'span1' }); + metric.record(30, { traceId: 'trace2', spanId: 'span2' }); + metric.record(75, { traceId: 'trace3', spanId: 'span3' }); + metric.record(150, { traceId: 'trace4', spanId: 'span4' }); + + const snapshot = consumer.collect(); + assert.strictEqual(snapshot.length, 1); + + const dataPoints = snapshot[0].dataPoints; + assert.strictEqual(dataPoints.length, 1); + + // Check histogram data + assert.strictEqual(dataPoints[0].count, 4); + assert.strictEqual(dataPoints[0].sum, 260); + + // Check exemplars + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 4); + + consumer.close(); + metric.close(); +} + +// Test: Large boundary array uses binary search +{ + // Create boundaries array larger than 8 to trigger binary search path + const boundaries = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + // Add samples in different buckets + sampler.sample(5, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(45, 2000, { traceId: 'trace2', spanId: 'span2' }); + sampler.sample(95, 3000, { traceId: 'trace3', spanId: 'span3' }); + sampler.sample(105, 4000, { traceId: 'trace4', spanId: 'span4' }); + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 4); +} diff --git a/test/parallel/test-metrics-exemplar-consumer.js b/test/parallel/test-metrics-exemplar-consumer.js new file mode 100644 index 00000000000000..726ff044e024ca --- /dev/null +++ b/test/parallel/test-metrics-exemplar-consumer.js @@ -0,0 +1,271 @@ +// Flags: --expose-internals +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { create, createConsumer, ReservoirSampler, BoundarySampler } = require('internal/perf/metrics'); + +// This test ensures exemplars work correctly with different aggregation types +// and temporality modes + +function extractTraceContext(attributes) { + if (!attributes.traceId || !attributes.spanId) { + return null; + } + return { + traceId: attributes.traceId, + spanId: attributes.spanId, + filteredAttributes: {}, + }; +} + +// Test: Exemplars with histogram aggregation +{ + const metric = create('test.histogram.exemplar', { unit: 'ms' }); + + const boundaries = [10, 50, 100]; + const sampler = new BoundarySampler(boundaries, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.histogram.exemplar': { + aggregation: 'histogram', + boundaries, + exemplar: sampler, + }, + }, + }); + + metric.record(5, { traceId: 'trace1', spanId: 'span1' }); + metric.record(30, { traceId: 'trace2', spanId: 'span2' }); + metric.record(75, { traceId: 'trace3', spanId: 'span3' }); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 3); + + consumer.close(); + metric.close(); +} + +// Test: Exemplars with summary aggregation +{ + const metric = create('test.summary.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.summary.exemplar': { + aggregation: 'summary', + exemplar: sampler, + }, + }, + }); + + metric.record(10, { traceId: 'trace1', spanId: 'span1' }); + metric.record(20, { traceId: 'trace2', spanId: 'span2' }); + metric.record(30, { traceId: 'trace3', spanId: 'span3' }); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 3); + + consumer.close(); + metric.close(); +} + +// Test: Exemplars with delta temporality +{ + const metric = create('test.delta.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(2, extractTraceContext); + + const consumer = createConsumer({ + defaultTemporality: 'delta', + metrics: { + 'test.delta.exemplar': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + // First collection period + metric.record(100, { traceId: 'trace1', spanId: 'span1' }); + metric.record(200, { traceId: 'trace2', spanId: 'span2' }); + + let snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].sum, 300); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 2); + + // Second collection period - should be reset + metric.record(300, { traceId: 'trace3', spanId: 'span3' }); + + snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].sum, 300); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 1); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars[0].value, 300); + + consumer.close(); + metric.close(); +} + +// Test: Exemplars with cumulative temporality +{ + const metric = create('test.cumulative.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + defaultTemporality: 'cumulative', + metrics: { + 'test.cumulative.exemplar': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + // First collection + metric.record(100, { traceId: 'trace1', spanId: 'span1' }); + metric.record(200, { traceId: 'trace2', spanId: 'span2' }); + + let snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].sum, 300); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 2); + + // Second collection - exemplars should accumulate + metric.record(300, { traceId: 'trace3', spanId: 'span3' }); + + snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].sum, 600); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 3); + + consumer.close(); + metric.close(); +} + +// Test: Exemplars with groupByAttributes +{ + const metric = create('test.grouped.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + groupByAttributes: true, + metrics: { + 'test.grouped.exemplar': { + aggregation: 'sum', + groupBy: ['endpoint'], + exemplar: sampler, + }, + }, + }); + + metric.record(100, { endpoint: '/api/users', traceId: 'trace1', spanId: 'span1' }); + metric.record(200, { endpoint: '/api/posts', traceId: 'trace2', spanId: 'span2' }); + metric.record(150, { endpoint: '/api/users', traceId: 'trace3', spanId: 'span3' }); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + // Should have 2 data points (one per endpoint) + assert.strictEqual(dataPoints.length, 2); + + // Both should share the same exemplars (sampler is shared) + assert.ok(dataPoints[0].exemplars); + assert.ok(dataPoints[1].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, dataPoints[1].exemplars.length); + + consumer.close(); + metric.close(); +} + +// Test: No exemplars field when empty +{ + const metric = create('test.no.exemplar', { unit: 'ms' }); + + const consumer = createConsumer({ + metrics: { + 'test.no.exemplar': { + aggregation: 'sum', + // No exemplar sampler configured + }, + }, + }); + + metric.record(100); + metric.record(200); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + // Should not have exemplars field + assert.strictEqual(dataPoints[0].exemplars, undefined); + + consumer.close(); + metric.close(); +} + +// Test: Empty exemplars don't add field +{ + const metric = create('test.empty.exemplar', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + metrics: { + 'test.empty.exemplar': { + aggregation: 'sum', + exemplar: sampler, + }, + }, + }); + + // Record without trace context + metric.record(100); + metric.record(200); + + const snapshot = consumer.collect(); + const dataPoints = snapshot[0].dataPoints; + + // Should not have exemplars field when no exemplars collected + assert.strictEqual(dataPoints[0].exemplars, undefined); + + consumer.close(); + metric.close(); +} + +// Test: Invalid sampler throws +{ + assert.throws(() => { + createConsumer({ + metrics: { + 'test.invalid': { + exemplar: { sample: 'not a function' }, + }, + }, + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + createConsumer({ + metrics: { + 'test.invalid': { + exemplar: { + sample: () => {}, + // Missing getExemplars and reset + }, + }, + }, + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +} diff --git a/test/parallel/test-metrics-exemplar-reservoir.js b/test/parallel/test-metrics-exemplar-reservoir.js new file mode 100644 index 00000000000000..cffa38a930aaae --- /dev/null +++ b/test/parallel/test-metrics-exemplar-reservoir.js @@ -0,0 +1,157 @@ +// Flags: --expose-internals +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { create, createConsumer, ReservoirSampler } = require('internal/perf/metrics'); + +// This test ensures that ReservoirSampler correctly samples exemplars +// using reservoir sampling algorithm + +// Simple extract function that returns trace context +function extractTraceContext(attributes) { + if (!attributes.traceId || !attributes.spanId) { + return null; + } + return { + traceId: attributes.traceId, + spanId: attributes.spanId, + filteredAttributes: {}, + }; +} + +// Test: Reservoir fills up to max capacity +{ + const sampler = new ReservoirSampler(3, extractTraceContext); + + sampler.sample(10, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(20, 2000, { traceId: 'trace2', spanId: 'span2' }); + sampler.sample(30, 3000, { traceId: 'trace3', spanId: 'span3' }); + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 3); + assert.strictEqual(exemplars[0].value, 10); + assert.strictEqual(exemplars[1].value, 20); + assert.strictEqual(exemplars[2].value, 30); +} + +// Test: Reservoir replacement with probability +{ + const sampler = new ReservoirSampler(2, extractTraceContext); + + // Fill reservoir + sampler.sample(10, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(20, 2000, { traceId: 'trace2', spanId: 'span2' }); + + // Add many more samples - some should replace existing ones + for (let i = 0; i < 100; i++) { + sampler.sample(30 + i, 3000 + i, { + traceId: `trace${i + 3}`, + spanId: `span${i + 3}`, + }); + } + + const exemplars = sampler.getExemplars(); + assert.strictEqual(exemplars.length, 2); + // At least one should have been replaced (probabilistically) + const hasReplacement = exemplars.some((e) => e.value >= 30); + assert.strictEqual(hasReplacement, true); +} + +// Test: Reset clears reservoir +{ + const sampler = new ReservoirSampler(3, extractTraceContext); + + sampler.sample(10, 1000, { traceId: 'trace1', spanId: 'span1' }); + sampler.sample(20, 2000, { traceId: 'trace2', spanId: 'span2' }); + + assert.strictEqual(sampler.getExemplars().length, 2); + + sampler.reset(); + + assert.strictEqual(sampler.getExemplars().length, 0); +} + +// Test: Skip samples without trace context +{ + const sampler = new ReservoirSampler(3, extractTraceContext); + + sampler.sample(10, 1000, {}); // No trace context + sampler.sample(20, 2000, { traceId: 'trace1' }); // Missing spanId + sampler.sample(30, 3000, { spanId: 'span1' }); // Missing traceId + + assert.strictEqual(sampler.getExemplars().length, 0); +} + +// Test: Integration with consumer +{ + const metric = create('test.reservoir', { unit: 'ms' }); + + const sampler = new ReservoirSampler(3, extractTraceContext); + + const consumer = createConsumer({ + defaultAggregation: 'sum', + defaultTemporality: 'cumulative', + metrics: { + 'test.reservoir': { + exemplar: sampler, + }, + }, + }); + + // Record values with trace context + metric.record(100, { traceId: 'trace1', spanId: 'span1' }); + metric.record(200, { traceId: 'trace2', spanId: 'span2' }); + metric.record(300, { traceId: 'trace3', spanId: 'span3' }); + + const snapshot = consumer.collect(); + assert.strictEqual(snapshot.length, 1); + + const dataPoints = snapshot[0].dataPoints; + assert.strictEqual(dataPoints.length, 1); + assert.strictEqual(dataPoints[0].sum, 600); + assert.strictEqual(dataPoints[0].count, 3); + + // Check exemplars + assert.ok(dataPoints[0].exemplars); + assert.strictEqual(dataPoints[0].exemplars.length, 3); + assert.strictEqual(dataPoints[0].exemplars[0].value, 100); + assert.strictEqual(dataPoints[0].exemplars[0].traceId, 'trace1'); + assert.strictEqual(dataPoints[0].exemplars[0].spanId, 'span1'); + + consumer.close(); + metric.close(); +} + +// Test: Exemplars reset for delta temporality +{ + const metric = create('test.delta.reservoir', { unit: 'ms' }); + + const sampler = new ReservoirSampler(2, extractTraceContext); + + const consumer = createConsumer({ + defaultAggregation: 'sum', + defaultTemporality: 'delta', + metrics: { + 'test.delta.reservoir': { + exemplar: sampler, + }, + }, + }); + + // First collection + metric.record(100, { traceId: 'trace1', spanId: 'span1' }); + metric.record(200, { traceId: 'trace2', spanId: 'span2' }); + + let snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 2); + + // Second collection - exemplars should be reset + metric.record(300, { traceId: 'trace3', spanId: 'span3' }); + + snapshot = consumer.collect(); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars.length, 1); + assert.strictEqual(snapshot[0].dataPoints[0].exemplars[0].value, 300); + + consumer.close(); + metric.close(); +} diff --git a/test/parallel/test-metrics-gauge.js b/test/parallel/test-metrics-gauge.js deleted file mode 100644 index bc81e6d33606da..00000000000000 --- a/test/parallel/test-metrics-gauge.js +++ /dev/null @@ -1,37 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { createGauge, Gauge, Metric, MetricReport } = metrics; - -const testGauge = createGauge('test', { base: 'test' }); -assert.ok(testGauge instanceof Gauge); -assert.ok(testGauge instanceof Metric); -assert.strictEqual(testGauge.value, 0); - -assert.strictEqual(testGauge.type, 'gauge'); -assert.strictEqual(testGauge.name, 'test'); -assert.deepStrictEqual(testGauge.meta, { base: 'test' }); -assert.strictEqual(testGauge.channelName, 'metrics:gauge:test'); - -const messages = [ - [123, { base: 'test', meta: 'first' }], - [0, { base: 'test' }], -]; - -subscribe(testGauge.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'gauge'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.strictEqual(report.value, value); - assert.deepStrictEqual(report.meta, meta); -}, 2)); - -testGauge.reset(123, { meta: 'first' }); -testGauge.reset(); diff --git a/test/parallel/test-metrics-late-consumer.js b/test/parallel/test-metrics-late-consumer.js new file mode 100644 index 00000000000000..15def707ca2689 --- /dev/null +++ b/test/parallel/test-metrics-late-consumer.js @@ -0,0 +1,122 @@ +'use strict'; + +// Test that consumers properly subscribe to metrics created AFTER the consumer. +// This tests the onMetricCreated callback path. + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test 1: Consumer waiting for specific metric that doesn't exist yet +{ + // Create consumer BEFORE the metric exists + const consumer = metrics.createConsumer({ + 'test.late.specific': { aggregation: 'sum' }, + }); + + // Collect before metric exists - should be empty + let result = consumer.collect(); + assert.strictEqual(result.length, 0); + + // Now create the metric + const m = metrics.create('test.late.specific'); + + // Record a value + m.record(42); + + // Collect - should now have the value + result = consumer.collect(); + assert.strictEqual(result.length, 1); + assert.strictEqual(result[0].descriptor.name, 'test.late.specific'); + assert.strictEqual(result[0].dataPoints[0].sum, 42); + + consumer.close(); +} + +// Test 2: Wildcard consumer receives metrics created after it +{ + // Create wildcard consumer + const consumer = metrics.createConsumer({ + defaultAggregation: 'sum', + }); + + // Create a metric AFTER the consumer + const m = metrics.create('test.late.wildcard'); + m.record(100); + + // Collect - should have the new metric + const result = consumer.collect(); + const found = result.find((s) => s.descriptor.name === 'test.late.wildcard'); + assert.ok(found, 'Wildcard consumer should receive metrics created after it'); + assert.strictEqual(found.dataPoints[0].sum, 100); + + consumer.close(); +} + +// Test 3: Multiple consumers waiting for the same metric +{ + const consumer1 = metrics.createConsumer({ + 'test.late.multi': { aggregation: 'sum' }, + }); + const consumer2 = metrics.createConsumer({ + 'test.late.multi': { aggregation: 'lastValue' }, + }); + + // Create the metric after both consumers + const m = metrics.create('test.late.multi'); + m.record(50); + m.record(75); + + // Both consumers should receive the values with their own aggregation + const result1 = consumer1.collect(); + const result2 = consumer2.collect(); + + assert.strictEqual(result1[0].dataPoints[0].sum, 125); // 50 + 75 + assert.strictEqual(result2[0].dataPoints[0].value, 75); // last value + + consumer1.close(); + consumer2.close(); +} + +// Test 4: Consumer created for non-existent metric, then closed before metric exists +{ + const consumer = metrics.createConsumer({ + 'test.late.never': { aggregation: 'sum' }, + }); + + // Close consumer before metric is created + consumer.close(); + + // Now create the metric - should not crash + const m = metrics.create('test.late.never'); + m.record(100); + + // No consumer to verify, just ensure no crash +} + +// Test 5: Observable metric created after consumer +{ + let observableValue = 42; + + const consumer = metrics.createConsumer({ + 'test.late.observable': { aggregation: 'lastValue' }, + }); + + // Create observable metric after consumer + metrics.create('test.late.observable', { + observable: (metric) => { metric.record(observableValue); }, + }); + + // Collect should sample the observable + let result = consumer.collect(); + assert.strictEqual(result.length, 1); + assert.strictEqual(result[0].dataPoints[0].value, 42); + + // Update and collect again + observableValue = 100; + result = consumer.collect(); + assert.strictEqual(result[0].dataPoints[0].value, 100); + + consumer.close(); +} diff --git a/test/parallel/test-metrics-lifecycle.js b/test/parallel/test-metrics-lifecycle.js new file mode 100644 index 00000000000000..7ff5ff8bf172b3 --- /dev/null +++ b/test/parallel/test-metrics-lifecycle.js @@ -0,0 +1,265 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that metric lifecycle methods (close, isClosed) work correctly +// and that consumers are properly notified when metrics are closed. + +{ + // Test: metric.close() removes metric from registry + const metric = metrics.create('test.close.basic', { + description: 'Test close basic behavior', + }); + + assert.strictEqual(metric.isClosed, false); + + const consumer = metrics.createConsumer({ + 'test.close.basic': {}, + }); + + metric.record(10); + const before = consumer.collect(); + assert.strictEqual(before.length, 1); + + metric.close(); + assert.strictEqual(metric.isClosed, true); + + const after = consumer.collect(); + assert.strictEqual(after.length, 0); + + consumer.close(); +} + +{ + // Test: Double close is idempotent + const metric = metrics.create('test.double.close', { + description: 'Test double close', + }); + + assert.strictEqual(metric.isClosed, false); + metric.close(); + assert.strictEqual(metric.isClosed, true); + + // Second close should not throw + metric.close(); + assert.strictEqual(metric.isClosed, true); +} + +{ + // Test: Recording to closed metric (values ignored, no error) + const metric = metrics.create('test.record.after.close', { + description: 'Test record after close', + }); + + const consumer = metrics.createConsumer({ + 'test.record.after.close': {}, + }); + + metric.record(10); + metric.close(); + + // Recording after close should not throw + metric.record(20); + metric.record(30); + + const collected = consumer.collect(); + // Metric was closed, so nothing should be collected + assert.strictEqual(collected.length, 0); + + consumer.close(); +} + +{ + // Test: Starting timer on closed metric + const metric = metrics.create('test.timer.after.close', { + description: 'Test timer after close', + }); + + const consumer = metrics.createConsumer({ + 'test.timer.after.close': {}, + }); + + metric.close(); + + // Timer should work but recording will be ignored + const timer = metric.startTimer(); + timer.stop(); + + const collected = consumer.collect(); + assert.strictEqual(collected.length, 0); + + consumer.close(); +} + +{ + // Test: Sampling observable after metric closed + const metric = metrics.create('test.observable.after.close', { + description: 'Test observable after close', + observable: (m) => { m.record(100); }, + }); + + const consumer = metrics.createConsumer({ + 'test.observable.after.close': { + aggregation: 'lastValue', + }, + }); + + const before = consumer.collect(); + assert.strictEqual(before.length, 1); + assert.strictEqual(before[0].dataPoints[0].value, 100); + + metric.close(); + + const after = consumer.collect(); + // Metric is closed, should not be collected + assert.strictEqual(after.length, 0); + + consumer.close(); +} + +{ + // Test: Creating new metric with same name after close + const metric1 = metrics.create('test.recreate', { + description: 'Original metric', + }); + + const consumer = metrics.createConsumer({ + 'test.recreate': {}, + }); + + metric1.record(10); + const first = consumer.collect(); + assert.strictEqual(first.length, 1); + assert.strictEqual(first[0].dataPoints[0].sum, 10); + + metric1.close(); + + // Create new consumer for the new metric + const consumer2 = metrics.createConsumer({ + 'test.recreate': {}, + }); + + // Create new metric with same name + const metric2 = metrics.create('test.recreate', { + description: 'New metric instance', + }); + + // Should be a fresh instance + assert.notStrictEqual(metric1, metric2); + assert.strictEqual(metric1.isClosed, true); + assert.strictEqual(metric2.isClosed, false); + + metric2.record(50); + + const second = consumer2.collect(); + assert.strictEqual(second.length, 1); + // Should have only the new recording + assert.strictEqual(second[0].dataPoints[0].sum, 50); + + metric2.close(); + consumer.close(); + consumer2.close(); +} + +{ + // Test: Consumer notified when metric closes + let onClosedCalled = false; + let closedMetricName = null; + + const metric = metrics.create('test.consumer.notification', { + description: 'Test consumer notification on close', + }); + + const consumer = metrics.createConsumer({ + 'test.consumer.notification': {}, + }); + + // Override onMetricClosed to capture notification + const originalOnClosed = consumer.onMetricClosed; + consumer.onMetricClosed = function(closedMetric) { + onClosedCalled = true; + closedMetricName = closedMetric.descriptor.name; + if (originalOnClosed) { + originalOnClosed.call(this, closedMetric); + } + }; + + assert.strictEqual(onClosedCalled, false); + + metric.close(); + + // Consumer should have been notified + assert.strictEqual(onClosedCalled, true); + assert.strictEqual(closedMetricName, 'test.consumer.notification'); + + consumer.close(); +} + +{ + // Test: Multiple consumers notified when metric closes + const metric = metrics.create('test.multi.consumer.close', { + description: 'Test multiple consumers on close', + }); + + let consumer1Notified = false; + let consumer2Notified = false; + + const consumer1 = metrics.createConsumer({ + 'test.multi.consumer.close': {}, + }); + const consumer2 = metrics.createConsumer({ + 'test.multi.consumer.close': {}, + }); + + const origOnClosed1 = consumer1.onMetricClosed; + consumer1.onMetricClosed = function(closedMetric) { + consumer1Notified = true; + if (origOnClosed1) { + origOnClosed1.call(this, closedMetric); + } + }; + + const origOnClosed2 = consumer2.onMetricClosed; + consumer2.onMetricClosed = function(closedMetric) { + consumer2Notified = true; + if (origOnClosed2) { + origOnClosed2.call(this, closedMetric); + } + }; + + metric.close(); + + assert.strictEqual(consumer1Notified, true); + assert.strictEqual(consumer2Notified, true); + + consumer1.close(); + consumer2.close(); +} + +{ + // Test: Consumer correctly cleans up after metric closes + const metric = metrics.create('test.cleanup.after.close', { + description: 'Test cleanup after close', + }); + + const consumer = metrics.createConsumer({ + 'test.cleanup.after.close': {}, + }); + + metric.record(10); + const before = consumer.collect(); + assert.strictEqual(before.length, 1); + + metric.close(); + + // Collect after close should return empty array + const after = consumer.collect(); + assert.strictEqual(after.length, 0); + + // Multiple collects should still work + const after2 = consumer.collect(); + assert.strictEqual(after2.length, 0); + + consumer.close(); +} diff --git a/test/parallel/test-metrics-metric-report.js b/test/parallel/test-metrics-metric-report.js deleted file mode 100644 index 294e15633a38aa..00000000000000 --- a/test/parallel/test-metrics-metric-report.js +++ /dev/null @@ -1,18 +0,0 @@ -'use strict'; - -require('../common'); - -const assert = require('assert'); -const { metrics } = require('node:perf_hooks'); -const { MetricReport } = metrics; - -const report = new MetricReport('counter', 'test-counter', 123, { - meta: 'test' -}); - -assert.ok(report instanceof MetricReport); -assert.strictEqual(report.type, 'counter'); -assert.strictEqual(report.name, 'test-counter'); -assert.strictEqual(report.value, 123); -assert.deepStrictEqual(report.meta, { meta: 'test' }); -assert.ok(report.time > 0); diff --git a/test/parallel/test-metrics-metric.js b/test/parallel/test-metrics-metric.js deleted file mode 100644 index 29fe5201a9127d..00000000000000 --- a/test/parallel/test-metrics-metric.js +++ /dev/null @@ -1,28 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { Metric, MetricReport } = metrics; - -const metric = new Metric('counter', 'test-counter', { base: 'test' }); - -assert.ok(metric instanceof Metric); -assert.strictEqual(metric.type, 'counter'); -assert.strictEqual(metric.name, 'test-counter'); -assert.deepStrictEqual(metric.meta, { base: 'test' }); -assert.strictEqual(metric.channelName, 'metrics:counter:test-counter'); - -subscribe(metric.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'counter'); - assert.strictEqual(report.name, 'test-counter'); - assert.ok(report.time > 0); - - assert.strictEqual(report.value, 123); - assert.deepStrictEqual(report.meta, { base: 'test', meta: 'test' }); -})); - -metric.report(123, { meta: 'test' }); diff --git a/test/parallel/test-metrics-multi-consumer.js b/test/parallel/test-metrics-multi-consumer.js new file mode 100644 index 00000000000000..fff42616a1d4ac --- /dev/null +++ b/test/parallel/test-metrics-multi-consumer.js @@ -0,0 +1,71 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test multiple consumers with different configs for same metric +const m = metrics.create('test.multi', { unit: 'count' }); + +// Consumer 1: Sum aggregation +const consumer1 = metrics.createConsumer({ + 'test.multi': { aggregation: 'sum' }, +}); + +// Consumer 2: LastValue aggregation +const consumer2 = metrics.createConsumer({ + 'test.multi': { aggregation: 'lastValue' }, +}); + +// Consumer 3: Histogram aggregation +const consumer3 = metrics.createConsumer({ + 'test.multi': { aggregation: 'histogram', boundaries: [5, 10, 20] }, +}); + +// Record values +m.record(3); +m.record(7); +m.record(15); +m.record(25); + +// Each consumer should have different interpretation +const result1 = consumer1.collect(); +const result2 = consumer2.collect(); +const result3 = consumer3.collect(); + +// Consumer 1: Sum = 50 +assert.strictEqual(result1[0].dataPoints[0].sum, 50); +assert.strictEqual(result1[0].dataPoints[0].count, 4); + +// Consumer 2: LastValue = 25 +assert.strictEqual(result2[0].dataPoints[0].value, 25); + +// Consumer 3: Histogram with 4 buckets +const hist = result3[0].dataPoints[0]; +assert.strictEqual(hist.count, 4); +assert.strictEqual(hist.buckets[0].count, 1); // <= 5: value 3 +assert.strictEqual(hist.buckets[1].count, 1); // <= 10: value 7 +assert.strictEqual(hist.buckets[2].count, 1); // <= 20: value 15 +assert.strictEqual(hist.buckets[3].count, 1); // > 20: value 25 + +consumer1.close(); +consumer2.close(); +consumer3.close(); + +// Test consumers only receive values after they're created +const m2 = metrics.create('test.multi.order'); + +m2.record(10); // Before consumer creation + +const consumer4 = metrics.createConsumer({ + 'test.multi.order': { aggregation: 'sum' }, +}); + +m2.record(20); // After consumer creation + +const result4 = consumer4.collect(); +// Consumer should only see the value recorded after it was created +assert.strictEqual(result4[0].dataPoints[0].sum, 20); + +consumer4.close(); diff --git a/test/parallel/test-metrics-observable-errors.js b/test/parallel/test-metrics-observable-errors.js new file mode 100644 index 00000000000000..eebf7782f3582d --- /dev/null +++ b/test/parallel/test-metrics-observable-errors.js @@ -0,0 +1,170 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that errors in observable callbacks are handled correctly. +// Note: Tests that trigger uncaughtException are verified in +// test-metrics-observable-exception.js + +{ + // Test: Observable not calling record (means no value) + const metric = metrics.create('test.return.null', { + description: 'Test no record call', + observable: () => { + // No record() call — no value + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.null': {}, + }); + + // No call to record() means no data points + const collected = consumer.collect(); + assert.strictEqual(collected.length, 0); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable not calling record (means no value) + const metric = metrics.create('test.return.undefined', { + description: 'Test no record call 2', + observable: () => { + // No record() call — no value + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.undefined': {}, + }); + + // No call to record() means no data points + const collected = consumer.collect(); + assert.strictEqual(collected.length, 0); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable recording NaN (valid number type) + const metric = metrics.create('test.return.nan', { + description: 'Test NaN return', + observable: (m) => { + m.record(NaN); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.nan': { + aggregation: 'lastValue', + }, + }); + + // NaN is a number type, so it works (though value will be NaN) + const collected = consumer.collect(); + assert.strictEqual(collected.length, 1); + assert(Number.isNaN(collected[0].dataPoints[0].value)); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable recording Infinity (valid number) + const metric = metrics.create('test.return.infinity', { + description: 'Test Infinity return', + observable: (m) => { + m.record(Infinity); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.infinity': { + aggregation: 'lastValue', + }, + }); + + // Infinity is a valid number + const collected = consumer.collect(); + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints[0].value, Infinity); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable recording negative Infinity + const metric = metrics.create('test.return.neg.infinity', { + description: 'Test -Infinity return', + observable: (m) => { + m.record(-Infinity); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.neg.infinity': { + aggregation: 'lastValue', + }, + }); + + const collected = consumer.collect(); + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints[0].value, -Infinity); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable recording 0 + const metric = metrics.create('test.return.zero', { + description: 'Test zero return', + observable: (m) => { + m.record(0); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.zero': { + aggregation: 'lastValue', + }, + }); + + const collected = consumer.collect(); + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints[0].value, 0); + + consumer.close(); + metric.close(); +} + +{ + // Test: Observable recording negative number + const metric = metrics.create('test.return.negative', { + description: 'Test negative return', + observable: (m) => { + m.record(-42); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.return.negative': { + aggregation: 'lastValue', + }, + }); + + const collected = consumer.collect(); + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints[0].value, -42); + + consumer.close(); + metric.close(); +} + +// Note: Tests for observable callbacks that throw errors are in +// test-metrics-observable-exception.js diff --git a/test/parallel/test-metrics-observable-exception.js b/test/parallel/test-metrics-observable-exception.js new file mode 100644 index 00000000000000..54354689773131 --- /dev/null +++ b/test/parallel/test-metrics-observable-exception.js @@ -0,0 +1,67 @@ +'use strict'; + +// Test that observable callback exceptions are surfaced via uncaughtException +// (like diagnostics_channel) rather than being silently swallowed. +// The collection should continue despite the error. + +const common = require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test observable that throws +let throwOnCall = true; +metrics.create('test.observable.throws', { + observable: (m) => { + if (throwOnCall) { + throw new Error('Observable callback error'); + } + m.record(42); + }, +}); + +// Also create a normal observable to ensure it still works +let normalValue = 100; +metrics.create('test.observable.normal', { + observable: (m) => { m.record(normalValue); }, +}); + +const consumer = metrics.createConsumer({ + 'test.observable.throws': { aggregation: 'lastValue' }, + 'test.observable.normal': { aggregation: 'lastValue' }, +}); + +// Set up uncaughtException handler to catch the error +// (like diagnostics_channel behavior) +process.on('uncaughtException', common.mustCall((err) => { + assert.strictEqual(err.message, 'Observable callback error'); +}, 1)); + +// First collect - the throwing observable should trigger uncaughtException +// but the normal observable should still work +const result1 = consumer.collect(); + +// Should have one data point from the normal observable +// (the throwing one didn't produce a value) +assert.strictEqual(result1.length, 1); +assert.strictEqual(result1[0].descriptor.name, 'test.observable.normal'); +assert.strictEqual(result1[0].dataPoints[0].value, 100); + +// Now stop throwing +throwOnCall = false; +normalValue = 200; + +// Give nextTick a chance to fire the uncaughtException +setImmediate(() => { + // Second collect - both should work now + const result2 = consumer.collect(); + assert.strictEqual(result2.length, 2); + + const throwsMetric = result2.find((s) => s.descriptor.name === 'test.observable.throws'); + const normalMetric = result2.find((s) => s.descriptor.name === 'test.observable.normal'); + + assert.strictEqual(throwsMetric.dataPoints[0].value, 42); + assert.strictEqual(normalMetric.dataPoints[0].value, 200); + + consumer.close(); +}); diff --git a/test/parallel/test-metrics-observable-isolation.js b/test/parallel/test-metrics-observable-isolation.js new file mode 100644 index 00000000000000..7c5e5f97e34a16 --- /dev/null +++ b/test/parallel/test-metrics-observable-isolation.js @@ -0,0 +1,57 @@ +'use strict'; + +// Test that observable metrics maintain consumer isolation. +// Each consumer should only see values when IT calls collect(), +// not when other consumers collect. + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +let callCount = 0; +// Metric created to register the observable - consumers subscribe to it +metrics.create('test.observable.isolation', { + observable: (metric) => { + callCount++; + metric.record(callCount * 10); + }, +}); + +// Create two consumers for the same observable metric +const consumer1 = metrics.createConsumer({ + 'test.observable.isolation': { aggregation: 'lastValue' }, +}); + +const consumer2 = metrics.createConsumer({ + 'test.observable.isolation': { aggregation: 'lastValue' }, +}); + +// Initial state - no collects yet +assert.strictEqual(callCount, 0); + +// Consumer1 collects - observable should be called once for consumer1's subscriber +const result1 = consumer1.collect(); +assert.strictEqual(callCount, 1); +assert.strictEqual(result1.length, 1); +assert.strictEqual(result1[0].dataPoints[0].value, 10); + +// Consumer2 collects - observable should be called once for consumer2's subscriber +// NOT twice (which would happen if consumer1's collect triggered consumer2's subscriber) +const result2 = consumer2.collect(); +assert.strictEqual(callCount, 2); +assert.strictEqual(result2.length, 1); +assert.strictEqual(result2[0].dataPoints[0].value, 20); + +// Consumer1 collects again - should only increment by 1 +const result3 = consumer1.collect(); +assert.strictEqual(callCount, 3); +assert.strictEqual(result3[0].dataPoints[0].value, 30); + +// Consumer2 collects again - should only increment by 1 +const result4 = consumer2.collect(); +assert.strictEqual(callCount, 4); +assert.strictEqual(result4[0].dataPoints[0].value, 40); + +consumer1.close(); +consumer2.close(); diff --git a/test/parallel/test-metrics-observable-observer-api.js b/test/parallel/test-metrics-observable-observer-api.js new file mode 100644 index 00000000000000..40669154999590 --- /dev/null +++ b/test/parallel/test-metrics-observable-observer-api.js @@ -0,0 +1,216 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that the facade record() multi-value API works correctly +// for observable metrics, including validation and aggregation behavior. + +{ + // Test: Multi-value reporting via facade record() with different attributes + const metric = metrics.create('test.multi.value', { + description: 'Test multi-value observe API', + observable: (metric) => { + metric.record(10, { key: 'a' }); + metric.record(20, { key: 'b' }); + metric.record(30, { key: 'c' }); + }, + }); + + const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { + 'test.multi.value': { + aggregation: 'lastValue', + }, + }, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + const points = collected[0].dataPoints; + assert.strictEqual(points.length, 3); + + const values = points.map((p) => ({ value: p.value, key: p.attributes.key })) + .sort((a, b) => a.key.localeCompare(b.key)); + + assert.deepStrictEqual(values, [ + { value: 10, key: 'a' }, + { value: 20, key: 'b' }, + { value: 30, key: 'c' }, + ]); +} + +{ + // Test: facade record() with bigint values and attributes + const metric = metrics.create('test.bigint.observe', { + description: 'Test bigint values', + observable: (metric) => { + metric.record(100n, { type: 'small' }); + metric.record(9007199254740991n, { type: 'large' }); + }, + }); + + const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { + 'test.bigint.observe': {}, + }, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + const points = collected[0].dataPoints; + assert.strictEqual(points.length, 2); + + const largeValue = points.find((p) => p.attributes.type === 'large'); + assert.strictEqual(largeValue.sum, 9007199254740991n); +} + +{ + // Test: facade record() called multiple times with same attributes aggregates + const metric = metrics.create('test.same.attrs', { + description: 'Test same attributes', + observable: (metric) => { + metric.record(5, { env: 'prod' }); + metric.record(10, { env: 'prod' }); + metric.record(15, { env: 'prod' }); + }, + }); + + const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { + 'test.same.attrs': {}, + }, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + const points = collected[0].dataPoints; + // Should aggregate to single point with same attributes + assert.strictEqual(points.length, 1); + assert.strictEqual(points[0].sum, 30); + assert.strictEqual(points[0].count, 3); + assert.deepStrictEqual(points[0].attributes, { env: 'prod' }); +} + +{ + // Test: Empty observable (neither return nor observe calls) + const metric = metrics.create('test.empty.observable', { + description: 'Test empty observable', + observable: () => { + // No observe calls, no return value + }, + }); + + const consumer = metrics.createConsumer({ + 'test.empty.observable': {}, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + // Should have no data when observable produces nothing + assert.strictEqual(collected.length, 0); +} + +{ + // Test: facade record() with histogram aggregation + const metric = metrics.create('test.histogram.observe', { + description: 'Test histogram with observe', + observable: (metric) => { + metric.record(5, { bucket: 'low' }); + metric.record(15, { bucket: 'mid' }); + metric.record(25, { bucket: 'high' }); + }, + }); + + const consumer = metrics.createConsumer({ + groupByAttributes: true, + metrics: { + 'test.histogram.observe': { + aggregation: 'histogram', + }, + }, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + const points = collected[0].dataPoints; + assert.strictEqual(points.length, 3); + + // Verify each point has histogram-specific fields + points.forEach((point) => { + assert(Array.isArray(point.buckets)); + assert.strictEqual(typeof point.count, 'number'); + assert.strictEqual(typeof point.sum, 'number'); + assert.strictEqual(typeof point.min, 'number'); + assert.strictEqual(typeof point.max, 'number'); + }); +} + +{ + // Test: facade record() with no attributes defaults to empty object + const metric = metrics.create('test.no.attrs', { + description: 'Test no attributes', + observable: (metric) => { + metric.record(42); + }, + }); + + const consumer = metrics.createConsumer({ + 'test.no.attrs': {}, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints.length, 1); + assert.strictEqual(collected[0].dataPoints[0].sum, 42); + // Attributes should be empty (kEmptyObject) + assert.strictEqual(Object.keys(collected[0].dataPoints[0].attributes).length, 0); +} + +{ + // Test: Without groupByAttributes, all values aggregate together + const metric = metrics.create('test.no.grouping', { + description: 'Test without groupByAttributes', + observable: (metric) => { + metric.record(10, { key: 'a' }); + metric.record(20, { key: 'b' }); + metric.record(30, { key: 'c' }); + }, + }); + + const consumer = metrics.createConsumer({ + // No groupByAttributes + 'test.no.grouping': {}, + }); + + const collected = consumer.collect(); + consumer.close(); + metric.close(); + + assert.strictEqual(collected.length, 1); + assert.strictEqual(collected[0].dataPoints.length, 1); + assert.strictEqual(collected[0].dataPoints[0].sum, 60); + assert.strictEqual(collected[0].dataPoints[0].count, 3); + // Attributes are ignored without groupByAttributes + assert.strictEqual(Object.keys(collected[0].dataPoints[0].attributes).length, 0); +} diff --git a/test/parallel/test-metrics-observable.js b/test/parallel/test-metrics-observable.js new file mode 100644 index 00000000000000..bdcb92f6a67d74 --- /dev/null +++ b/test/parallel/test-metrics-observable.js @@ -0,0 +1,67 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test observable metric +let gaugeValue = 42; +const m = metrics.create('test.observable', { + unit: '{count}', + description: 'Observable test', + observable: (metric) => { metric.record(gaugeValue); }, +}); + +assert.strictEqual(m.isObservable, true); + +// Consumer for observable +const consumer = metrics.createConsumer({ + 'test.observable': { aggregation: 'lastValue' }, +}); + +// First collect +let result = consumer.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].dataPoints[0].value, 42); + +// Update value and collect again +gaugeValue = 100; +result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 100); + +// Value changes between collects +gaugeValue = 0; +result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].value, 0); + +consumer.close(); + +// Test observable that returns null/undefined (should not record) +let maybeValue = null; +metrics.create('test.observable.null', { + observable: (metric) => { + if (maybeValue != null) metric.record(maybeValue); + }, +}); + +const consumer2 = metrics.createConsumer({ + 'test.observable.null': { aggregation: 'lastValue' }, +}); + +// Collect when observable returns null - should have no data points +result = consumer2.collect(); +assert.strictEqual(result.length, 0); + +// Now return a value +maybeValue = 50; +result = consumer2.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].dataPoints[0].value, 50); + +consumer2.close(); + +// Test validation - observable must be a function +assert.throws(() => metrics.create('test.bad.observable', { observable: 'not a function' }), { + code: 'ERR_INVALID_ARG_TYPE', +}); diff --git a/test/parallel/test-metrics-pull-gauge.js b/test/parallel/test-metrics-pull-gauge.js deleted file mode 100644 index 3b8ae40ff8cbb1..00000000000000 --- a/test/parallel/test-metrics-pull-gauge.js +++ /dev/null @@ -1,61 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { createPullGauge, PullGauge, Metric, MetricReport } = metrics; - -// Test values to return from the pull function -const values = [ 1, 5, 10, 4, 6 ]; -let currentIndex = 0; - -const testPullGauge = createPullGauge('test', () => { - return values[currentIndex]; -}, { base: 'test' }); - -assert.ok(testPullGauge instanceof PullGauge); -assert.ok(testPullGauge instanceof Metric); -assert.strictEqual(testPullGauge.value, 0); - -assert.strictEqual(testPullGauge.type, 'pullGauge'); -assert.strictEqual(testPullGauge.name, 'test'); -assert.deepStrictEqual(testPullGauge.meta, { base: 'test' }); -assert.strictEqual(testPullGauge.channelName, 'metrics:pullGauge:test'); - -// Subscribe to metric reports -let reportCount = 0; -subscribe(testPullGauge.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'pullGauge'); - assert.strictEqual(report.name, 'test'); - assert.ok(report.time > 0); - assert.strictEqual(report.value, values[reportCount]); - - if (reportCount < values.length - 1) { - assert.deepStrictEqual(report.meta, { base: 'test' }); - } else { - // Last sample includes additional metadata - assert.deepStrictEqual(report.meta, { base: 'test', extra: 'metadata' }); - } - - reportCount++; -}, values.length)); - -// Test sampling -for (let i = 0; i < values.length; i++) { - currentIndex = i; - - if (i === values.length - 1) { - // Test sampling with additional metadata - const value = testPullGauge.sample({ extra: 'metadata' }); - assert.strictEqual(value, values[i]); - } else { - const value = testPullGauge.sample(); - assert.strictEqual(value, values[i]); - } -} - -// Verify all reports were received -assert.strictEqual(reportCount, values.length); diff --git a/test/parallel/test-metrics-record.js b/test/parallel/test-metrics-record.js new file mode 100644 index 00000000000000..49aad72aac961e --- /dev/null +++ b/test/parallel/test-metrics-record.js @@ -0,0 +1,58 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test recording values dispatches to consumers +const m = metrics.create('test.record', { unit: 'count' }); + +const consumer = metrics.createConsumer({ + 'test.record': { aggregation: 'sum' }, +}); + +// Record values +m.record(10); +m.record(20); +m.record(30); + +// Collect and verify +const result = consumer.collect(); +assert.strictEqual(result.length, 1); +assert.strictEqual(result[0].descriptor.name, 'test.record'); +assert.strictEqual(result[0].dataPoints.length, 1); +assert.strictEqual(result[0].dataPoints[0].sum, 60); +assert.strictEqual(result[0].dataPoints[0].count, 3); + +consumer.close(); + +// Test recording with attributes (groupByAttributes: true enables attribute differentiation) +const m2 = metrics.create('test.record.attrs'); +const consumer2 = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.record.attrs': { aggregation: 'sum' }, +}); + +m2.record(1, { method: 'GET' }); +m2.record(2, { method: 'POST' }); +m2.record(3, { method: 'GET' }); + +const result2 = consumer2.collect(); +assert.strictEqual(result2[0].dataPoints.length, 2); + +// Find GET and POST data points +const getData = result2[0].dataPoints.find((dp) => dp.attributes.method === 'GET'); +const postData = result2[0].dataPoints.find((dp) => dp.attributes.method === 'POST'); + +assert.strictEqual(getData.sum, 4); // 1 + 3 +assert.strictEqual(getData.count, 2); +assert.strictEqual(postData.sum, 2); +assert.strictEqual(postData.count, 1); + +consumer2.close(); + +// Test validation +assert.throws(() => m.record('not a number'), { + code: 'ERR_INVALID_ARG_TYPE', +}); diff --git a/test/parallel/test-metrics-singleton.js b/test/parallel/test-metrics-singleton.js new file mode 100644 index 00000000000000..2a953e6052d022 --- /dev/null +++ b/test/parallel/test-metrics-singleton.js @@ -0,0 +1,131 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that the singleton pattern works correctly for metrics. +// Note: Warning emission for mismatched options is verified separately in manual testing. + +{ + // Test: Creating metric twice with same name returns same instance + const metric1 = metrics.create('test.singleton.basic', { + description: 'Test singleton', + unit: 'count', + }); + + const metric2 = metrics.create('test.singleton.basic', { + description: 'Test singleton', + unit: 'count', + }); + + assert.strictEqual(metric1, metric2); + metric1.close(); +} + +{ + // Test: No warning when options match + const metric1 = metrics.create('test.singleton.no.warning', { + description: 'Same options', + unit: 'count', + }); + + const metric2 = metrics.create('test.singleton.no.warning', { + description: 'Same options', + unit: 'count', + }); + + // Should be same instance + assert.strictEqual(metric1, metric2); + metric1.close(); +} + +{ + // Test: Creating metric twice with different unit returns same instance (with warning) + const metric1 = metrics.create('test.singleton.unit', { + description: 'Test unit', + unit: 'count', + }); + + const metric2 = metrics.create('test.singleton.unit', { + description: 'Test unit', + unit: 'bytes', // Different unit + }); + + // Still returns same instance despite different unit + assert.strictEqual(metric1, metric2); + // Original unit is preserved + assert.strictEqual(metric1.descriptor.unit, 'count'); + metric1.close(); +} + +{ + // Test: Creating metric twice with different description returns same instance + const metric1 = metrics.create('test.singleton.description', { + description: 'First description', + unit: 'count', + }); + + const metric2 = metrics.create('test.singleton.description', { + description: 'Second description', // Different + unit: 'count', + }); + + assert.strictEqual(metric1, metric2); + // Original description is preserved + assert.strictEqual(metric1.descriptor.description, 'First description'); + metric1.close(); +} + +{ + // Test: Creating metric twice with different observable status returns same instance + const metric1 = metrics.create('test.singleton.observable', { + description: 'Test observable', + }); + + const metric2 = metrics.create('test.singleton.observable', { + description: 'Test observable', + observable: (m) => { m.record(100); }, + }); + + assert.strictEqual(metric1, metric2); + // Original (non-observable) status is preserved + assert.strictEqual(metric1.isObservable, false); + metric1.close(); +} + +{ + // Test: Multiple option mismatches still return same instance + const metric1 = metrics.create('test.singleton.multiple', { + description: 'First', + unit: 'count', + }); + + const metric2 = metrics.create('test.singleton.multiple', { + description: 'Second', + unit: 'bytes', + }); + + assert.strictEqual(metric1, metric2); + // Original options are preserved + assert.strictEqual(metric1.descriptor.description, 'First'); + assert.strictEqual(metric1.descriptor.unit, 'count'); + metric1.close(); +} + +{ + // Test: Singleton works after metric is closed and recreated + const metric1 = metrics.create('test.singleton.recreate', { + description: 'Original', + }); + + metric1.close(); + + // Creating new metric with same name after close creates new instance + const metric2 = metrics.create('test.singleton.recreate', { + description: 'New', + }); + + assert.notStrictEqual(metric1, metric2); + assert.strictEqual(metric2.descriptor.description, 'New'); + metric2.close(); +} diff --git a/test/parallel/test-metrics-timer-dispose.js b/test/parallel/test-metrics-timer-dispose.js new file mode 100644 index 00000000000000..20a0ef397ad2a9 --- /dev/null +++ b/test/parallel/test-metrics-timer-dispose.js @@ -0,0 +1,36 @@ +'use strict'; + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Test Timer Symbol.dispose with `using` syntax +const m = metrics.create('test.timer.dispose', { unit: 'ms' }); + +const consumer = metrics.createConsumer({ + 'test.timer.dispose': { aggregation: 'sum' }, +}); + +// Test using syntax (Symbol.dispose) +{ + // eslint-disable-next-line no-unused-vars + using _timer = m.startTimer(); + // Do some work + for (let i = 0; i < 10000; i++) { + // Simulate work + } + // Timer should auto-stop when leaving block +} + +// Verify value was recorded +const result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].count, 1); +assert.ok(result[0].dataPoints[0].sum > 0); + +consumer.close(); + +// Test that Symbol.dispose is defined +const timer = m.startTimer(); +assert.strictEqual(typeof timer[Symbol.dispose], 'function'); +timer.stop(); diff --git a/test/parallel/test-metrics-timer-readonly.js b/test/parallel/test-metrics-timer-readonly.js deleted file mode 100644 index 8941cd020161d4..00000000000000 --- a/test/parallel/test-metrics-timer-readonly.js +++ /dev/null @@ -1,51 +0,0 @@ -'use strict'; - -require('../common'); -const assert = require('assert'); -const { metrics } = require('node:perf_hooks'); -const { Timer } = metrics; - -// Test that Timer properties are read-only -const timer = new Timer(() => {}); - -// Verify initial values -assert.ok(typeof timer.start === 'number'); -assert.ok(timer.start > 0); -assert.strictEqual(timer.end, undefined); -assert.strictEqual(timer.duration, undefined); - -// Try to modify properties (should throw) -const originalStart = timer.start; -assert.throws(() => { - timer.start = 0; -}, TypeError); -assert.strictEqual(timer.start, originalStart); // Should remain unchanged - -assert.throws(() => { - timer.end = 123; -}, TypeError); -assert.strictEqual(timer.end, undefined); // Should remain undefined - -assert.throws(() => { - timer.duration = 456; -}, TypeError); -assert.strictEqual(timer.duration, undefined); // Should remain undefined - -// Stop the timer and verify values are still read-only -timer.stop(); -assert.ok(typeof timer.end === 'number'); -assert.ok(typeof timer.duration === 'number'); -assert.ok(timer.end > timer.start); -assert.ok(timer.duration > 0); - -// Try to modify after stopping (should throw) -const stoppedEnd = timer.end; -const stoppedDuration = timer.duration; -assert.throws(() => { - timer.end = 0; -}, TypeError); -assert.throws(() => { - timer.duration = 0; -}, TypeError); -assert.strictEqual(timer.end, stoppedEnd); -assert.strictEqual(timer.duration, stoppedDuration); diff --git a/test/parallel/test-metrics-timer-with-gauge.js b/test/parallel/test-metrics-timer-with-gauge.js deleted file mode 100644 index fa0389bd5876c8..00000000000000 --- a/test/parallel/test-metrics-timer-with-gauge.js +++ /dev/null @@ -1,53 +0,0 @@ -'use strict'; - -const common = require('../common'); - -const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { createGauge, Gauge, Timer, MetricReport } = metrics; - -// Create a gauge for timing -const testGauge = createGauge('test.response.time', { base: 'test' }); -assert.ok(testGauge instanceof Gauge); - -assert.strictEqual(testGauge.type, 'gauge'); -assert.strictEqual(testGauge.name, 'test.response.time'); -assert.deepStrictEqual(testGauge.meta, { base: 'test' }); -assert.strictEqual(testGauge.channelName, 'metrics:gauge:test.response.time'); - -// Create timers from the gauge -const a = testGauge.createTimer({ timer: 'a', meta: 'extra' }); -const b = testGauge.createTimer({ timer: 'b' }); - -assert.ok(a instanceof Timer); -assert.ok(b instanceof Timer); - -const messages = [ - [50, { base: 'test', timer: 'a', meta: 'extra' }], - [100, { base: 'test', timer: 'b' }], -]; - -subscribe(testGauge.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'gauge'); - assert.strictEqual(report.name, 'test.response.time'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.ok(near(report.value, value)); - assert.deepStrictEqual(report.meta, meta); -}, 2)); - -// NOTE: If this test is flaky, tune the threshold to give more leeway to the timing -function near(actual, expected, threshold = 10) { - return Math.abs(actual - expected) <= threshold; -} - -setTimeout(() => { - a.stop(); -}, 50); - -setTimeout(() => { - b[Symbol.dispose](); -}, 100); diff --git a/test/parallel/test-metrics-timer.js b/test/parallel/test-metrics-timer.js index 6ba597d3255ba7..0965ba3d94e6e1 100644 --- a/test/parallel/test-metrics-timer.js +++ b/test/parallel/test-metrics-timer.js @@ -1,53 +1,59 @@ 'use strict'; -const common = require('../common'); +require('../common'); const assert = require('assert'); -const { subscribe } = require('node:diagnostics_channel'); -const { metrics } = require('node:perf_hooks'); -const { createCounter, Counter, Timer, MetricReport } = metrics; - -// Create a counter for timing -const testCounter = createCounter('test.duration', { base: 'test' }); -assert.ok(testCounter instanceof Counter); - -assert.strictEqual(testCounter.type, 'counter'); -assert.strictEqual(testCounter.name, 'test.duration'); -assert.deepStrictEqual(testCounter.meta, { base: 'test' }); -assert.strictEqual(testCounter.channelName, 'metrics:counter:test.duration'); - -// Create timers from the counter -const a = testCounter.createTimer({ timer: 'a', meta: 'extra' }); -const b = testCounter.createTimer({ timer: 'b' }); - -assert.ok(a instanceof Timer); -assert.ok(b instanceof Timer); - -const messages = [ - [50, { base: 'test', timer: 'a', meta: 'extra' }], - [100, { base: 'test', timer: 'b' }], -]; - -subscribe(testCounter.channelName, common.mustCall((report) => { - assert.ok(report instanceof MetricReport); - assert.strictEqual(report.type, 'counter'); - assert.strictEqual(report.name, 'test.duration'); - assert.ok(report.time > 0); - - const [value, meta] = messages.shift(); - assert.ok(near(report.value, value)); - assert.deepStrictEqual(report.meta, meta); -}, 2)); - -// NOTE: If this test is flaky, tune the threshold to give more leeway to the timing -function near(actual, expected, threshold = 10) { - return Math.abs(actual - expected) <= threshold; +const { metrics } = require('perf_hooks'); + +// Test Timer duration measurement +const m = metrics.create('test.timer', { unit: 'ms' }); + +const consumer = metrics.createConsumer({ + 'test.timer': { aggregation: 'sum' }, +}); + +// Start timer +const timer = m.startTimer(); +assert.ok(timer); + +// Do some work +for (let i = 0; i < 100000; i++) { + // Simulate work } -setTimeout(() => { - a.stop(); -}, 50); +// Stop timer and get duration +const duration = timer.stop(); +assert.ok(typeof duration === 'number'); +assert.ok(duration > 0); +assert.ok(duration < 10000); // Should be less than 10 seconds + +// Check consumer received the value +const result = consumer.collect(); +assert.strictEqual(result[0].dataPoints[0].count, 1); +assert.ok(result[0].dataPoints[0].sum > 0); + +consumer.close(); + +// Test timer with attributes (groupByAttributes: true enables attribute tracking) +const m2 = metrics.create('test.timer.attrs', { unit: 'ms' }); + +const consumer2 = metrics.createConsumer({ + 'groupByAttributes': true, + 'test.timer.attrs': { aggregation: 'sum' }, +}); + +const timer2 = m2.startTimer({ operation: 'test' }); +timer2.stop(); + +const result2 = consumer2.collect(); +assert.deepStrictEqual(result2[0].dataPoints[0].attributes, { operation: 'test' }); + +consumer2.close(); -setTimeout(() => { - b[Symbol.dispose](); -}, 100); +// Test double-stop throws +const m3 = metrics.create('test.timer.double'); +const timer3 = m3.startTimer(); +timer3.stop(); +assert.throws(() => timer3.stop(), { + code: 'ERR_INVALID_STATE', +}); diff --git a/test/parallel/test-metrics-timestamps.js b/test/parallel/test-metrics-timestamps.js new file mode 100644 index 00000000000000..d2c2a0aa227e1d --- /dev/null +++ b/test/parallel/test-metrics-timestamps.js @@ -0,0 +1,247 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that timestamp reference counting works correctly +// and that timestamps are only generated when needed. + +{ + // Test: Single consumer with lastValue enables timestamps + const metric = metrics.create('test.timestamp.single', { + description: 'Test single consumer timestamps', + }); + + const consumer = metrics.createConsumer({ + 'test.timestamp.single': { + aggregation: 'lastValue', + }, + }); + + metric.record(10); + const collected = consumer.collect(); + + assert.strictEqual(collected.length, 1); + const point = collected[0].dataPoints[0]; + assert.notStrictEqual(point.timestamp, undefined); + assert(typeof point.timestamp === 'bigint' || typeof point.timestamp === 'number'); + + consumer.close(); + metric.close(); +} + +{ + // Test: Multiple consumers with lastValue share timestamp generation + const metric = metrics.create('test.timestamp.multiple', { + description: 'Test multiple consumers timestamps', + }); + + const consumer1 = metrics.createConsumer({ + 'test.timestamp.multiple': { + aggregation: 'lastValue', + }, + }); + + const consumer2 = metrics.createConsumer({ + 'test.timestamp.multiple': { + aggregation: 'lastValue', + }, + }); + + metric.record(10); + + const collected1 = consumer1.collect(); + const collected2 = consumer2.collect(); + + assert.strictEqual(collected1.length, 1); + assert.strictEqual(collected2.length, 1); + + const point1 = collected1[0].dataPoints[0]; + const point2 = collected2[0].dataPoints[0]; + + assert.notStrictEqual(point1.timestamp, undefined); + assert.notStrictEqual(point2.timestamp, undefined); + + // Timestamps should be the same or very close + assert.strictEqual(typeof point1.timestamp, typeof point2.timestamp); + + consumer1.close(); + consumer2.close(); + metric.close(); +} + +{ + // Test: Consumer close decrements timestamp reference count + const metric = metrics.create('test.timestamp.refcount', { + description: 'Test timestamp reference counting', + }); + + const consumer1 = metrics.createConsumer({ + 'test.timestamp.refcount': { + aggregation: 'lastValue', + }, + }); + + const consumer2 = metrics.createConsumer({ + 'test.timestamp.refcount': { + aggregation: 'lastValue', + }, + }); + + metric.record(10); + + const before1 = consumer1.collect(); + assert.notStrictEqual(before1[0].dataPoints[0].timestamp, undefined); + + // Close one consumer + consumer1.close(); + + // Other consumer should still get timestamps + metric.record(20); + const after = consumer2.collect(); + assert.notStrictEqual(after[0].dataPoints[0].timestamp, undefined); + + consumer2.close(); + metric.close(); +} + +{ + // Test: All consumers closed disables timestamp generation + const metric = metrics.create('test.timestamp.disabled', { + description: 'Test timestamp disabled', + }); + + const consumer = metrics.createConsumer({ + 'test.timestamp.disabled': { + aggregation: 'lastValue', + }, + }); + + metric.record(10); + const with_ts = consumer.collect(); + assert.notStrictEqual(with_ts[0].dataPoints[0].timestamp, undefined); + + consumer.close(); + + // Create new consumer without lastValue + const consumer2 = metrics.createConsumer({ + 'test.timestamp.disabled': {}, + }); + + metric.record(20); + const without_ts = consumer2.collect(); + + // Without lastValue, timestamps should not be present + assert.strictEqual(without_ts[0].dataPoints[0].timestamp, undefined); + + consumer2.close(); + metric.close(); +} + +{ + // Test: Mix of consumers (some need timestamps, some don't) + const metric = metrics.create('test.timestamp.mixed', { + description: 'Test mixed timestamp needs', + }); + + const consumerWithTS = metrics.createConsumer({ + 'test.timestamp.mixed': { + aggregation: 'lastValue', + }, + }); + + const consumerWithoutTS = metrics.createConsumer({ + 'test.timestamp.mixed': {}, + }); + + metric.record(10); + + const with_ts = consumerWithTS.collect(); + const without_ts = consumerWithoutTS.collect(); + + assert.notStrictEqual(with_ts[0].dataPoints[0].timestamp, undefined); + assert.strictEqual(without_ts[0].dataPoints[0].timestamp, undefined); + + consumerWithTS.close(); + consumerWithoutTS.close(); + metric.close(); +} + +{ + // Test: Timestamp precision verification + const metric = metrics.create('test.timestamp.precision', { + description: 'Test timestamp precision', + }); + + const consumer = metrics.createConsumer({ + 'test.timestamp.precision': { + aggregation: 'lastValue', + }, + }); + + const { performance } = require('node:perf_hooks'); + const before = performance.now(); + metric.record(10); + const after = performance.now(); + + const collected = consumer.collect(); + const timestamp = collected[0].dataPoints[0].timestamp; + + // Timestamp should be between before and after (with small tolerance) + assert(timestamp >= before - 1); + assert(timestamp <= after + 1); + + consumer.close(); + metric.close(); +} + +{ + // Test: Timestamps with delta temporality + const metric = metrics.create('test.timestamp.delta', { + description: 'Test delta timestamps', + }); + + const consumer = metrics.createConsumer({ + 'test.timestamp.delta': { + temporality: 'delta', + aggregation: 'lastValue', + }, + }); + + metric.record(10); + const first = consumer.collect(); + + metric.record(20); + const second = consumer.collect(); + + // Both collections should have timestamps + assert.notStrictEqual(first[0].dataPoints[0].timestamp, undefined); + assert.notStrictEqual(second[0].dataPoints[0].timestamp, undefined); + + // Second timestamp should be greater than first + assert(second[0].dataPoints[0].timestamp > first[0].dataPoints[0].timestamp); + + consumer.close(); + metric.close(); +} + +{ + // Test: Consumer without lastValue doesn't enable timestamps + const metric = metrics.create('test.no.timestamp', { + description: 'Test no timestamps', + }); + + const consumer = metrics.createConsumer({ + 'test.no.timestamp': {}, + }); + + metric.record(10); + const collected = consumer.collect(); + + assert.strictEqual(collected.length, 1); + const point = collected[0].dataPoints[0]; + assert.strictEqual(point.timestamp, undefined); + + consumer.close(); + metric.close(); +} diff --git a/test/parallel/test-metrics-validation.js b/test/parallel/test-metrics-validation.js new file mode 100644 index 00000000000000..803750d014b141 --- /dev/null +++ b/test/parallel/test-metrics-validation.js @@ -0,0 +1,240 @@ +'use strict'; +require('../common'); +const assert = require('node:assert'); +const { metrics } = require('node:perf_hooks'); + +// This test ensures that validation is properly enforced throughout the metrics API. + +{ + // Test: Empty string metric name throws + assert.throws(() => { + metrics.create('', { + description: 'Invalid empty name', + }); + }, { + code: 'ERR_INVALID_ARG_VALUE', + }); +} + +{ + // Test: Non-string metric name throws + assert.throws(() => { + metrics.create(123, { + description: 'Invalid number name', + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metrics.create(null, { + description: 'Invalid null name', + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metrics.create(undefined, { + description: 'Invalid undefined name', + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +} + +{ + // Test: autoCollect() with interval < 1 throws + const metric = metrics.create('test.autocollect.interval', { + description: 'Test interval validation', + }); + + const consumer = metrics.createConsumer({ + 'test.autocollect.interval': {}, + }); + + assert.throws(() => { + consumer.autoCollect(0, () => {}); + }, { + code: 'ERR_OUT_OF_RANGE', + }); + + assert.throws(() => { + consumer.autoCollect(-1, () => {}); + }, { + code: 'ERR_OUT_OF_RANGE', + }); + + consumer.close(); + metric.close(); +} + +{ + // Test: autoCollect() with non-function callback throws + const metric = metrics.create('test.autocollect.callback', { + description: 'Test callback validation', + }); + + const consumer = metrics.createConsumer({ + 'test.autocollect.callback': {}, + }); + + assert.throws(() => { + consumer.autoCollect(1000, 'not a function'); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + consumer.autoCollect(1000, null); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + consumer.close(); + metric.close(); +} + +{ + // Test: autoCollect() when already active throws + const metric = metrics.create('test.autocollect.duplicate', { + description: 'Test duplicate autoCollect', + }); + + const consumer = metrics.createConsumer({ + 'test.autocollect.duplicate': {}, + }); + + const stop = consumer.autoCollect(1000, () => {}); + + assert.throws(() => { + consumer.autoCollect(1000, () => {}); + }, { + code: 'ERR_INVALID_STATE', + }); + + stop(); // Call the stop function + consumer.close(); + metric.close(); +} + +{ + // Test: autoCollect() on closed consumer throws + const metric = metrics.create('test.autocollect.closed', { + description: 'Test autoCollect on closed consumer', + }); + + const consumer = metrics.createConsumer({ + 'test.autocollect.closed': {}, + }); + + consumer.close(); + + assert.throws(() => { + consumer.autoCollect(1000, () => {}); + }, { + code: 'ERR_INVALID_STATE', + }); + + metric.close(); +} + +{ + // Test: record() with non-number/bigint throws + const metric = metrics.create('test.record.validation', { + description: 'Test record validation', + }); + + assert.throws(() => { + metric.record('not a number'); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.record(null); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.record(undefined); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.record([10]); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + metric.close(); +} + +{ + // Test: record() with non-object attributes throws + const metric = metrics.create('test.attributes.validation', { + description: 'Test attributes validation', + }); + + assert.throws(() => { + metric.record(10, 'not an object'); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.record(10, 123); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.record(10, []); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + metric.close(); +} + +{ + // Test: Timer constructor with non-object attributes throws + const metric = metrics.create('test.timer.attributes', { + description: 'Test timer attributes validation', + }); + + assert.throws(() => { + metric.startTimer('not an object'); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metric.startTimer(123); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + metric.close(); +} + +{ + // Test: Observable must be a function + assert.throws(() => { + metrics.create('test.bad.observable', { + observable: 'not a function', + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); + + assert.throws(() => { + metrics.create('test.bad.observable2', { + observable: 123, + }); + }, { + code: 'ERR_INVALID_ARG_TYPE', + }); +} diff --git a/test/parallel/test-metrics-wildcard-consumers.js b/test/parallel/test-metrics-wildcard-consumers.js new file mode 100644 index 00000000000000..415acbffe789ae --- /dev/null +++ b/test/parallel/test-metrics-wildcard-consumers.js @@ -0,0 +1,111 @@ +'use strict'; + +// Test multiple wildcard consumers (consumers with no specific metrics configured). +// Each should independently aggregate all metrics. + +require('../common'); + +const assert = require('assert'); +const { metrics } = require('perf_hooks'); + +// Create multiple wildcard consumers with different configurations +const consumer1 = metrics.createConsumer({ + defaultAggregation: 'sum', + defaultTemporality: 'cumulative', +}); + +const consumer2 = metrics.createConsumer({ + defaultAggregation: 'lastValue', + defaultTemporality: 'cumulative', +}); + +const consumer3 = metrics.createConsumer({ + defaultAggregation: 'sum', + defaultTemporality: 'delta', +}); + +// Create a metric and record values +const m = metrics.create('test.wildcard.metric'); +m.record(10); +m.record(20); +m.record(30); + +// All consumers should see the metric +const result1 = consumer1.collect(); +const result2 = consumer2.collect(); +const result3 = consumer3.collect(); + +// Find our metric in each result +const find = (result, name) => result.find((s) => s.descriptor.name === name); + +const metric1 = find(result1, 'test.wildcard.metric'); +const metric2 = find(result2, 'test.wildcard.metric'); +const metric3 = find(result3, 'test.wildcard.metric'); + +assert.ok(metric1, 'Consumer 1 should have the metric'); +assert.ok(metric2, 'Consumer 2 should have the metric'); +assert.ok(metric3, 'Consumer 3 should have the metric'); + +// Consumer 1: Sum aggregation +assert.strictEqual(metric1.dataPoints[0].sum, 60); +assert.strictEqual(metric1.temporality, 'cumulative'); + +// Consumer 2: LastValue aggregation +assert.strictEqual(metric2.dataPoints[0].value, 30); +assert.strictEqual(metric2.temporality, 'cumulative'); + +// Consumer 3: Sum with delta temporality +assert.strictEqual(metric3.dataPoints[0].sum, 60); +assert.strictEqual(metric3.temporality, 'delta'); + +// Test delta reset for consumer3 +m.record(5); + +const result1b = consumer1.collect(); +const result3b = consumer3.collect(); + +const metric1b = find(result1b, 'test.wildcard.metric'); +const metric3b = find(result3b, 'test.wildcard.metric'); + +// Consumer 1 (cumulative): Should be 65 (60 + 5) +assert.strictEqual(metric1b.dataPoints[0].sum, 65); + +// Consumer 3 (delta): Should be 5 (only new value since last collect) +assert.strictEqual(metric3b.dataPoints[0].sum, 5); + +// Test that new metrics are picked up by all wildcard consumers +const m2 = metrics.create('test.wildcard.new'); +m2.record(100); + +const result1c = consumer1.collect(); +const result2c = consumer2.collect(); +const result3c = consumer3.collect(); + +const newMetric1 = find(result1c, 'test.wildcard.new'); +const newMetric2 = find(result2c, 'test.wildcard.new'); +const newMetric3 = find(result3c, 'test.wildcard.new'); + +assert.ok(newMetric1, 'Consumer 1 should see new metric'); +assert.ok(newMetric2, 'Consumer 2 should see new metric'); +assert.ok(newMetric3, 'Consumer 3 should see new metric'); + +assert.strictEqual(newMetric1.dataPoints[0].sum, 100); +assert.strictEqual(newMetric2.dataPoints[0].value, 100); +assert.strictEqual(newMetric3.dataPoints[0].sum, 100); + +// Test closing one consumer doesn't affect others +consumer1.close(); + +m.record(15); + +const result2d = consumer2.collect(); +const result3d = consumer3.collect(); + +const metric2d = find(result2d, 'test.wildcard.metric'); +const metric3d = find(result3d, 'test.wildcard.metric'); + +assert.strictEqual(metric2d.dataPoints[0].value, 15); +assert.strictEqual(metric3d.dataPoints[0].sum, 15); + +consumer2.close(); +consumer3.close(); diff --git a/test/parallel/test-object-identity.js b/test/parallel/test-object-identity.js new file mode 100644 index 00000000000000..86861addc031cb --- /dev/null +++ b/test/parallel/test-object-identity.js @@ -0,0 +1,218 @@ +// Flags: --expose-internals +'use strict'; + +require('../common'); +const assert = require('assert'); +const { ObjectIdentity } = require('internal/util/object_identity'); + +// Test 1: Empty object returns 0 +{ + const oid = new ObjectIdentity(); + const hash = oid.getId({}); + assert.strictEqual(hash, 0); +} + +// Test 2: Determinism - order doesn't matter +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ a: 1, b: 2 }); + const hash2 = oid.getId({ b: 2, a: 1 }); + assert.strictEqual(hash1, hash2); +} + +// Test 3: Different values produce different hashes +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ a: 1, b: 2 }); + const hash2 = oid.getId({ a: 1, b: 3 }); + assert.notStrictEqual(hash1, hash2); +} + +// Test 4: Single key +{ + const oid = new ObjectIdentity(); + const hash = oid.getId({ method: 'GET' }); + assert.strictEqual(typeof hash, 'number'); + assert(hash > 0); +} + +// Test 5: Multiple keys with different orders +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ method: 'GET', status: 200, path: '/api' }); + const hash2 = oid.getId({ status: 200, path: '/api', method: 'GET' }); + const hash3 = oid.getId({ path: '/api', method: 'GET', status: 200 }); + assert.strictEqual(hash1, hash2); + assert.strictEqual(hash2, hash3); +} + +// Test 6: Hash is always a 32-bit unsigned integer +{ + const oid = new ObjectIdentity(); + const hash = oid.getId({ a: 'test', b: 123, c: true }); + assert.strictEqual(typeof hash, 'number'); + assert(hash >= 0); + assert(hash <= 0xFFFFFFFF); + assert.strictEqual(hash, hash >>> 0); // Verify it's a 32-bit uint +} + +// Test 7: Same content produces same hash across instances +{ + const oid1 = new ObjectIdentity(); + const oid2 = new ObjectIdentity(); + const attrs = { method: 'POST', status: 201 }; + assert.strictEqual(oid1.getId(attrs), oid2.getId(attrs)); +} + +// Test 8: Sorted keys cache +{ + const oid = new ObjectIdentity({ sortedKeysCacheSize: 10 }); + + // First call - cache miss + const hash1 = oid.getId({ d: 4, c: 3, b: 2, a: 1 }); + const stats1 = oid.getCacheStats(); + assert.strictEqual(stats1.size, 1); + + // Second call with same key ORDER - cache hit + const hash2 = oid.getId({ d: 4, c: 3, b: 2, a: 1 }); + assert.strictEqual(hash1, hash2); + const stats2 = oid.getCacheStats(); + assert.strictEqual(stats2.size, 1); // Same order = cache hit + + // Third call with different key ORDER - new cache entry but same hash + const hash3 = oid.getId({ a: 1, b: 2, c: 3, d: 4 }); + assert.strictEqual(hash1, hash3); // Same content = same hash + const stats3 = oid.getCacheStats(); + assert.strictEqual(stats3.size, 2); // Different order = new cache entry +} + +// Test 9: Cache size limit and LRU eviction +{ + const oid = new ObjectIdentity({ sortedKeysCacheSize: 3 }); + + // Fill cache to capacity + oid.getId({ a: 1, b: 2, c: 3, d: 4 }); + oid.getId({ e: 5, f: 6, g: 7, h: 8 }); + oid.getId({ i: 9, j: 10, k: 11, l: 12 }); + + let stats = oid.getCacheStats(); + assert.strictEqual(stats.size, 3); + assert.strictEqual(stats.max, 3); + + // Add one more - should evict least recently used + oid.getId({ m: 13, n: 14, o: 15, p: 16 }); + stats = oid.getCacheStats(); + assert.strictEqual(stats.size, 3); +} + +// Test 10: Clear cache +{ + const oid = new ObjectIdentity(); + oid.getId({ a: 1, b: 2, c: 3, d: 4 }); + + let stats = oid.getCacheStats(); + assert.strictEqual(stats.size, 1); + + oid.clearCache(); + stats = oid.getCacheStats(); + assert.strictEqual(stats.size, 0); +} + +// Test 11: Inline sort optimization for 2-3 keys +{ + const oid = new ObjectIdentity(); + + // 2 keys - should use inline sort + const hash2a = oid.getId({ b: 2, a: 1 }); + const hash2b = oid.getId({ a: 1, b: 2 }); + assert.strictEqual(hash2a, hash2b); + + // 3 keys - should use inline sort network + const hash3a = oid.getId({ c: 3, a: 1, b: 2 }); + const hash3b = oid.getId({ b: 2, c: 3, a: 1 }); + assert.strictEqual(hash3a, hash3b); +} + +// Test 12: String values are handled correctly +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ method: 'GET', path: '/api/users' }); + const hash2 = oid.getId({ path: '/api/users', method: 'GET' }); + assert.strictEqual(hash1, hash2); + + // Different string values should produce different hashes + const hash3 = oid.getId({ method: 'POST', path: '/api/users' }); + assert.notStrictEqual(hash1, hash3); +} + +// Test 13: Numeric values are handled correctly +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ status: 200 }); + const hash2 = oid.getId({ status: 404 }); + assert.notStrictEqual(hash1, hash2); +} + +// Test 14: Boolean values are handled correctly +{ + const oid = new ObjectIdentity(); + const hash1 = oid.getId({ success: true }); + const hash2 = oid.getId({ success: false }); + assert.notStrictEqual(hash1, hash2); +} + +// Test 15: Realistic metric attributes +{ + const oid = new ObjectIdentity(); + + // Typical HTTP request attributes + const attrs1 = { + method: 'GET', + status: 200, + path: '/api/users', + host: 'example.com', + }; + + const attrs2 = { + host: 'example.com', + path: '/api/users', + status: 200, + method: 'GET', + }; + + const hash1 = oid.getId(attrs1); + const hash2 = oid.getId(attrs2); + assert.strictEqual(hash1, hash2); + + // Different attributes should produce different hash + const attrs3 = { ...attrs1, status: 404 }; + const hash3 = oid.getId(attrs3); + assert.notStrictEqual(hash1, hash3); +} + +// Test 16: Hash distribution (no obvious collisions in small set) +{ + const oid = new ObjectIdentity(); + const hashes = new Set(); + + // Generate 100 different attribute combinations + for (let i = 0; i < 100; i++) { + const hash = oid.getId({ + method: ['GET', 'POST', 'PUT'][i % 3], + status: 200 + (i % 5), + id: i, + }); + hashes.add(hash); + } + + // Should have close to 100 unique hashes (allowing for rare collisions) + assert(hashes.size >= 95, `Expected >= 95 unique hashes, got ${hashes.size}`); +} + +// Test 17: kEmptyObject constant optimization +{ + const { kEmptyObject } = require('internal/util'); + const oid = new ObjectIdentity(); + const hash = oid.getId(kEmptyObject); + assert.strictEqual(hash, 0); +} From e0255062c4591361f082b2ea625b6bdcdd705a68 Mon Sep 17 00:00:00 2001 From: Stephen Belanger Date: Mon, 23 Mar 2026 01:32:48 +0800 Subject: [PATCH 18/18] lib: fix lint errors after rebase - Add missing Promise primordials to diagnostics_channel.js - Fix JSDoc @returns and @param issues in metrics.js and object_identity.js - Wrap setImmediate/Promise.then callbacks in mustCall in tests Co-Authored-By: Claude Sonnet 4.6 --- doc/api/diagnostics_channel.md | 29 -- lib/diagnostics_channel.js | 385 +++++------------- lib/internal/perf/metrics.js | 10 +- lib/internal/util/object_identity.js | 2 +- ...test-metrics-exemplar-asynclocalstorage.js | 6 +- .../test-metrics-observable-exception.js | 4 +- 6 files changed, 115 insertions(+), 321 deletions(-) diff --git a/doc/api/diagnostics_channel.md b/doc/api/diagnostics_channel.md index 9d47174388a4f0..1c5e2b6535adbf 100644 --- a/doc/api/diagnostics_channel.md +++ b/doc/api/diagnostics_channel.md @@ -132,35 +132,6 @@ if (diagnostics_channel.hasSubscribers('my-channel')) { } ``` -#### `diagnostics_channel.hasChannel(name)` - - - -* `name` {string|symbol} The channel name - -* Returns: {boolean} If the channel exists - -Check if a channel with the given name exists. This is useful to check if a -channel has been created to add additional logic if it's the first time. - -```mjs -import diagnostics_channel from 'node:diagnostics_channel'; - -if (!diagnostics_channel.hasChannel('my-channel')) { - // Channel does not exist yet, do additional setup -} -``` - -```cjs -const diagnostics_channel = require('node:diagnostics_channel'); - -if (!diagnostics_channel.hasChannel('my-channel')) { - // Channel does not exist yet, do additional setup -} -``` - #### `diagnostics_channel.channel(name)`