diff --git a/docs/source/run-workflows/observe/observe-workflow-with-weave.md b/docs/source/run-workflows/observe/observe-workflow-with-weave.md index cc8a5a6a22..faf231be95 100644 --- a/docs/source/run-workflows/observe/observe-workflow-with-weave.md +++ b/docs/source/run-workflows/observe/observe-workflow-with-weave.md @@ -194,8 +194,66 @@ def my_auth_callback(request): context.metadata.trace_user_email = user_info.get("email") ``` +## OTel-Based Exporter + +Weave also supports an [OTel-based integration](https://weave-docs.wandb.ai/guides/tracking/otel) that sends traces via OpenTelemetry. This uses the `opentelemetry` plugin instead of the `weave` plugin, which means it does not require installing `weave` as a dependency — only the `opentelemetry` extra is needed. + +### Install the OTel plugin + +::::{tab-set} +:sync-group: install-tool + +:::{tab-item} source +:selected: +:sync: source + +```bash +uv pip install -e ".[opentelemetry]" +``` + +::: + +:::{tab-item} package +:sync: package + +```bash +uv pip install "nvidia-nat[opentelemetry]" +``` + +::: + +:::: + +### Configuration + +Update your workflow configuration to use the `weave_otel` exporter type: + +```yaml +general: + telemetry: + tracing: + weave_otel: + _type: weave_otel + project: "nat-demo" + entity: "your-team-name" +``` + +| Parameter | Description | Example | +|-----------|-------------|---------| +| `project` | The name of your W&B Weave project | `"nat-demo"` | +| `entity` | Your W&B username or team name | `"your-team-name"` | +| `endpoint` | The W&B Weave OTel endpoint (optional) | `"https://trace.wandb.ai/otel/v1/traces"` | +| `api_key` | The W&B API key (optional, falls back to `WANDB_API_KEY` env var) | — | + +### Run the Workflow + +```bash +nat run --config_file examples/observability/simple_calculator_observability/configs/config-weave-otel.yml --input "What's the sum of 7 and 8?" 
+``` + ## Resources - Learn more about tracing [here](https://weave-docs.wandb.ai/guides/tracking/tracing). - Learn more about how to navigate the logged traces [here](https://weave-docs.wandb.ai/guides/tracking/trace-tree). - Learn more about PII redaction [here](https://weave-docs.wandb.ai/guides/tracking/redact-pii). +- Learn more about the OTel-based integration [here](https://weave-docs.wandb.ai/guides/tracking/otel). diff --git a/examples/observability/simple_calculator_observability/README.md b/examples/observability/simple_calculator_observability/README.md index bed2ec3644..6aaaa59374 100644 --- a/examples/observability/simple_calculator_observability/README.md +++ b/examples/observability/simple_calculator_observability/README.md @@ -210,6 +210,22 @@ For simple local development and debugging, you can export traces directly to a For detailed Weave setup instructions, refer to the [Fine-grained Tracing with Weave](../../../docs/source/run-workflows/observe/observe-workflow-with-weave.md) guide. +### Weave OTel Integration + +Weave also supports an [OTel-based integration](https://weave-docs.wandb.ai/guides/tracking/otel) that sends traces via OpenTelemetry. This approach uses the `opentelemetry` plugin instead of the `weave` plugin, which means it does not require installing `weave` as a dependency. + +1. Set your Weights & Biases API key: + + ```bash + export WANDB_API_KEY= + ``` + +2. Run the workflow: + + ```bash + nat run --config_file examples/observability/simple_calculator_observability/configs/config-weave-otel.yml --input "What's the sum of 7 and 8?" + ``` + ### AI Safety Monitoring with Patronus [Patronus](https://patronus.ai/) enables AI safety monitoring and compliance tracking. 
@@ -339,6 +355,7 @@ The example includes multiple configuration files for different observability pl | `config-langfuse.yml` | Langfuse | Langfuse monitoring and analytics | | `config-langsmith.yml` | LangSmith | LangChain/LangGraph ecosystem integration | | `config-weave.yml` | Weave | Workflow-focused tracking | +| `config-weave-otel.yml` | Weave (OTel) | W&B Weave OTel tracing and monitoring integration | | `config-patronus.yml` | Patronus | AI safety and compliance monitoring | | `config-catalyst.yml` | Catalyst | RagaAI Catalyst integration | | `config-galileo.yml` | Galileo | Galileo integration | diff --git a/examples/observability/simple_calculator_observability/configs/config-weave-otel.yml b/examples/observability/simple_calculator_observability/configs/config-weave-otel.yml new file mode 100644 index 0000000000..bd5c353f47 --- /dev/null +++ b/examples/observability/simple_calculator_observability/configs/config-weave-otel.yml @@ -0,0 +1,45 @@ +# SPDX-FileCopyrightText: Copyright (c) 2024-2026, NVIDIA CORPORATION & AFFILIATES. All rights reserved. +# SPDX-License-Identifier: Apache-2.0 +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ + +general: + telemetry: + tracing: + weave_otel: + _type: weave_otel + project: "nat-demo" + entity: "your-team-name" + +function_groups: + calculator: + _type: calculator + +functions: + current_datetime: + _type: current_datetime + +llms: + nim_llm: + _type: nim + model_name: meta/llama-3.1-70b-instruct + temperature: 0.0 + max_tokens: 1024 + +workflow: + _type: react_agent + tool_names: [calculator, current_datetime] + llm_name: nim_llm + verbose: true + parse_agent_response_max_retries: 3 diff --git a/packages/nvidia_nat_opentelemetry/src/nat/plugins/opentelemetry/register.py b/packages/nvidia_nat_opentelemetry/src/nat/plugins/opentelemetry/register.py index f9bac87023..489a930fae 100644 --- a/packages/nvidia_nat_opentelemetry/src/nat/plugins/opentelemetry/register.py +++ b/packages/nvidia_nat_opentelemetry/src/nat/plugins/opentelemetry/register.py @@ -15,6 +15,7 @@ import logging import os +from collections.abc import AsyncGenerator from pydantic import Field @@ -209,6 +210,49 @@ async def galileo_telemetry_exporter(config: GalileoTelemetryExporter, builder: ) +class WeaveOtelTelemetryExporter(BatchConfigMixin, TelemetryExporterBaseConfig, name="weave_otel"): + """A telemetry exporter to transmit traces to Weights & Biases Weave via OTel.""" + + endpoint: str = Field( + description="The W&B Weave OTel endpoint", + default="https://trace.wandb.ai/otel/v1/traces", + ) + api_key: SerializableSecretStr = Field(description="The W&B API key", + default_factory=lambda: SerializableSecretStr("")) + project: str = Field(description="The W&B project name.") + entity: str = Field(description="The W&B username or team name.") + + +@register_telemetry_exporter(config_type=WeaveOtelTelemetryExporter) +async def weave_otel_telemetry_exporter( + config: WeaveOtelTelemetryExporter, builder: Builder, +) -> AsyncGenerator["OTLPSpanAdapterExporter", None]: + """Create a Weave OTel telemetry exporter.""" + + from nat.plugins.opentelemetry import OTLPSpanAdapterExporter + + 
api_key = get_secret_value(config.api_key) or os.environ.get("WANDB_API_KEY") +    if not api_key: +        raise ValueError("API key is required for Weave (set api_key or WANDB_API_KEY env var)") + +    headers = {"wandb-api-key": api_key} +    resource_attributes = { +        "wandb.project": config.project, +        "wandb.entity": config.entity, +    } + +    yield OTLPSpanAdapterExporter( +        endpoint=config.endpoint, +        headers=headers, +        resource_attributes=resource_attributes, +        batch_size=config.batch_size, +        flush_interval=config.flush_interval, +        max_queue_size=config.max_queue_size, +        drop_on_overflow=config.drop_on_overflow, +        shutdown_timeout=config.shutdown_timeout, +    ) + + class DBNLTelemetryExporter(BatchConfigMixin, TelemetryExporterBaseConfig, name="dbnl"): +     """A telemetry exporter to transmit traces to DBNL."""