7 changes: 7 additions & 0 deletions .changeset/opentelemetry-instrumentation.md
@@ -0,0 +1,7 @@
---
"@tanstack/ai-opentelemetry": minor
"@tanstack/ai-openai": patch
"@tanstack/ai-anthropic": patch
---

Add `@tanstack/ai-opentelemetry`, an OpenTelemetry instrumentation package that bridges aiEventClient events to OTel spans following the GenAI Semantic Conventions. Remove console.log/console.error calls from the openai and anthropic adapters for security.
20 changes: 0 additions & 20 deletions packages/typescript/ai-anthropic/src/anthropic-adapter.ts
@@ -99,16 +99,6 @@ export class Anthropic extends BaseAdapter<
this.generateId(),
)
} catch (error: any) {
console.error('[Anthropic Adapter] Error in chatStream:', {
message: error?.message,
status: error?.status,
statusText: error?.statusText,
code: error?.code,
type: error?.type,
error: error,
stack: error?.stack,
})

// Emit an error chunk
yield {
type: 'error',
@@ -613,16 +603,6 @@
}
}
} catch (error: any) {
console.error('[Anthropic Adapter] Error in processAnthropicStream:', {
message: error?.message,
status: error?.status,
statusText: error?.statusText,
code: error?.code,
type: error?.type,
error: error,
stack: error?.stack,
})

yield {
type: 'error',
id: generateId(),
14 changes: 0 additions & 14 deletions packages/typescript/ai-openai/src/openai-adapter.ts
@@ -116,10 +116,6 @@ export class OpenAI extends BaseAdapter<
() => this.generateId(),
)
} catch (error: any) {
console.error('>>> chatStream: Fatal error during response creation <<<')
console.error('>>> Error message:', error?.message)
console.error('>>> Error stack:', error?.stack)
console.error('>>> Full error:', error)
throw error
}
}
@@ -215,8 +211,6 @@
let responseId: string | null = null
let model: string = options.model

const eventTypeCounts = new Map<string, number>()

try {
for await (const chunk of stream) {
chunkCount++
@@ -466,14 +460,6 @@
}
}
} catch (error: any) {
console.log(
'[OpenAI Adapter] Stream ended with error. Event type summary:',
{
totalChunks: chunkCount,
eventTypes: Object.fromEntries(eventTypeCounts),
error: error.message,
},
)
yield {
type: 'error',
id: generateId(),
122 changes: 122 additions & 0 deletions packages/typescript/ai-opentelemetry/README.md
@@ -0,0 +1,122 @@
# @tanstack/ai-opentelemetry

OpenTelemetry instrumentation for TanStack AI. Automatically creates traces and spans for AI operations following [GenAI Semantic Conventions](https://opentelemetry.io/docs/specs/semconv/gen-ai/).

## Installation

```bash
npm install @tanstack/ai-opentelemetry @opentelemetry/api
```

## Quick Start

```typescript
import { enableOpenTelemetry } from '@tanstack/ai-opentelemetry'

// Enable instrumentation (uses global tracer provider)
enableOpenTelemetry()

// Now all TanStack AI operations will be traced
import { chat } from '@tanstack/ai'
import { openai } from '@tanstack/ai-openai'

const stream = chat({
adapter: openai(),
model: 'gpt-4o',
messages: [{ role: 'user', content: 'Hello!' }],
})
```

## Configuration

```typescript
import { enableOpenTelemetry } from '@tanstack/ai-opentelemetry'
import { trace } from '@opentelemetry/api'

enableOpenTelemetry({
// Provide custom tracer
tracer: trace.getTracer('my-app', '1.0.0'),

// Record prompt/response content (disabled by default for privacy)
recordContent: true,

// Record tool call arguments and results (enabled by default)
recordToolCalls: true,
})
```

## Spans Created

The instrumentation creates the following spans:

### Chat Span
- **Name**: `chat {model}`
- **Kind**: CLIENT
- **Attributes**:
- `gen_ai.system`: Provider name (openai, anthropic, etc.)
- `gen_ai.request.model`: Requested model
- `gen_ai.response.model`: Actual model used
- `gen_ai.response.finish_reasons`: How the response ended
- `gen_ai.usage.input_tokens`: Prompt tokens
- `gen_ai.usage.output_tokens`: Completion tokens
- `gen_ai.usage.total_tokens`: Total tokens

### Stream Span
- **Name**: `stream {model}`
- **Kind**: INTERNAL
- **Attributes**:
- `tanstack_ai.stream.total_chunks`: Number of chunks received
- `tanstack_ai.stream.duration_ms`: Stream duration

### Tool Span
- **Name**: `tool {toolName}`
- **Kind**: INTERNAL
- **Attributes**:
- `gen_ai.tool.name`: Tool name
- `gen_ai.tool.call_id`: Unique tool call ID
- `gen_ai.tool.duration_ms`: Execution duration
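
The attribute keys above are plain string keys on the exported spans, so they can be read back with any exporter. Below is a minimal sketch that inspects them with an in-memory exporter; it assumes the OpenTelemetry Node SDK 1.x tracer-provider API (`addSpanProcessor`) and is not part of this package:

```typescript
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node'
import {
  InMemorySpanExporter,
  SimpleSpanProcessor,
} from '@opentelemetry/sdk-trace-base'
import { enableOpenTelemetry } from '@tanstack/ai-opentelemetry'

// Collect finished spans in memory (handy in tests); SDK 1.x style setup.
const exporter = new InMemorySpanExporter()
const provider = new NodeTracerProvider()
provider.addSpanProcessor(new SimpleSpanProcessor(exporter))
provider.register()

enableOpenTelemetry()

// ...run a chat() call here, then read attributes off the recorded chat span:
const chatSpan = exporter
  .getFinishedSpans()
  .find((span) => span.name.startsWith('chat '))

console.log(chatSpan?.attributes['gen_ai.usage.total_tokens'])
```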

## Example with Jaeger

```typescript
import { NodeSDK } from '@opentelemetry/sdk-node'
import { JaegerExporter } from '@opentelemetry/exporter-jaeger'
import { enableOpenTelemetry } from '@tanstack/ai-opentelemetry'

// Setup OpenTelemetry SDK
const sdk = new NodeSDK({
traceExporter: new JaegerExporter({
endpoint: 'http://localhost:14268/api/traces',
}),
serviceName: 'my-ai-app',
})
sdk.start()

// Enable TanStack AI instrumentation
enableOpenTelemetry()
```

## Disabling Instrumentation

```typescript
import { disableOpenTelemetry } from '@tanstack/ai-opentelemetry'

disableOpenTelemetry()
```

## Manual Instrumentation

For more control, you can create and manage the instrumentation instance directly:

```typescript
import { TanStackAIInstrumentation } from '@tanstack/ai-opentelemetry'

const instrumentation = new TanStackAIInstrumentation({
recordContent: true,
})

instrumentation.enable()

// Later...
instrumentation.disable()
```
54 changes: 54 additions & 0 deletions packages/typescript/ai-opentelemetry/package.json
@@ -0,0 +1,54 @@
{
"name": "@tanstack/ai-opentelemetry",
"version": "0.0.3",
"description": "OpenTelemetry instrumentation for TanStack AI",
"author": "",
"license": "MIT",
"repository": {
"type": "git",
"url": "git+https://github.com/TanStack/ai.git",
"directory": "packages/typescript/ai-opentelemetry"
},
"keywords": [
"ai",
"opentelemetry",
"tracing",
"observability",
"tanstack",
"instrumentation"
],
"type": "module",
"module": "./dist/esm/index.js",
"types": "./dist/esm/index.d.ts",
"exports": {
".": {
"types": "./dist/esm/index.d.ts",
"import": "./dist/esm/index.js"
}
},
"files": [
"dist",
"src"
],
"scripts": {
"build": "vite build",
"clean": "premove ./build ./dist",
"lint:fix": "eslint ./src --fix",
"test:build": "publint --strict",
"test:eslint": "eslint ./src",
"test:lib": "vitest",
"test:lib:dev": "pnpm test:lib --watch",
"test:types": "tsc"
},
"dependencies": {
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/semantic-conventions": "^1.28.0"
},
"devDependencies": {
"@tanstack/ai": "workspace:*",
"@vitest/coverage-v8": "4.0.14"
},
"peerDependencies": {
"@tanstack/ai": "workspace:*"
}
}
10 changes: 10 additions & 0 deletions packages/typescript/ai-opentelemetry/src/index.ts
@@ -0,0 +1,10 @@
export {
TanStackAIInstrumentation,
enableOpenTelemetry,
disableOpenTelemetry,
} from './instrumentation'

export {
GenAIAttributes,
type TanStackAIInstrumentationConfig,
} from './types'