Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(llmobs): add support for Azure OpenAI and Deepseek calls made through the OpenAI SDK #5381

Merged
merged 3 commits into from
Mar 10, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 15 additions & 2 deletions packages/dd-trace/src/llmobs/plugins/openai.js
Original file line number Diff line number Diff line change
Expand Up @@ -23,10 +23,13 @@ class OpenAiLLMObsPlugin extends LLMObsPlugin {
const inputs = ctx.args[0] // completion, chat completion, and embeddings take one argument
const operation = getOperation(methodName)
const kind = operation === 'embedding' ? 'embedding' : 'llm'
const name = `openai.${methodName}`

const { modelProvider, client } = this._getModelProviderAndClient(ctx.basePath)

const name = `${client}.${methodName}`

return {
modelProvider: 'openai',
modelProvider,
modelName: inputs.model,
kind,
name
Expand Down Expand Up @@ -59,6 +62,16 @@ class OpenAiLLMObsPlugin extends LLMObsPlugin {
}
}

_getModelProviderAndClient (baseUrl = '') {
if (baseUrl.includes('azure')) {
return { modelProvider: 'azure_openai', client: 'AzureOpenAI' }
} else if (baseUrl.includes('deepseek')) {
return { modelProvider: 'deepseek', client: 'DeepSeek' }
} else {
return { modelProvider: 'openai', client: 'OpenAI' }
}
}

_extractMetrics (response) {
const metrics = {}
const tokenUsage = response.usage
Expand Down
12 changes: 6 additions & 6 deletions packages/dd-trace/test/llmobs/plugins/openai/openaiv3.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -88,7 +88,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createCompletion',
name: 'OpenAI.createCompletion',
inputMessages: [
{ content: 'How are you?' }
],
Expand Down Expand Up @@ -144,7 +144,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
inputMessages: [
{ role: 'system', content: 'You are a helpful assistant' },
{ role: 'user', content: 'How are you?' }
Expand Down Expand Up @@ -198,7 +198,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'embedding',
name: 'openai.createEmbedding',
name: 'OpenAI.createEmbedding',
inputDocuments: [
{ text: 'Hello, world!' }
],
Expand Down Expand Up @@ -256,7 +256,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
modelName: 'gpt-3.5-turbo-0301',
modelProvider: 'openai',
inputMessages: [{ role: 'user', content: 'What is SpongeBob SquarePants\'s origin?' }],
Expand Down Expand Up @@ -305,7 +305,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createCompletion',
name: 'OpenAI.createCompletion',
inputMessages: [{ content: 'Hello' }],
outputMessages: [{ content: '' }],
modelName: 'gpt-3.5-turbo',
Expand Down Expand Up @@ -348,7 +348,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
inputMessages: [{ role: 'user', content: 'Hello' }],
outputMessages: [{ content: '' }],
modelName: 'gpt-3.5-turbo',
Expand Down
89 changes: 80 additions & 9 deletions packages/dd-trace/test/llmobs/plugins/openai/openaiv4.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,8 @@ const satisfiesStream = version => semver.intersects('>4.1.0', version)

describe('integrations', () => {
let openai
let azureOpenai
let deepseekOpenai

describe('openai', () => {
before(() => {
Expand Down Expand Up @@ -69,6 +71,26 @@ describe('integrations', () => {
openai = new OpenAI({
apiKey: 'test'
})

const AzureOpenAI = OpenAI.AzureOpenAI ?? OpenAI
if (OpenAI.AzureOpenAI) {
azureOpenai = new AzureOpenAI({
endpoint: 'https://dd.openai.azure.com/',
apiKey: 'test',
apiVersion: '2024-05-01-preview'
})
} else {
azureOpenai = new OpenAI({
baseURL: 'https://dd.openai.azure.com/',
apiKey: 'test',
apiVersion: '2024-05-01-preview'
})
}

deepseekOpenai = new OpenAI({
baseURL: 'https://api.deepseek.com/',
apiKey: 'test'
})
})

it('submits a completion span', async () => {
Expand All @@ -92,7 +114,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createCompletion',
name: 'OpenAI.createCompletion',
inputMessages: [
{ content: 'How are you?' }
],
Expand Down Expand Up @@ -147,7 +169,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
inputMessages: [
{ role: 'system', content: 'You are a helpful assistant' },
{ role: 'user', content: 'How are you?' }
Expand Down Expand Up @@ -200,7 +222,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'embedding',
name: 'openai.createEmbedding',
name: 'OpenAI.createEmbedding',
inputDocuments: [
{ text: 'Hello, world!' }
],
Expand Down Expand Up @@ -264,7 +286,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
modelName: 'gpt-3.5-turbo-0301',
modelProvider: 'openai',
inputMessages: [{ role: 'user', content: 'What is SpongeBob SquarePants\'s origin?' }],
Expand Down Expand Up @@ -322,7 +344,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createCompletion',
name: 'OpenAI.createCompletion',
inputMessages: [
{ content: 'Can you say this is a test?' }
],
Expand Down Expand Up @@ -373,7 +395,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
inputMessages: [
{ role: 'user', content: 'Hello' }
],
Expand Down Expand Up @@ -424,7 +446,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
modelName: 'gpt-3.5-turbo-0301',
modelProvider: 'openai',
inputMessages: [{ role: 'user', content: 'What function would you call to finish this?' }],
Expand Down Expand Up @@ -479,7 +501,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createCompletion',
name: 'OpenAI.createCompletion',
inputMessages: [{ content: 'Hello' }],
outputMessages: [{ content: '' }],
modelName: 'gpt-3.5-turbo',
Expand Down Expand Up @@ -521,7 +543,7 @@ describe('integrations', () => {
const expected = expectedLLMObsLLMSpanEvent({
span,
spanKind: 'llm',
name: 'openai.createChatCompletion',
name: 'OpenAI.createChatCompletion',
inputMessages: [{ role: 'user', content: 'Hello' }],
outputMessages: [{ content: '' }],
modelName: 'gpt-3.5-turbo',
Expand Down Expand Up @@ -549,6 +571,55 @@ describe('integrations', () => {

await checkSpan
})

it('submits an AzureOpenAI completion', async () => {
  // The dedicated AzureOpenAI class (newer SDK versions) routes requests
  // through a deployment-scoped path with an api-version query string; the
  // plain OpenAI client fallback hits the bare endpoint with no query.
  const usingAzureClass = azureOpenai.constructor.name === 'AzureOpenAI'

  const expectedPath = usingAzureClass
    ? '//openai/deployments/some-model/chat/completions'
    : '/chat/completions'
  const expectedQuery = usingAzureClass ? { 'api-version': '2024-05-01-preview' } : {}

  nock('https://dd.openai.azure.com:443')
    .post(expectedPath)
    .query(expectedQuery)
    .reply(200, {})

  // Assert the LLMObs span event is renamed and tagged for Azure.
  const checkSpan = agent.use(traces => {
    const spanEvent = LLMObsAgentProxySpanWriter.prototype.append.getCall(0).args[0]

    expect(spanEvent).to.have.property('name', 'AzureOpenAI.createChatCompletion')
    expect(spanEvent.meta).to.have.property('model_provider', 'azure_openai')
  })

  await azureOpenai.chat.completions.create({ model: 'some-model', messages: [] })

  await checkSpan
})

// Fix: test title read "an DeepSeek" — corrected to "a DeepSeek".
it('submits a DeepSeek completion', async () => {
  // DeepSeek is reached through the stock OpenAI client with a custom
  // baseURL, so the request path is the plain chat-completions endpoint.
  nock('https://api.deepseek.com:443')
    .post('/chat/completions')
    .reply(200, {})

  // Assert the LLMObs span event is renamed and tagged for DeepSeek.
  const checkSpan = agent.use(traces => {
    const spanEvent = LLMObsAgentProxySpanWriter.prototype.append.getCall(0).args[0]

    expect(spanEvent).to.have.property('name', 'DeepSeek.createChatCompletion')
    expect(spanEvent.meta).to.have.property('model_provider', 'deepseek')
  })

  await deepseekOpenai.chat.completions.create({
    model: 'some-model',
    messages: []
  })

  await checkSpan
})
})
})
})
Loading