Skip to content

Commit 669090c

Browse files
authored
chore(openai): clean up code a bit (#6126)
1 parent 43fee33 commit 669090c

File tree

2 files changed

+58
-59
lines changed

2 files changed

+58
-59
lines changed

packages/datadog-plugin-openai/src/stream-helpers.js

Lines changed: 56 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -17,42 +17,60 @@ function convertBuffersToObjects (chunks) {
1717
}
1818

1919
/**
20-
* Constructs the entire response from a stream of OpenAI completion chunks,
21-
* mainly combining the text choices of each chunk into a single string per choice.
20+
* Common function for combining chunks with n choices into a single response body.
21+
* The shared logic will add a new choice index entry if it doesn't exist, and otherwise
22+
 * hand off to an onChoice handler to add that choice to the previously stored choice.
23+
*
2224
* @param {Array<Record<string, any>>} chunks
23-
* @param {number} n the number of choices to expect in the response
25+
* @param {number} n
26+
* @param {function(Record<string, any>, Record<string, any>): void} onChoice
2427
* @returns {Record<string, any>}
2528
*/
26-
function constructCompletionResponseFromStreamedChunks (chunks, n) {
29+
function constructResponseFromStreamedChunks (chunks, n, onChoice) {
2730
const body = { ...chunks[0], choices: Array.from({ length: n }) }
2831

2932
for (const chunk of chunks) {
3033
body.usage = chunk.usage
3134
for (const choice of chunk.choices) {
3235
const choiceIdx = choice.index
3336
const oldChoice = body.choices.find(choice => choice?.index === choiceIdx)
34-
if (oldChoice) {
35-
if (!oldChoice.finish_reason) {
36-
oldChoice.finish_reason = choice.finish_reason
37-
}
3837

39-
const text = choice.text
40-
if (text) {
41-
if (oldChoice.text) {
42-
oldChoice.text += text
43-
} else {
44-
oldChoice.text = text
45-
}
46-
}
47-
} else {
38+
if (!oldChoice) {
4839
body.choices[choiceIdx] = choice
40+
continue
4941
}
42+
43+
if (!oldChoice.finish_reason) {
44+
oldChoice.finish_reason = choice.finish_reason
45+
}
46+
47+
onChoice(choice, oldChoice)
5048
}
5149
}
5250

5351
return body
5452
}
5553

54+
/**
55+
* Constructs the entire response from a stream of OpenAI completion chunks,
56+
* mainly combining the text choices of each chunk into a single string per choice.
57+
* @param {Array<Record<string, any>>} chunks
58+
* @param {number} n the number of choices to expect in the response
59+
* @returns {Record<string, any>}
60+
*/
61+
function constructCompletionResponseFromStreamedChunks (chunks, n) {
62+
return constructResponseFromStreamedChunks(chunks, n, (choice, oldChoice) => {
63+
const text = choice.text
64+
if (text) {
65+
if (oldChoice.text) {
66+
oldChoice.text += text
67+
} else {
68+
oldChoice.text = text
69+
}
70+
}
71+
})
72+
}
73+
5674
/**
5775
* Constructs the entire response from a stream of OpenAI chat completion chunks,
5876
* mainly combining the text choices of each chunk into a single string per choice.
@@ -61,49 +79,32 @@ function constructCompletionResponseFromStreamedChunks (chunks, n) {
6179
* @returns {Record<string, any>}
6280
*/
6381
function constructChatCompletionResponseFromStreamedChunks (chunks, n) {
64-
const body = { ...chunks[0], choices: Array.from({ length: n }) }
65-
66-
for (const chunk of chunks) {
67-
body.usage = chunk.usage
68-
for (const choice of chunk.choices) {
69-
const choiceIdx = choice.index
70-
const oldChoice = body.choices.find(choice => choice?.index === choiceIdx)
71-
if (oldChoice) {
72-
if (!oldChoice.finish_reason) {
73-
oldChoice.finish_reason = choice.finish_reason
74-
}
75-
76-
const delta = choice.delta
77-
if (!delta) continue
82+
return constructResponseFromStreamedChunks(chunks, n, (choice, oldChoice) => {
83+
const delta = choice.delta
84+
if (!delta) return
7885

79-
const content = delta.content
80-
if (content) {
81-
if (oldChoice.delta.content) {
82-
oldChoice.delta.content += content
83-
} else {
84-
oldChoice.delta.content = content
85-
}
86-
}
87-
88-
const tools = choice.delta.tool_calls
89-
if (!tools) continue
90-
91-
oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
92-
const oldTool = oldChoice.delta.tool_calls?.[toolIdx]
93-
if (oldTool) {
94-
oldTool.function.arguments += newTool.function.arguments
95-
return oldTool
96-
}
97-
98-
return newTool
99-
})
86+
const content = delta.content
87+
if (content) {
88+
if (oldChoice.delta.content) {
89+
oldChoice.delta.content += content
10090
} else {
101-
body.choices[choiceIdx] = choice
91+
oldChoice.delta.content = content
10292
}
10393
}
104-
}
10594

106-
return body
95+
const tools = delta.tool_calls
96+
if (!tools) return
97+
98+
oldChoice.delta.tool_calls = tools.map((newTool, toolIdx) => {
99+
const oldTool = oldChoice.delta.tool_calls?.[toolIdx]
100+
if (oldTool) {
101+
oldTool.function.arguments += newTool.function.arguments
102+
return oldTool
103+
}
104+
105+
return newTool
106+
})
107+
})
107108
}
108109

109110
module.exports = {

packages/datadog-plugin-openai/src/tracing.js

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -64,10 +64,8 @@ class OpenAiTracingPlugin extends TracingPlugin {
6464
if (chunks.length === 0) return
6565

6666
const firstChunk = chunks[0]
67-
// TODO(BridgeAR): It likely depends on the options being passed
68-
// through if the stream returns buffers or not. By reading that,
69-
// we don't have to do the instanceof check anymore, which is
70-
// relatively expensive.
67+
// OpenAI in legacy versions returns chunked buffers instead of objects.
68+
// These buffers will need to be combined and coalesced into a list of object chunks.
7169
if (firstChunk instanceof Buffer) {
7270
chunks = convertBuffersToObjects(chunks)
7371
}

0 commit comments

Comments
 (0)