var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {
if (kind === "m") throw new TypeError("Private method is not writable");
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it");
return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;
};
var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
};
var _ChatCompletionStream_instances, _ChatCompletionStream_params, _ChatCompletionStream_choiceEventStates, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_getChoiceEventState, _ChatCompletionStream_addChunk, _ChatCompletionStream_emitToolCallDoneEvent, _ChatCompletionStream_emitContentDoneEvents, _ChatCompletionStream_endRequest, _ChatCompletionStream_getAutoParseableResponseFormat, _ChatCompletionStream_accumulateChatCompletion;
import { OpenAIError, APIUserAbortError, LengthFinishReasonError, ContentFilterFinishReasonError, } from 'openai/error';
import { AbstractChatCompletionRunner, } from "./AbstractChatCompletionRunner.mjs";
import { Stream } from 'openai/streaming';
import { hasAutoParseableInput, isAutoParsableResponseFormat, isAutoParsableTool, maybeParseChatCompletion, shouldParseToolCall, } from 'openai/lib/parser';
import { partialParse } from "../_vendor/partial-json-parser/parser.mjs";
export class ChatCompletionStream extends AbstractChatCompletionRunner {
constructor(params) {
super();
_ChatCompletionStream_instances.add(this);
_ChatCompletionStream_params.set(this, void 0);
_ChatCompletionStream_choiceEventStates.set(this, void 0);
_ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0);
__classPrivateFieldSet(this, _ChatCompletionStream_params, params, "f");
__classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f");
}
get currentChatCompletionSnapshot() {
return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
}
/**
* Intended for use on the frontend, consuming a stream produced with
* `.toReadableStream()` on the backend.
*
* Note that messages sent to the model do not appear in `.on('message')`
* in this context.
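     *
     * @example
     * // A minimal sketch of browser-side usage. The `/api/chat` endpoint is a
     * // placeholder (an assumption, not part of this module); it is expected to
     * // respond with the body produced by `.toReadableStream()` on the backend.
     * const res = await fetch('/api/chat', { method: 'POST' });
     * const runner = ChatCompletionStream.fromReadableStream(res.body);
     * runner.on('content', (delta, snapshot) => console.log(snapshot));
     * const completion = await runner.finalChatCompletion();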
*/
static fromReadableStream(stream) {
const runner = new ChatCompletionStream(null);
runner._run(() => runner._fromReadableStream(stream));
return runner;
}
static createChatCompletion(client, params, options) {
const runner = new ChatCompletionStream(params);
runner._run(() => runner._runChatCompletion(client, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));
return runner;
}
async _createChatCompletion(client, params, options) {
super._createChatCompletion;
const signal = options?.signal;
if (signal) {
if (signal.aborted)
this.controller.abort();
signal.addEventListener('abort', () => this.controller.abort());
}
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
const stream = await client.chat.completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });
this._connected();
for await (const chunk of stream) {
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
}
if (stream.controller.signal?.aborted) {
throw new APIUserAbortError();
}
return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
}
async _fromReadableStream(readableStream, options) {
const signal = options?.signal;
if (signal) {
if (signal.aborted)
this.controller.abort();
signal.addEventListener('abort', () => this.controller.abort());
}
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_beginRequest).call(this);
this._connected();
const stream = Stream.fromReadableStream(readableStream, this.controller);
let chatId;
for await (const chunk of stream) {
if (chatId && chatId !== chunk.id) {
// A new request has been made.
this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
}
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_addChunk).call(this, chunk);
chatId = chunk.id;
}
if (stream.controller.signal?.aborted) {
throw new APIUserAbortError();
}
return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_endRequest).call(this));
}
[(_ChatCompletionStream_params = new WeakMap(), _ChatCompletionStream_choiceEventStates = new WeakMap(), _ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() {
if (this.ended)
return;
__classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
}, _ChatCompletionStream_getChoiceEventState = function _ChatCompletionStream_getChoiceEventState(choice) {
let state = __classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index];
if (state) {
return state;
}
state = {
content_done: false,
refusal_done: false,
logprobs_content_done: false,
logprobs_refusal_done: false,
done_tool_calls: new Set(),
current_tool_call_index: null,
};
__classPrivateFieldGet(this, _ChatCompletionStream_choiceEventStates, "f")[choice.index] = state;
return state;
}, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) {
if (this.ended)
return;
const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk);
this._emit('chunk', chunk, completion);
for (const choice of chunk.choices) {
const choiceSnapshot = completion.choices[choice.index];
if (choice.delta.content != null &&
choiceSnapshot.message?.role === 'assistant' &&
choiceSnapshot.message?.content) {
this._emit('content', choice.delta.content, choiceSnapshot.message.content);
this._emit('content.delta', {
delta: choice.delta.content,
snapshot: choiceSnapshot.message.content,
parsed: choiceSnapshot.message.parsed,
});
}
if (choice.delta.refusal != null &&
choiceSnapshot.message?.role === 'assistant' &&
choiceSnapshot.message?.refusal) {
this._emit('refusal.delta', {
delta: choice.delta.refusal,
snapshot: choiceSnapshot.message.refusal,
});
}
if (choice.logprobs?.content != null && choiceSnapshot.message?.role === 'assistant') {
this._emit('logprobs.content.delta', {
content: choice.logprobs?.content,
snapshot: choiceSnapshot.logprobs?.content ?? [],
});
}
if (choice.logprobs?.refusal != null && choiceSnapshot.message?.role === 'assistant') {
this._emit('logprobs.refusal.delta', {
refusal: choice.logprobs?.refusal,
snapshot: choiceSnapshot.logprobs?.refusal ?? [],
});
}
const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
if (choiceSnapshot.finish_reason) {
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot);
if (state.current_tool_call_index != null) {
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index);
}
}
for (const toolCall of choice.delta.tool_calls ?? []) {
if (state.current_tool_call_index !== toolCall.index) {
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitContentDoneEvents).call(this, choiceSnapshot);
// new tool call started, the previous one is done
if (state.current_tool_call_index != null) {
__classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_emitToolCallDoneEvent).call(this, choiceSnapshot, state.current_tool_call_index);
}
}
state.current_tool_call_index = toolCall.index;
}
for (const toolCallDelta of choice.delta.tool_calls ?? []) {
const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallDelta.index];
if (!toolCallSnapshot?.type) {
continue;
}
if (toolCallSnapshot?.type === 'function') {
this._emit('tool_calls.function.arguments.delta', {
name: toolCallSnapshot.function?.name,
index: toolCallDelta.index,
arguments: toolCallSnapshot.function.arguments,
parsed_arguments: toolCallSnapshot.function.parsed_arguments,
arguments_delta: toolCallDelta.function?.arguments ?? '',
});
}
else {
assertNever(toolCallSnapshot?.type);
}
}
}
}, _ChatCompletionStream_emitToolCallDoneEvent = function _ChatCompletionStream_emitToolCallDoneEvent(choiceSnapshot, toolCallIndex) {
const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
if (state.done_tool_calls.has(toolCallIndex)) {
// we've already fired the done event
return;
}
const toolCallSnapshot = choiceSnapshot.message.tool_calls?.[toolCallIndex];
if (!toolCallSnapshot) {
throw new Error('no tool call snapshot');
}
if (!toolCallSnapshot.type) {
throw new Error('tool call snapshot missing `type`');
}
if (toolCallSnapshot.type === 'function') {
const inputTool = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.tools?.find((tool) => tool.type === 'function' && tool.function.name === toolCallSnapshot.function.name);
this._emit('tool_calls.function.arguments.done', {
name: toolCallSnapshot.function.name,
index: toolCallIndex,
arguments: toolCallSnapshot.function.arguments,
parsed_arguments: isAutoParsableTool(inputTool) ? inputTool.$parseRaw(toolCallSnapshot.function.arguments)
: inputTool?.function.strict ? JSON.parse(toolCallSnapshot.function.arguments)
: null,
});
}
else {
assertNever(toolCallSnapshot.type);
}
}, _ChatCompletionStream_emitContentDoneEvents = function _ChatCompletionStream_emitContentDoneEvents(choiceSnapshot) {
const state = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getChoiceEventState).call(this, choiceSnapshot);
if (choiceSnapshot.message.content && !state.content_done) {
state.content_done = true;
const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this);
this._emit('content.done', {
content: choiceSnapshot.message.content,
parsed: responseFormat ? responseFormat.$parseRaw(choiceSnapshot.message.content) : null,
});
}
if (choiceSnapshot.message.refusal && !state.refusal_done) {
state.refusal_done = true;
this._emit('refusal.done', { refusal: choiceSnapshot.message.refusal });
}
if (choiceSnapshot.logprobs?.content && !state.logprobs_content_done) {
state.logprobs_content_done = true;
this._emit('logprobs.content.done', { content: choiceSnapshot.logprobs.content });
}
if (choiceSnapshot.logprobs?.refusal && !state.logprobs_refusal_done) {
state.logprobs_refusal_done = true;
this._emit('logprobs.refusal.done', { refusal: choiceSnapshot.logprobs.refusal });
}
}, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() {
if (this.ended) {
throw new OpenAIError(`stream has ended, this shouldn't happen`);
}
const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
if (!snapshot) {
throw new OpenAIError(`request ended without sending any chunks`);
}
__classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, "f");
__classPrivateFieldSet(this, _ChatCompletionStream_choiceEventStates, [], "f");
return finalizeChatCompletion(snapshot, __classPrivateFieldGet(this, _ChatCompletionStream_params, "f"));
}, _ChatCompletionStream_getAutoParseableResponseFormat = function _ChatCompletionStream_getAutoParseableResponseFormat() {
const responseFormat = __classPrivateFieldGet(this, _ChatCompletionStream_params, "f")?.response_format;
if (isAutoParsableResponseFormat(responseFormat)) {
return responseFormat;
}
return null;
}, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) {
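    // Merges one streamed chunk into the running completion snapshot: top-level
    // fields are copied over, while per-choice content, refusal, logprobs entries,
    // function_call arguments and tool_call arguments are appended, with partial
    // JSON parsing applied when the params opt into auto-parsing.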
var _a, _b, _c, _d;
let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, "f");
const { choices, ...rest } = chunk;
if (!snapshot) {
snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, {
...rest,
choices: [],
}, "f");
}
else {
Object.assign(snapshot, rest);
}
for (const { delta, finish_reason, index, logprobs = null, ...other } of chunk.choices) {
let choice = snapshot.choices[index];
if (!choice) {
choice = snapshot.choices[index] = { finish_reason, index, message: {}, logprobs, ...other };
}
if (logprobs) {
if (!choice.logprobs) {
choice.logprobs = Object.assign({}, logprobs);
}
else {
const { content, refusal, ...rest } = logprobs;
assertIsEmpty(rest);
Object.assign(choice.logprobs, rest);
if (content) {
(_a = choice.logprobs).content ?? (_a.content = []);
choice.logprobs.content.push(...content);
}
if (refusal) {
(_b = choice.logprobs).refusal ?? (_b.refusal = []);
choice.logprobs.refusal.push(...refusal);
}
}
}
if (finish_reason) {
choice.finish_reason = finish_reason;
if (__classPrivateFieldGet(this, _ChatCompletionStream_params, "f") && hasAutoParseableInput(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"))) {
if (finish_reason === 'length') {
throw new LengthFinishReasonError();
}
if (finish_reason === 'content_filter') {
throw new ContentFilterFinishReasonError();
}
}
}
Object.assign(choice, other);
if (!delta)
continue; // Shouldn't happen; just in case.
const { content, refusal, function_call, role, tool_calls, ...rest } = delta;
assertIsEmpty(rest);
Object.assign(choice.message, rest);
if (refusal) {
choice.message.refusal = (choice.message.refusal || '') + refusal;
}
if (role)
choice.message.role = role;
if (function_call) {
if (!choice.message.function_call) {
choice.message.function_call = function_call;
}
else {
if (function_call.name)
choice.message.function_call.name = function_call.name;
if (function_call.arguments) {
(_c = choice.message.function_call).arguments ?? (_c.arguments = '');
choice.message.function_call.arguments += function_call.arguments;
}
}
}
if (content) {
choice.message.content = (choice.message.content || '') + content;
if (!choice.message.refusal && __classPrivateFieldGet(this, _ChatCompletionStream_instances, "m", _ChatCompletionStream_getAutoParseableResponseFormat).call(this)) {
choice.message.parsed = partialParse(choice.message.content);
}
}
if (tool_calls) {
if (!choice.message.tool_calls)
choice.message.tool_calls = [];
for (const { index, id, type, function: fn, ...rest } of tool_calls) {
const tool_call = ((_d = choice.message.tool_calls)[index] ?? (_d[index] = {}));
Object.assign(tool_call, rest);
if (id)
tool_call.id = id;
if (type)
tool_call.type = type;
if (fn)
tool_call.function ?? (tool_call.function = { name: fn.name ?? '', arguments: '' });
if (fn?.name)
tool_call.function.name = fn.name;
if (fn?.arguments) {
tool_call.function.arguments += fn.arguments;
if (shouldParseToolCall(__classPrivateFieldGet(this, _ChatCompletionStream_params, "f"), tool_call)) {
tool_call.function.parsed_arguments = partialParse(tool_call.function.arguments);
}
}
}
}
}
return snapshot;
}, Symbol.asyncIterator)]() {
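        // Adapts the event-based interface to an async iterator: chunks that
        // arrive before a reader are buffered in `pushQueue`, while pending
        // readers wait in `readQueue` until a later 'chunk', 'end', 'abort' or
        // 'error' event resolves or rejects them.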
const pushQueue = [];
const readQueue = [];
let done = false;
this.on('chunk', (chunk) => {
const reader = readQueue.shift();
if (reader) {
reader.resolve(chunk);
}
else {
pushQueue.push(chunk);
}
});
this.on('end', () => {
done = true;
for (const reader of readQueue) {
reader.resolve(undefined);
}
readQueue.length = 0;
});
this.on('abort', (err) => {
done = true;
for (const reader of readQueue) {
reader.reject(err);
}
readQueue.length = 0;
});
this.on('error', (err) => {
done = true;
for (const reader of readQueue) {
reader.reject(err);
}
readQueue.length = 0;
});
return {
next: async () => {
if (!pushQueue.length) {
if (done) {
return { value: undefined, done: true };
}
return new Promise((resolve, reject) => readQueue.push({ resolve, reject })).then((chunk) => (chunk ? { value: chunk, done: false } : { value: undefined, done: true }));
}
const chunk = pushQueue.shift();
return { value: chunk, done: false };
},
return: async () => {
this.abort();
return { value: undefined, done: true };
},
};
}
toReadableStream() {
const stream = new Stream(this[Symbol.asyncIterator].bind(this), this.controller);
return stream.toReadableStream();
}
}
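// Validates the accumulated snapshot and converts it into a finished chat
// completion: missing finish_reason, role, or tool call id/type/function fields
// raise OpenAIError, and maybeParseChatCompletion then applies any structured
// output / tool parsing requested in `params`.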
function finalizeChatCompletion(snapshot, params) {
const { id, choices, created, model, system_fingerprint, ...rest } = snapshot;
const completion = {
...rest,
id,
choices: choices.map(({ message, finish_reason, index, logprobs, ...choiceRest }) => {
if (!finish_reason) {
throw new OpenAIError(`missing finish_reason for choice ${index}`);
}
const { content = null, function_call, tool_calls, ...messageRest } = message;
const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine.
if (!role) {
throw new OpenAIError(`missing role for choice ${index}`);
}
if (function_call) {
const { arguments: args, name } = function_call;
if (args == null) {
throw new OpenAIError(`missing function_call.arguments for choice ${index}`);
}
if (!name) {
throw new OpenAIError(`missing function_call.name for choice ${index}`);
}
return {
...choiceRest,
message: {
content,
function_call: { arguments: args, name },
role,
refusal: message.refusal ?? null,
},
finish_reason,
index,
logprobs,
};
}
if (tool_calls) {
return {
...choiceRest,
index,
finish_reason,
logprobs,
message: {
...messageRest,
role,
content,
refusal: message.refusal ?? null,
tool_calls: tool_calls.map((tool_call, i) => {
const { function: fn, type, id, ...toolRest } = tool_call;
const { arguments: args, name, ...fnRest } = fn || {};
if (id == null) {
throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].id\n${str(snapshot)}`);
}
if (type == null) {
throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].type\n${str(snapshot)}`);
}
if (name == null) {
throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\n${str(snapshot)}`);
}
if (args == null) {
throw new OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\n${str(snapshot)}`);
}
return { ...toolRest, id, type, function: { ...fnRest, name, arguments: args } };
}),
},
};
}
return {
...choiceRest,
message: { ...messageRest, content, role, refusal: message.refusal ?? null },
finish_reason,
index,
logprobs,
};
}),
created,
model,
object: 'chat.completion',
...(system_fingerprint ? { system_fingerprint } : {}),
};
return maybeParseChatCompletion(completion, params);
}
function str(x) {
return JSON.stringify(x);
}
/**
* Ensures the given argument is an empty object, useful for
* asserting that all known properties on an object have been
* destructured.
*/
function assertIsEmpty(obj) {
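    // Runtime no-op: the "no remaining keys" check is intended to be enforced at
    // the type level in the TypeScript source and is erased in this compiled output.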
return;
}
function assertNever(_x) { }
//# sourceMappingURL=ChatCompletionStream.mjs.map