164 lines
No EOL
6.3 KiB
JavaScript
164 lines
No EOL
6.3 KiB
JavaScript
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
import { APIResource } from "../../../../resource.mjs";
|
|
import { isRequestOptions } from "../../../../core.mjs";
|
|
import { AssistantStream } from "../../../../lib/AssistantStream.mjs";
|
|
import { sleep } from "../../../../core.mjs";
|
|
import * as RunsAPI from "./runs.mjs";
|
|
import * as StepsAPI from "./steps.mjs";
|
|
import { CursorPage } from "../../../../pagination.mjs";
|
|
export class Runs extends APIResource {
    constructor() {
        super(...arguments);
        // Sub-resource for inspecting the individual steps of a run.
        this.steps = new StepsAPI.Steps(this._client);
    }
    /**
     * Create a run on a thread.
     *
     * `include` is sent as a query parameter; everything else in `params` is the
     * request body. When `params.stream` is true the client returns a stream.
     */
    create(threadId, params, options) {
        const { include, ...body } = params;
        return this._client.post(`/threads/${threadId}/runs`, {
            query: { include },
            body,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
            stream: params.stream ?? false,
        });
    }
    /**
     * Retrieves a run.
     */
    retrieve(threadId, runId, options) {
        return this._client.get(`/threads/${threadId}/runs/${runId}`, {
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Modifies a run.
     */
    update(threadId, runId, body, options) {
        return this._client.post(`/threads/${threadId}/runs/${runId}`, {
            body,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Lists runs belonging to a thread (cursor-paginated).
     *
     * Supports being called as `list(threadId, options)` — if the second
     * argument is a request-options object it is shifted into place.
     */
    list(threadId, query = {}, options) {
        if (isRequestOptions(query)) {
            return this.list(threadId, {}, query);
        }
        return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, {
            query,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * Cancels a run that is `in_progress`.
     */
    cancel(threadId, runId, options) {
        return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, {
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
        });
    }
    /**
     * A helper to create a run an poll for a terminal state. More information on Run
     * lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    async createAndPoll(threadId, body, options) {
        const run = await this.create(threadId, body, options);
        return await this.poll(threadId, run.id, options);
    }
    /**
     * Create a Run stream
     *
     * @deprecated use `stream` instead
     */
    createAndStream(threadId, body, options) {
        return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
    }
    /**
     * A helper to poll a run status until it reaches a terminal state. More
     * information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     *
     * Poll interval precedence: `options.pollIntervalMs`, then the server's
     * `openai-poll-after-ms` response header, then a 5000 ms default.
     */
    async poll(threadId, runId, options) {
        const headers = { ...options?.headers, 'X-Stainless-Poll-Helper': 'true' };
        if (options?.pollIntervalMs) {
            headers['X-Stainless-Custom-Poll-Interval'] = options.pollIntervalMs.toString();
        }
        while (true) {
            const { data: run, response } = await this.retrieve(threadId, runId, {
                ...options,
                headers: { ...options?.headers, ...headers },
            }).withResponse();
            switch (run.status) {
                //If we are in any sort of intermediate state we poll
                case 'queued':
                case 'in_progress':
                case 'cancelling': {
                    // Braces scope `sleepInterval` to this case (lexical
                    // declarations in an unbraced case leak across the switch).
                    let sleepInterval = 5000;
                    if (options?.pollIntervalMs) {
                        sleepInterval = options.pollIntervalMs;
                    }
                    else {
                        const headerInterval = response.headers.get('openai-poll-after-ms');
                        if (headerInterval) {
                            // Fix: explicit radix — never rely on parseInt's default.
                            const headerIntervalMs = parseInt(headerInterval, 10);
                            // Fix: Number.isNaN avoids the coercing global isNaN.
                            if (!Number.isNaN(headerIntervalMs)) {
                                sleepInterval = headerIntervalMs;
                            }
                        }
                    }
                    await sleep(sleepInterval);
                    break;
                }
                //We return the run in any terminal state.
                case 'requires_action':
                case 'incomplete':
                case 'cancelled':
                case 'completed':
                case 'failed':
                case 'expired':
                    return run;
            }
        }
    }
    /**
     * Create a Run stream
     */
    stream(threadId, body, options) {
        return AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
    }
    /**
     * Submit tool outputs for a run that is `requires_action`. When
     * `body.stream` is true the client returns a stream.
     */
    submitToolOutputs(threadId, runId, body, options) {
        return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, {
            body,
            ...options,
            headers: { 'OpenAI-Beta': 'assistants=v2', ...options?.headers },
            stream: body.stream ?? false,
        });
    }
    /**
     * A helper to submit a tool output to a run and poll for a terminal run state.
     * More information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    async submitToolOutputsAndPoll(threadId, runId, body, options) {
        const run = await this.submitToolOutputs(threadId, runId, body, options);
        return await this.poll(threadId, run.id, options);
    }
    /**
     * Submit the tool outputs from a previous run and stream the run to a terminal
     * state. More information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    submitToolOutputsStream(threadId, runId, body, options) {
        return AssistantStream.createToolAssistantStream(threadId, runId, this._client.beta.threads.runs, body, options);
    }
}
|
|
/**
 * Cursor-based page of Run objects, as returned by `Runs.list`.
 * All pagination behavior is inherited from `CursorPage`.
 */
export class RunsPage extends CursorPage {
}
|
|
// Attach related classes as static members of `Runs` — the runtime output of
// the original TypeScript `namespace Runs` declaration merge.
Object.assign(Runs, {
    RunsPage: RunsAPI.RunsPage,
    Steps: StepsAPI.Steps,
    RunStepsPage: StepsAPI.RunStepsPage,
});
|
|
//# sourceMappingURL=runs.mjs.map
|