"use strict";
|
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.Runs = void 0;
|
|
const tslib_1 = require("../../../../internal/tslib.js");
|
|
const resource_1 = require("../../../../core/resource.js");
|
|
const StepsAPI = tslib_1.__importStar(require("./steps.js"));
|
|
const steps_1 = require("./steps.js");
|
|
const pagination_1 = require("../../../../core/pagination.js");
|
|
const headers_1 = require("../../../../internal/headers.js");
|
|
const AssistantStream_1 = require("../../../../lib/AssistantStream.js");
|
|
const sleep_1 = require("../../../../internal/utils/sleep.js");
|
|
const path_1 = require("../../../../internal/utils/path.js");
|
|
/**
 * @deprecated The Assistants API is deprecated in favor of the Responses API
 */
class Runs extends resource_1.APIResource {
    constructor() {
        super(...arguments);
        this.steps = new StepsAPI.Steps(this._client);
    }
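    /**
     * Create a run.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */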
    create(threadID, params, options) {
        const { include, ...body } = params;
        return this._client.post((0, path_1.path) `/threads/${threadID}/runs`, {
            query: { include },
            body,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
            stream: params.stream ?? false,
        });
    }
    /**
     * Retrieves a run.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */
    retrieve(runID, params, options) {
        const { thread_id } = params;
        return this._client.get((0, path_1.path) `/threads/${thread_id}/runs/${runID}`, {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
    /**
     * Modifies a run.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */
    update(runID, params, options) {
        const { thread_id, ...body } = params;
        return this._client.post((0, path_1.path) `/threads/${thread_id}/runs/${runID}`, {
            body,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
    /**
     * Returns a list of runs belonging to a thread.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */
    list(threadID, query = {}, options) {
        return this._client.getAPIList((0, path_1.path) `/threads/${threadID}/runs`, (pagination_1.CursorPage), {
            query,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
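    // A minimal usage sketch, assuming an initialized client named `client`; the thread ID and
    // `limit` value are illustrative. The cursor page returned here can be iterated with
    // `for await` to auto-paginate:
    //
    //   for await (const run of client.beta.threads.runs.list('thread_abc123', { limit: 20 })) {
    //     console.log(run.id, run.status);
    //   }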
    /**
     * Cancels a run that is `in_progress`.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */
    cancel(runID, params, options) {
        const { thread_id } = params;
        return this._client.post((0, path_1.path) `/threads/${thread_id}/runs/${runID}/cancel`, {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
    /**
     * A helper to create a run and poll for a terminal state. More information on Run
     * lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    async createAndPoll(threadId, body, options) {
        const run = await this.create(threadId, body, options);
        return await this.poll(run.id, { thread_id: threadId }, options);
    }
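    // A minimal usage sketch, assuming an initialized client named `client`; the thread and
    // assistant IDs are illustrative placeholders:
    //
    //   const run = await client.beta.threads.runs.createAndPoll('thread_abc123', {
    //     assistant_id: 'asst_abc123',
    //   });
    //   console.log(run.status); // e.g. 'completed'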
    /**
     * Create a Run stream
     *
     * @deprecated use `stream` instead
     */
    createAndStream(threadId, body, options) {
        return AssistantStream_1.AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
    }
    /**
     * A helper to poll a run status until it reaches a terminal state. More
     * information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    async poll(runId, params, options) {
        const headers = (0, headers_1.buildHeaders)([
            options?.headers,
            {
                'X-Stainless-Poll-Helper': 'true',
                'X-Stainless-Custom-Poll-Interval': options?.pollIntervalMs?.toString() ?? undefined,
            },
        ]);
        while (true) {
            const { data: run, response } = await this.retrieve(runId, params, {
                ...options,
                headers: { ...options?.headers, ...headers },
            }).withResponse();
            switch (run.status) {
                // If we are in any sort of intermediate state we poll
                case 'queued':
                case 'in_progress':
                case 'cancelling':
                    let sleepInterval = 5000;
                    if (options?.pollIntervalMs) {
                        sleepInterval = options.pollIntervalMs;
                    }
                    else {
                        const headerInterval = response.headers.get('openai-poll-after-ms');
                        if (headerInterval) {
                            const headerIntervalMs = parseInt(headerInterval);
                            if (!isNaN(headerIntervalMs)) {
                                sleepInterval = headerIntervalMs;
                            }
                        }
                    }
                    await (0, sleep_1.sleep)(sleepInterval);
                    break;
                // We return the run in any terminal state.
                case 'requires_action':
                case 'incomplete':
                case 'cancelled':
                case 'completed':
                case 'failed':
                case 'expired':
                    return run;
            }
        }
    }
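    // A minimal usage sketch, assuming an initialized client named `client`; the IDs are
    // illustrative. Per the logic above, `pollIntervalMs` takes precedence over both the
    // `openai-poll-after-ms` response header and the 5000 ms default:
    //
    //   const run = await client.beta.threads.runs.poll('run_abc123', { thread_id: 'thread_abc123' }, {
    //     pollIntervalMs: 500,
    //   });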
    /**
     * Create a Run stream
     */
    stream(threadId, body, options) {
        return AssistantStream_1.AssistantStream.createAssistantStream(threadId, this._client.beta.threads.runs, body, options);
    }
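    // A minimal usage sketch, assuming an initialized client named `client`; the IDs are
    // illustrative. The returned AssistantStream can be consumed as an async iterable of
    // streaming events:
    //
    //   const stream = client.beta.threads.runs.stream('thread_abc123', { assistant_id: 'asst_abc123' });
    //   for await (const event of stream) {
    //     console.log(event.event);
    //   }
    /**
     * When a run has `status: 'requires_action'`, submits the outputs from the tool calls so
     * the run can continue.
     *
     * @deprecated The Assistants API is deprecated in favor of the Responses API
     */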
    submitToolOutputs(runID, params, options) {
        const { thread_id, ...body } = params;
        return this._client.post((0, path_1.path) `/threads/${thread_id}/runs/${runID}/submit_tool_outputs`, {
            body,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
            stream: params.stream ?? false,
        });
    }
    /**
     * A helper to submit a tool output to a run and poll for a terminal run state.
     * More information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    async submitToolOutputsAndPoll(runId, params, options) {
        const run = await this.submitToolOutputs(runId, params, options);
        return await this.poll(run.id, params, options);
    }
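    // A minimal usage sketch, assuming an initialized client named `client` and a run currently
    // in `requires_action`; all IDs and the output value are illustrative:
    //
    //   const run = await client.beta.threads.runs.submitToolOutputsAndPoll('run_abc123', {
    //     thread_id: 'thread_abc123',
    //     tool_outputs: [{ tool_call_id: 'call_abc123', output: '{"temperature": 57}' }],
    //   });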
    /**
     * Submit the tool outputs from a previous run and stream the run to a terminal
     * state. More information on Run lifecycles can be found here:
     * https://platform.openai.com/docs/assistants/how-it-works/runs-and-run-steps
     */
    submitToolOutputsStream(runId, params, options) {
        return AssistantStream_1.AssistantStream.createToolAssistantStream(runId, this._client.beta.threads.runs, params, options);
    }
}
exports.Runs = Runs;
Runs.Steps = steps_1.Steps;
//# sourceMappingURL=runs.js.map