"use strict";
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
var _OpenAI_instances, _a, _OpenAI_encoder, _OpenAI_baseURLOverridden;
Object.defineProperty(exports, "__esModule", { value: true });
exports.OpenAI = void 0;
const tslib_1 = require("./internal/tslib.js");
const uuid_1 = require("./internal/utils/uuid.js");
const values_1 = require("./internal/utils/values.js");
const sleep_1 = require("./internal/utils/sleep.js");
const errors_1 = require("./internal/errors.js");
const detect_platform_1 = require("./internal/detect-platform.js");
const Shims = tslib_1.__importStar(require("./internal/shims.js"));
const Opts = tslib_1.__importStar(require("./internal/request-options.js"));
const qs = tslib_1.__importStar(require("./internal/qs/index.js"));
const version_1 = require("./version.js");
const Errors = tslib_1.__importStar(require("./core/error.js"));
const Pagination = tslib_1.__importStar(require("./core/pagination.js"));
const Uploads = tslib_1.__importStar(require("./core/uploads.js"));
const API = tslib_1.__importStar(require("./resources/index.js"));
const api_promise_1 = require("./core/api-promise.js");
const batches_1 = require("./resources/batches.js");
const completions_1 = require("./resources/completions.js");
const embeddings_1 = require("./resources/embeddings.js");
const files_1 = require("./resources/files.js");
const images_1 = require("./resources/images.js");
const models_1 = require("./resources/models.js");
const moderations_1 = require("./resources/moderations.js");
const webhooks_1 = require("./resources/webhooks.js");
const audio_1 = require("./resources/audio/audio.js");
const beta_1 = require("./resources/beta/beta.js");
const chat_1 = require("./resources/chat/chat.js");
const containers_1 = require("./resources/containers/containers.js");
const conversations_1 = require("./resources/conversations/conversations.js");
const evals_1 = require("./resources/evals/evals.js");
const fine_tuning_1 = require("./resources/fine-tuning/fine-tuning.js");
const graders_1 = require("./resources/graders/graders.js");
const responses_1 = require("./resources/responses/responses.js");
const uploads_1 = require("./resources/uploads/uploads.js");
const vector_stores_1 = require("./resources/vector-stores/vector-stores.js");
const detect_platform_2 = require("./internal/detect-platform.js");
const headers_1 = require("./internal/headers.js");
const env_1 = require("./internal/utils/env.js");
const log_1 = require("./internal/utils/log.js");
const values_2 = require("./internal/utils/values.js");
/**
 * API Client for interfacing with the OpenAI API.
 */
class OpenAI {
    /**
     * API Client for interfacing with the OpenAI API.
     *
     * @param {string | undefined} [opts.apiKey=process.env['OPENAI_API_KEY'] ?? undefined]
     * @param {string | null | undefined} [opts.organization=process.env['OPENAI_ORG_ID'] ?? null]
     * @param {string | null | undefined} [opts.project=process.env['OPENAI_PROJECT_ID'] ?? null]
     * @param {string | null | undefined} [opts.webhookSecret=process.env['OPENAI_WEBHOOK_SECRET'] ?? null]
     * @param {string} [opts.baseURL=process.env['OPENAI_BASE_URL'] ?? https://api.openai.com/v1] - Override the default base URL for the API.
     * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.
     * @param {MergedRequestInit} [opts.fetchOptions] - Additional `RequestInit` options to be passed to `fetch` calls.
     * @param {Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.
     * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.
     * @param {HeadersLike} opts.defaultHeaders - Default headers to include with every request to the API.
     * @param {Record<string, string | undefined>} opts.defaultQuery - Default query parameters to include with every request to the API.
     * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.
     */
    constructor({ baseURL = (0, env_1.readEnv)('OPENAI_BASE_URL'), apiKey = (0, env_1.readEnv)('OPENAI_API_KEY'), organization = (0, env_1.readEnv)('OPENAI_ORG_ID') ?? null, project = (0, env_1.readEnv)('OPENAI_PROJECT_ID') ?? null, webhookSecret = (0, env_1.readEnv)('OPENAI_WEBHOOK_SECRET') ?? null, ...opts } = {}) {
        _OpenAI_instances.add(this);
        _OpenAI_encoder.set(this, void 0);
        this.completions = new API.Completions(this);
        this.chat = new API.Chat(this);
        this.embeddings = new API.Embeddings(this);
        this.files = new API.Files(this);
        this.images = new API.Images(this);
        this.audio = new API.Audio(this);
        this.moderations = new API.Moderations(this);
        this.models = new API.Models(this);
        this.fineTuning = new API.FineTuning(this);
        this.graders = new API.Graders(this);
        this.vectorStores = new API.VectorStores(this);
        this.webhooks = new API.Webhooks(this);
        this.beta = new API.Beta(this);
        this.batches = new API.Batches(this);
        this.uploads = new API.Uploads(this);
        this.responses = new API.Responses(this);
        this.conversations = new API.Conversations(this);
        this.evals = new API.Evals(this);
        this.containers = new API.Containers(this);
        if (apiKey === undefined) {
            throw new Errors.OpenAIError("The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' }).");
        }
        const options = {
            apiKey,
            organization,
            project,
            webhookSecret,
            ...opts,
            baseURL: baseURL || `https://api.openai.com/v1`,
        };
        if (!options.dangerouslyAllowBrowser && (0, detect_platform_2.isRunningInBrowser)()) {
            throw new Errors.OpenAIError("It looks like you're running in a browser-like environment.\n\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\nIf you understand the risks and have appropriate mitigations in place,\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\n\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\n\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\n");
        }
        this.baseURL = options.baseURL;
        this.timeout = options.timeout ?? _a.DEFAULT_TIMEOUT /* 10 minutes */;
        this.logger = options.logger ?? console;
        const defaultLogLevel = 'warn';
        // Set default logLevel early so that we can log a warning in parseLogLevel.
        this.logLevel = defaultLogLevel;
        this.logLevel =
            (0, log_1.parseLogLevel)(options.logLevel, 'ClientOptions.logLevel', this) ??
                (0, log_1.parseLogLevel)((0, env_1.readEnv)('OPENAI_LOG'), "process.env['OPENAI_LOG']", this) ??
                defaultLogLevel;
        this.fetchOptions = options.fetchOptions;
        this.maxRetries = options.maxRetries ?? 2;
        this.fetch = options.fetch ?? Shims.getDefaultFetch();
        tslib_1.__classPrivateFieldSet(this, _OpenAI_encoder, Opts.FallbackEncoder, "f");
        this._options = options;
        this.apiKey = apiKey;
        this.organization = organization;
        this.project = project;
        this.webhookSecret = webhookSecret;
    }
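    /*
     * Illustrative sketch (not part of the generated client): constructing a client.
     * Any option omitted here falls back to the environment variables documented above;
     * the values shown are the library defaults.
     *
     *   const client = new OpenAI({
     *     apiKey: process.env['OPENAI_API_KEY'], // default source when omitted
     *     maxRetries: 2,                         // default
     *     timeout: 600000,                       // default: 10 minutes, in milliseconds
     *     baseURL: 'https://api.openai.com/v1',  // default
     *   });
     */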
    /**
     * Create a new client instance re-using the same options given to the current client with optional overriding.
     */
    withOptions(options) {
        const client = new this.constructor({
            ...this._options,
            baseURL: this.baseURL,
            maxRetries: this.maxRetries,
            timeout: this.timeout,
            logger: this.logger,
            logLevel: this.logLevel,
            fetch: this.fetch,
            fetchOptions: this.fetchOptions,
            apiKey: this.apiKey,
            organization: this.organization,
            project: this.project,
            webhookSecret: this.webhookSecret,
            ...options,
        });
        return client;
    }
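    /*
     * Illustrative sketch: `withOptions` copies the current client's resolved options and
     * applies your overrides on top, so only the fields you pass change. The URL below is
     * a hypothetical placeholder.
     *
     *   const proxied = client.withOptions({ baseURL: 'https://example.test/v1' });
     *   const patient = client.withOptions({ timeout: 120000, maxRetries: 5 });
     */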
    defaultQuery() {
        return this._options.defaultQuery;
    }
    validateHeaders({ values, nulls }) {
        return;
    }
    async authHeaders(opts) {
        return (0, headers_1.buildHeaders)([{ Authorization: `Bearer ${this.apiKey}` }]);
    }
    stringifyQuery(query) {
        return qs.stringify(query, { arrayFormat: 'brackets' });
    }
    getUserAgent() {
        return `${this.constructor.name}/JS ${version_1.VERSION}`;
    }
    defaultIdempotencyKey() {
        return `stainless-node-retry-${(0, uuid_1.uuid4)()}`;
    }
    makeStatusError(status, error, message, headers) {
        return Errors.APIError.generate(status, error, message, headers);
    }
    buildURL(path, query, defaultBaseURL) {
        const baseURL = (!tslib_1.__classPrivateFieldGet(this, _OpenAI_instances, "m", _OpenAI_baseURLOverridden).call(this) && defaultBaseURL) || this.baseURL;
        const url = (0, values_1.isAbsoluteURL)(path) ?
            new URL(path)
            : new URL(baseURL + (baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path));
        const defaultQuery = this.defaultQuery();
        if (!(0, values_2.isEmptyObj)(defaultQuery)) {
            query = { ...defaultQuery, ...query };
        }
        if (typeof query === 'object' && query && !Array.isArray(query)) {
            url.search = this.stringifyQuery(query);
        }
        return url.toString();
    }
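    /*
     * Illustrative sketch of how `buildURL` resolves a request URL, assuming the default
     * base URL and no `defaultQuery`:
     *
     *   client.buildURL('/models', { limit: 2 });
     *   // => 'https://api.openai.com/v1/models?limit=2'
     *
     * Absolute paths are used as-is; query objects are serialized by `stringifyQuery`
     * (qs with `arrayFormat: 'brackets'`).
     */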
    /**
     * Used as a callback for mutating the given `FinalRequestOptions` object.
     */
    async prepareOptions(options) { }
    /**
     * Used as a callback for mutating the given `RequestInit` object.
     *
     * This is useful for cases where you want to add certain headers based off of
     * the request properties, e.g. `method` or `url`.
     */
    async prepareRequest(request, { url, options }) { }
    get(path, opts) {
        return this.methodRequest('get', path, opts);
    }
    post(path, opts) {
        return this.methodRequest('post', path, opts);
    }
    patch(path, opts) {
        return this.methodRequest('patch', path, opts);
    }
    put(path, opts) {
        return this.methodRequest('put', path, opts);
    }
    delete(path, opts) {
        return this.methodRequest('delete', path, opts);
    }
    methodRequest(method, path, opts) {
        return this.request(Promise.resolve(opts).then((opts) => {
            return { method, path, ...opts };
        }));
    }
    request(options, remainingRetries = null) {
        return new api_promise_1.APIPromise(this, this.makeRequest(options, remainingRetries, undefined));
    }
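    /*
     * Illustrative sketch: the verb helpers above all funnel into `request`, so these two
     * calls are equivalent and both return an `APIPromise`:
     *
     *   client.get('/models', { query: { limit: 2 } });
     *   client.request({ method: 'get', path: '/models', query: { limit: 2 } });
     */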
    async makeRequest(optionsInput, retriesRemaining, retryOfRequestLogID) {
        const options = await optionsInput;
        const maxRetries = options.maxRetries ?? this.maxRetries;
        if (retriesRemaining == null) {
            retriesRemaining = maxRetries;
        }
        await this.prepareOptions(options);
        const { req, url, timeout } = await this.buildRequest(options, {
            retryCount: maxRetries - retriesRemaining,
        });
        await this.prepareRequest(req, { url, options });
        /** Not an API request ID, just for correlating local log entries. */
        const requestLogID = 'log_' + ((Math.random() * (1 << 24)) | 0).toString(16).padStart(6, '0');
        const retryLogStr = retryOfRequestLogID === undefined ? '' : `, retryOf: ${retryOfRequestLogID}`;
        const startTime = Date.now();
        (0, log_1.loggerFor)(this).debug(`[${requestLogID}] sending request`, (0, log_1.formatRequestDetails)({
            retryOfRequestLogID,
            method: options.method,
            url,
            options,
            headers: req.headers,
        }));
        if (options.signal?.aborted) {
            throw new Errors.APIUserAbortError();
        }
        const controller = new AbortController();
        const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(errors_1.castToError);
        const headersTime = Date.now();
        if (response instanceof Error) {
            const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
            if (options.signal?.aborted) {
                throw new Errors.APIUserAbortError();
            }
            // detect native connection timeout errors
            // deno throws "TypeError: error sending request for url (https://example/): client error (Connect): tcp connect error: Operation timed out (os error 60): Operation timed out (os error 60)"
            // undici throws "TypeError: fetch failed" with cause "ConnectTimeoutError: Connect Timeout Error (attempted address: example:443, timeout: 1ms)"
            // others do not provide enough information to distinguish timeouts from other connection errors
            const isTimeout = (0, errors_1.isAbortError)(response) ||
                /timed? ?out/i.test(String(response) + ('cause' in response ? String(response.cause) : ''));
            if (retriesRemaining) {
                (0, log_1.loggerFor)(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - ${retryMessage}`);
                (0, log_1.loggerFor)(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (${retryMessage})`, (0, log_1.formatRequestDetails)({
                    retryOfRequestLogID,
                    url,
                    durationMs: headersTime - startTime,
                    message: response.message,
                }));
                return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID);
            }
            (0, log_1.loggerFor)(this).info(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} - error; no more retries left`);
            (0, log_1.loggerFor)(this).debug(`[${requestLogID}] connection ${isTimeout ? 'timed out' : 'failed'} (error; no more retries left)`, (0, log_1.formatRequestDetails)({
                retryOfRequestLogID,
                url,
                durationMs: headersTime - startTime,
                message: response.message,
            }));
            if (isTimeout) {
                throw new Errors.APIConnectionTimeoutError();
            }
            throw new Errors.APIConnectionError({ cause: response });
        }
        const specialHeaders = [...response.headers.entries()]
            .filter(([name]) => name === 'x-request-id')
            .map(([name, value]) => ', ' + name + ': ' + JSON.stringify(value))
            .join('');
        const responseInfo = `[${requestLogID}${retryLogStr}${specialHeaders}] ${req.method} ${url} ${response.ok ? 'succeeded' : 'failed'} with status ${response.status} in ${headersTime - startTime}ms`;
        if (!response.ok) {
            const shouldRetry = await this.shouldRetry(response);
            if (retriesRemaining && shouldRetry) {
                const retryMessage = `retrying, ${retriesRemaining} attempts remaining`;
                // We don't need the body of this response.
                await Shims.CancelReadableStream(response.body);
                (0, log_1.loggerFor)(this).info(`${responseInfo} - ${retryMessage}`);
                (0, log_1.loggerFor)(this).debug(`[${requestLogID}] response error (${retryMessage})`, (0, log_1.formatRequestDetails)({
                    retryOfRequestLogID,
                    url: response.url,
                    status: response.status,
                    headers: response.headers,
                    durationMs: headersTime - startTime,
                }));
                return this.retryRequest(options, retriesRemaining, retryOfRequestLogID ?? requestLogID, response.headers);
            }
            const retryMessage = shouldRetry ? `error; no more retries left` : `error; not retryable`;
            (0, log_1.loggerFor)(this).info(`${responseInfo} - ${retryMessage}`);
            const errText = await response.text().catch((err) => (0, errors_1.castToError)(err).message);
            const errJSON = (0, values_1.safeJSON)(errText);
            const errMessage = errJSON ? undefined : errText;
            (0, log_1.loggerFor)(this).debug(`[${requestLogID}] response error (${retryMessage})`, (0, log_1.formatRequestDetails)({
                retryOfRequestLogID,
                url: response.url,
                status: response.status,
                headers: response.headers,
                message: errMessage,
                durationMs: Date.now() - startTime,
            }));
            const err = this.makeStatusError(response.status, errJSON, errMessage, response.headers);
            throw err;
        }
        (0, log_1.loggerFor)(this).info(responseInfo);
        (0, log_1.loggerFor)(this).debug(`[${requestLogID}] response start`, (0, log_1.formatRequestDetails)({
            retryOfRequestLogID,
            url: response.url,
            status: response.status,
            headers: response.headers,
            durationMs: headersTime - startTime,
        }));
        return { response, options, controller, requestLogID, retryOfRequestLogID, startTime };
    }
    getAPIList(path, Page, opts) {
        return this.requestAPIList(Page, { method: 'get', path, ...opts });
    }
    requestAPIList(Page, options) {
        const request = this.makeRequest(options, null, undefined);
        return new Pagination.PagePromise(this, request, Page);
    }
    async fetchWithTimeout(url, init, ms, controller) {
        const { signal, method, ...options } = init || {};
        if (signal)
            signal.addEventListener('abort', () => controller.abort());
        const timeout = setTimeout(() => controller.abort(), ms);
        const isReadableBody = (globalThis.ReadableStream && options.body instanceof globalThis.ReadableStream) ||
            (typeof options.body === 'object' && options.body !== null && Symbol.asyncIterator in options.body);
        const fetchOptions = {
            signal: controller.signal,
            ...(isReadableBody ? { duplex: 'half' } : {}),
            method: 'GET',
            ...options,
        };
        if (method) {
            // Custom methods like 'patch' need to be uppercased
            // See https://github.com/nodejs/undici/issues/2294
            fetchOptions.method = method.toUpperCase();
        }
        try {
            // use undefined this binding; fetch errors if bound to something else in browser/cloudflare
            return await this.fetch.call(undefined, url, fetchOptions);
        }
        finally {
            clearTimeout(timeout);
        }
    }
    async shouldRetry(response) {
        // Note this is not a standard header.
        const shouldRetryHeader = response.headers.get('x-should-retry');
        // If the server explicitly says whether or not to retry, obey.
        if (shouldRetryHeader === 'true')
            return true;
        if (shouldRetryHeader === 'false')
            return false;
        // Retry on request timeouts.
        if (response.status === 408)
            return true;
        // Retry on lock timeouts.
        if (response.status === 409)
            return true;
        // Retry on rate limits.
        if (response.status === 429)
            return true;
        // Retry internal errors.
        if (response.status >= 500)
            return true;
        return false;
    }
    async retryRequest(options, retriesRemaining, requestLogID, responseHeaders) {
        let timeoutMillis;
        // Note the `retry-after-ms` header may not be standard, but is a good idea and we'd like proactive support for it.
        const retryAfterMillisHeader = responseHeaders?.get('retry-after-ms');
        if (retryAfterMillisHeader) {
            const timeoutMs = parseFloat(retryAfterMillisHeader);
            if (!Number.isNaN(timeoutMs)) {
                timeoutMillis = timeoutMs;
            }
        }
        // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After
        const retryAfterHeader = responseHeaders?.get('retry-after');
        if (retryAfterHeader && !timeoutMillis) {
            const timeoutSeconds = parseFloat(retryAfterHeader);
            if (!Number.isNaN(timeoutSeconds)) {
                timeoutMillis = timeoutSeconds * 1000;
            }
            else {
                timeoutMillis = Date.parse(retryAfterHeader) - Date.now();
            }
        }
        // If the API asks us to wait a certain amount of time (and it's a reasonable amount),
        // just do what it says, but otherwise calculate a default
        if (!(timeoutMillis && 0 <= timeoutMillis && timeoutMillis < 60 * 1000)) {
            const maxRetries = options.maxRetries ?? this.maxRetries;
            timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);
        }
        await (0, sleep_1.sleep)(timeoutMillis);
        return this.makeRequest(options, retriesRemaining - 1, requestLogID);
    }
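    /*
     * Illustrative examples of the retry-delay headers handled above (header values are
     * hypothetical):
     *
     *   retry-after-ms: 250                           -> wait 250ms
     *   retry-after: 3                                -> wait 3000ms
     *   retry-after: Wed, 21 Oct 2015 07:28:00 GMT    -> wait until that date (via Date.parse)
     *
     * A header-derived delay is only honored when it falls between 0 and 60 seconds;
     * otherwise the exponential backoff below is used.
     */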
    calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {
        const initialRetryDelay = 0.5;
        const maxRetryDelay = 8.0;
        const numRetries = maxRetries - retriesRemaining;
        // Apply exponential backoff, but not more than the max.
        const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);
        // Apply some jitter, take up to at most 25 percent of the retry time.
        const jitter = 1 - Math.random() * 0.25;
        return sleepSeconds * jitter * 1000;
    }
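    /*
     * Worked example of the default backoff with maxRetries = 2: the first retry sleeps
     * about 0.5s * 2^0 = 0.5s and the second about 0.5s * 2^1 = 1s, capped at 8s; each
     * delay is then reduced by up to 25% of its value as jitter.
     */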
    async buildRequest(inputOptions, { retryCount = 0 } = {}) {
        const options = { ...inputOptions };
        const { method, path, query, defaultBaseURL } = options;
        const url = this.buildURL(path, query, defaultBaseURL);
        if ('timeout' in options)
            (0, values_1.validatePositiveInteger)('timeout', options.timeout);
        options.timeout = options.timeout ?? this.timeout;
        const { bodyHeaders, body } = this.buildBody({ options });
        const reqHeaders = await this.buildHeaders({ options: inputOptions, method, bodyHeaders, retryCount });
        const req = {
            method,
            headers: reqHeaders,
            ...(options.signal && { signal: options.signal }),
            ...(globalThis.ReadableStream &&
                body instanceof globalThis.ReadableStream && { duplex: 'half' }),
            ...(body && { body }),
            ...(this.fetchOptions ?? {}),
            ...(options.fetchOptions ?? {}),
        };
        return { req, url, timeout: options.timeout };
    }
    async buildHeaders({ options, method, bodyHeaders, retryCount, }) {
        let idempotencyHeaders = {};
        if (this.idempotencyHeader && method !== 'get') {
            if (!options.idempotencyKey)
                options.idempotencyKey = this.defaultIdempotencyKey();
            idempotencyHeaders[this.idempotencyHeader] = options.idempotencyKey;
        }
        const headers = (0, headers_1.buildHeaders)([
            idempotencyHeaders,
            {
                Accept: 'application/json',
                'User-Agent': this.getUserAgent(),
                'X-Stainless-Retry-Count': String(retryCount),
                ...(options.timeout ? { 'X-Stainless-Timeout': String(Math.trunc(options.timeout / 1000)) } : {}),
                ...(0, detect_platform_1.getPlatformHeaders)(),
                'OpenAI-Organization': this.organization,
                'OpenAI-Project': this.project,
            },
            await this.authHeaders(options),
            this._options.defaultHeaders,
            bodyHeaders,
            options.headers,
        ]);
        this.validateHeaders(headers);
        return headers.values;
    }
    buildBody({ options: { body, headers: rawHeaders } }) {
        if (!body) {
            return { bodyHeaders: undefined, body: undefined };
        }
        const headers = (0, headers_1.buildHeaders)([rawHeaders]);
        if (
        // Pass raw type verbatim
        ArrayBuffer.isView(body) ||
            body instanceof ArrayBuffer ||
            body instanceof DataView ||
            (typeof body === 'string' &&
                // Preserve legacy string encoding behavior for now
                headers.values.has('content-type')) ||
            // `Blob` is superset of `File`
            (globalThis.Blob && body instanceof globalThis.Blob) ||
            // `FormData` -> `multipart/form-data`
            body instanceof FormData ||
            // `URLSearchParams` -> `application/x-www-form-urlencoded`
            body instanceof URLSearchParams ||
            // Send chunked stream (each chunk has own `length`)
            (globalThis.ReadableStream && body instanceof globalThis.ReadableStream)) {
            return { bodyHeaders: undefined, body: body };
        }
        else if (typeof body === 'object' &&
            (Symbol.asyncIterator in body ||
                (Symbol.iterator in body && 'next' in body && typeof body.next === 'function'))) {
            return { bodyHeaders: undefined, body: Shims.ReadableStreamFrom(body) };
        }
        else {
            return tslib_1.__classPrivateFieldGet(this, _OpenAI_encoder, "f").call(this, { body, headers });
        }
    }
}
exports.OpenAI = OpenAI;
_a = OpenAI, _OpenAI_encoder = new WeakMap(), _OpenAI_instances = new WeakSet(), _OpenAI_baseURLOverridden = function _OpenAI_baseURLOverridden() {
    return this.baseURL !== 'https://api.openai.com/v1';
};
OpenAI.OpenAI = _a;
OpenAI.DEFAULT_TIMEOUT = 600000; // 10 minutes
OpenAI.OpenAIError = Errors.OpenAIError;
OpenAI.APIError = Errors.APIError;
OpenAI.APIConnectionError = Errors.APIConnectionError;
OpenAI.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError;
OpenAI.APIUserAbortError = Errors.APIUserAbortError;
OpenAI.NotFoundError = Errors.NotFoundError;
OpenAI.ConflictError = Errors.ConflictError;
OpenAI.RateLimitError = Errors.RateLimitError;
OpenAI.BadRequestError = Errors.BadRequestError;
OpenAI.AuthenticationError = Errors.AuthenticationError;
OpenAI.InternalServerError = Errors.InternalServerError;
OpenAI.PermissionDeniedError = Errors.PermissionDeniedError;
OpenAI.UnprocessableEntityError = Errors.UnprocessableEntityError;
OpenAI.InvalidWebhookSignatureError = Errors.InvalidWebhookSignatureError;
OpenAI.toFile = Uploads.toFile;
OpenAI.Completions = completions_1.Completions;
OpenAI.Chat = chat_1.Chat;
OpenAI.Embeddings = embeddings_1.Embeddings;
OpenAI.Files = files_1.Files;
OpenAI.Images = images_1.Images;
OpenAI.Audio = audio_1.Audio;
OpenAI.Moderations = moderations_1.Moderations;
OpenAI.Models = models_1.Models;
OpenAI.FineTuning = fine_tuning_1.FineTuning;
OpenAI.Graders = graders_1.Graders;
OpenAI.VectorStores = vector_stores_1.VectorStores;
OpenAI.Webhooks = webhooks_1.Webhooks;
OpenAI.Beta = beta_1.Beta;
OpenAI.Batches = batches_1.Batches;
OpenAI.Uploads = uploads_1.Uploads;
OpenAI.Responses = responses_1.Responses;
OpenAI.Conversations = conversations_1.Conversations;
OpenAI.Evals = evals_1.Evals;
OpenAI.Containers = containers_1.Containers;
//# sourceMappingURL=client.js.map