// ModelAi/node_modules/openai/resources/files.js
"use strict";
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
Object.defineProperty(exports, "__esModule", { value: true });
exports.Files = void 0;
const resource_1 = require("../core/resource.js");
const pagination_1 = require("../core/pagination.js");
const headers_1 = require("../internal/headers.js");
const sleep_1 = require("../internal/utils/sleep.js");
const error_1 = require("../error.js");
const uploads_1 = require("../internal/uploads.js");
const path_1 = require("../internal/utils/path.js");
class Files extends resource_1.APIResource {
    /**
     * Upload a file that can be used across various endpoints. Individual files can be
     * up to 512 MB, and the size of all files uploaded by one organization can be up
     * to 1 TB.
     *
     * The Assistants API supports files up to 2 million tokens and of specific file
     * types. See the
     * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) for
     * details.
     *
     * The Fine-tuning API only supports `.jsonl` files. The input also has certain
     * required formats for fine-tuning
     * [chat](https://platform.openai.com/docs/api-reference/fine-tuning/chat-input) or
     * [completions](https://platform.openai.com/docs/api-reference/fine-tuning/completions-input)
     * models.
     *
     * The Batch API only supports `.jsonl` files up to 200 MB in size. The input also
     * has a specific required
     * [format](https://platform.openai.com/docs/api-reference/batch/request-input).
     *
     * Please [contact us](https://help.openai.com/) if you need to increase these
     * storage limits.
     */
    create(body, options) {
        return this._client.post('/files', (0, uploads_1.multipartFormRequestOptions)({ body, ...options }, this._client));
    }
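    // Usage sketch (illustrative, not part of the generated file): assumes an
    // initialized client, e.g. `const client = new OpenAI()`, plus Node's `fs`
    // module; the filename is a placeholder.
    //
    //   const file = await client.files.create({
    //     file: fs.createReadStream('training_data.jsonl'),
    //     purpose: 'fine-tune',
    //   });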
    /**
     * Returns information about a specific file.
     */
    retrieve(fileID, options) {
        return this._client.get((0, path_1.path) `/files/${fileID}`, options);
    }
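    // Usage sketch (illustrative): fetches metadata for an uploaded file by ID,
    // using the same assumed `client`; the ID below is a placeholder.
    //
    //   const file = await client.files.retrieve('file-abc123');
    //   console.log(file.filename, file.status);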
    /**
     * Returns a list of files.
     */
    list(query = {}, options) {
        return this._client.getAPIList('/files', (pagination_1.CursorPage), { query, ...options });
    }
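    // Usage sketch (illustrative): `getAPIList` returns a cursor page, which the
    // SDK exposes as an async iterable over all pages; the query shown is optional.
    //
    //   for await (const fileObject of client.files.list({ purpose: 'fine-tune' })) {
    //     console.log(fileObject.id);
    //   }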
    /**
     * Delete a file.
     */
    delete(fileID, options) {
        return this._client.delete((0, path_1.path) `/files/${fileID}`, options);
    }
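    // Usage sketch (illustrative): deletes a stored file by ID (placeholder ID below).
    //
    //   const deleted = await client.files.delete('file-abc123');
    //   console.log(deleted.deleted); // true on success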
    /**
     * Returns the contents of the specified file.
     */
    content(fileID, options) {
        return this._client.get((0, path_1.path) `/files/${fileID}/content`, {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ Accept: 'application/binary' }, options?.headers]),
            __binaryResponse: true,
        });
    }
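    // Usage sketch (illustrative): because of `__binaryResponse: true`, the promise
    // resolves to the raw `Response`, so read it with `.text()`, `.arrayBuffer()`,
    // or `.blob()`; the ID below is a placeholder.
    //
    //   const response = await client.files.content('file-abc123');
    //   const text = await response.text();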
    /**
     * Waits for the given file to be processed; the default timeout is 30 minutes.
     */
    async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) {
        const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']);
        const start = Date.now();
        let file = await this.retrieve(id);
        // Poll until the file reaches a terminal state, giving up once maxWait has elapsed.
        while (!file.status || !TERMINAL_STATES.has(file.status)) {
            await (0, sleep_1.sleep)(pollInterval);
            file = await this.retrieve(id);
            if (Date.now() - start > maxWait) {
                throw new error_1.APIConnectionTimeoutError({
                    message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`,
                });
            }
        }
        return file;
    }
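    // Usage sketch (illustrative): polls every 2 seconds until the file is processed,
    // errors, or is deleted; both option values and the ID below are arbitrary placeholders.
    //
    //   const file = await client.files.waitForProcessing('file-abc123', {
    //     pollInterval: 2000,
    //     maxWait: 10 * 60 * 1000,
    //   });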
}
exports.Files = Files;
//# sourceMappingURL=files.js.map