"use strict";
|
|
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
exports.Files = void 0;
|
|
const resource_1 = require("../../core/resource.js");
|
|
const pagination_1 = require("../../core/pagination.js");
|
|
const headers_1 = require("../../internal/headers.js");
|
|
const utils_1 = require("../../internal/utils.js");
|
|
const path_1 = require("../../internal/utils/path.js");
|
|
class Files extends resource_1.APIResource {
|
|
/**
|
|
* Create a vector store file by attaching a
|
|
* [File](https://platform.openai.com/docs/api-reference/files) to a
|
|
* [vector store](https://platform.openai.com/docs/api-reference/vector-stores/object).
|
|
*/
|
|
create(vectorStoreID, body, options) {
|
|
return this._client.post((0, path_1.path) `/vector_stores/${vectorStoreID}/files`, {
|
|
body,
|
|
...options,
|
|
headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
|
|
});
|
|
}
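    // Usage sketch (illustrative, not part of the generated client): attach an
    // already-uploaded File to a vector store. Assumes a configured OpenAI client
    // with this resource mounted at `client.vectorStores.files`; the IDs are
    // placeholders.
    //
    //   const vectorStoreFile = await client.vectorStores.files.create('vs_abc123', {
    //     file_id: 'file-abc123',
    //   });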
    /**
     * Retrieves a vector store file.
     */
    retrieve(fileID, params, options) {
        const { vector_store_id } = params;
        return this._client.get((0, path_1.path) `/vector_stores/${vector_store_id}/files/${fileID}`, {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
    /**
     * Update attributes on a vector store file.
     */
    update(fileID, params, options) {
        const { vector_store_id, ...body } = params;
        return this._client.post((0, path_1.path) `/vector_stores/${vector_store_id}/files/${fileID}`, {
            body,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
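    // Usage sketch (illustrative): set key/value attributes on an attached file.
    // Attribute names and values here are placeholders.
    //
    //   const updated = await client.vectorStores.files.update('file-abc123', {
    //     vector_store_id: 'vs_abc123',
    //     attributes: { category: 'contracts', year: 2024 },
    //   });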
    /**
     * Returns a list of vector store files.
     */
    list(vectorStoreID, query = {}, options) {
        return this._client.getAPIList((0, path_1.path) `/vector_stores/${vectorStoreID}/files`, (pagination_1.CursorPage), {
            query,
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
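    // Usage sketch (illustrative): `list` returns a CursorPage, so the SDK's usual
    // async-iteration helpers apply. The query values are placeholders.
    //
    //   for await (const vectorStoreFile of client.vectorStores.files.list('vs_abc123', { limit: 20 })) {
    //     console.log(vectorStoreFile.id, vectorStoreFile.status);
    //   }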
    /**
     * Delete a vector store file. This will remove the file from the vector store but
     * the file itself will not be deleted. To delete the file, use the
     * [delete file](https://platform.openai.com/docs/api-reference/files/delete)
     * endpoint.
     */
    delete(fileID, params, options) {
        const { vector_store_id } = params;
        return this._client.delete((0, path_1.path) `/vector_stores/${vector_store_id}/files/${fileID}`, {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
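    // Usage sketch (illustrative): detach the file from the vector store, then,
    // if the underlying File should also go away, delete it through the top-level
    // files resource (assumed here to be mounted at `client.files`). IDs are
    // placeholders.
    //
    //   await client.vectorStores.files.delete('file-abc123', { vector_store_id: 'vs_abc123' });
    //   await client.files.delete('file-abc123');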
    /**
     * Attach a file to the given vector store and wait for it to be processed.
     */
    async createAndPoll(vectorStoreId, body, options) {
        const file = await this.create(vectorStoreId, body, options);
        return await this.poll(vectorStoreId, file.id, options);
    }
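    // Usage sketch (illustrative): attach a file and block until processing ends.
    // `createAndPoll` resolves for failed files too, so check `status` and
    // `last_error` on the result. IDs are placeholders.
    //
    //   const file = await client.vectorStores.files.createAndPoll('vs_abc123', {
    //     file_id: 'file-abc123',
    //   });
    //   if (file.status !== 'completed') console.error(file.last_error);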
    /**
     * Wait for the vector store file to finish processing.
     *
     * Note: this will return even if the file failed to process; you need to check
     * file.last_error and file.status to handle these cases.
     */
    async poll(vectorStoreID, fileID, options) {
        const headers = (0, headers_1.buildHeaders)([
            options?.headers,
            {
                'X-Stainless-Poll-Helper': 'true',
                'X-Stainless-Custom-Poll-Interval': options?.pollIntervalMs?.toString() ?? undefined,
            },
        ]);
        while (true) {
            const fileResponse = await this.retrieve(fileID, {
                vector_store_id: vectorStoreID,
            }, { ...options, headers }).withResponse();
            const file = fileResponse.data;
            switch (file.status) {
                case 'in_progress':
                    let sleepInterval = 5000;
                    if (options?.pollIntervalMs) {
                        sleepInterval = options.pollIntervalMs;
                    }
                    else {
                        const headerInterval = fileResponse.response.headers.get('openai-poll-after-ms');
                        if (headerInterval) {
                            const headerIntervalMs = parseInt(headerInterval);
                            if (!isNaN(headerIntervalMs)) {
                                sleepInterval = headerIntervalMs;
                            }
                        }
                    }
                    await (0, utils_1.sleep)(sleepInterval);
                    break;
                case 'failed':
                case 'completed':
                    return file;
            }
        }
    }
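    // Usage sketch (illustrative): poll an already-attached file with a custom
    // interval. When `pollIntervalMs` is omitted, the loop above honors the
    // server's `openai-poll-after-ms` response header and otherwise falls back
    // to 5000 ms. IDs are placeholders.
    //
    //   const file = await client.vectorStores.files.poll('vs_abc123', 'file-abc123', {
    //     pollIntervalMs: 1000,
    //   });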
    /**
     * Upload a file to the `files` API and then attach it to the given vector store.
     *
     * Note the file will be asynchronously processed (you can use the alternative
     * polling helper method to wait for processing to complete).
     */
    async upload(vectorStoreId, file, options) {
        const fileInfo = await this._client.files.create({ file: file, purpose: 'assistants' }, options);
        return this.create(vectorStoreId, { file_id: fileInfo.id }, options);
    }
    /**
     * Add a file to a vector store and poll until processing is complete.
     */
    async uploadAndPoll(vectorStoreId, file, options) {
        const fileInfo = await this.upload(vectorStoreId, file, options);
        return await this.poll(vectorStoreId, fileInfo.id, options);
    }
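    // Usage sketch (illustrative): upload a local file with `purpose: 'assistants'`
    // and wait for the vector store to finish processing it. Assumes Node's `fs`
    // module; the path and ID are placeholders.
    //
    //   const fs = require('node:fs');
    //   const file = await client.vectorStores.files.uploadAndPoll(
    //     'vs_abc123',
    //     fs.createReadStream('data/handbook.pdf'),
    //   );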
    /**
     * Retrieve the parsed contents of a vector store file.
     */
    content(fileID, params, options) {
        const { vector_store_id } = params;
        return this._client.getAPIList((0, path_1.path) `/vector_stores/${vector_store_id}/files/${fileID}/content`, (pagination_1.Page), {
            ...options,
            headers: (0, headers_1.buildHeaders)([{ 'OpenAI-Beta': 'assistants=v2' }, options?.headers]),
        });
    }
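    // Usage sketch (illustrative): the content endpoint is paginated, so iterate it
    // like the other list helpers. Each item is assumed to expose the parsed text as
    // a `text` field; check the response types shipped with the SDK. IDs are
    // placeholders.
    //
    //   for await (const part of client.vectorStores.files.content('file-abc123', {
    //     vector_store_id: 'vs_abc123',
    //   })) {
    //     console.log(part.text);
    //   }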
}
exports.Files = Files;
//# sourceMappingURL=files.js.map