'use strict';

require('whatwg-fetch');

const version = "0.5.9";

var __defProp$1 = Object.defineProperty;
var __defNormalProp$1 = (obj, key, value) => key in obj ? __defProp$1(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField$1 = (obj, key, value) => {
  __defNormalProp$1(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
};
class ResponseError extends Error {
  constructor(error, status_code) {
    super(error);
    this.error = error;
    this.status_code = status_code;
    this.name = "ResponseError";
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, ResponseError);
    }
  }
}
class AbortableAsyncIterator {
  constructor(abortController, itr, doneCallback) {
    __publicField$1(this, "abortController");
    __publicField$1(this, "itr");
    __publicField$1(this, "doneCallback");
    this.abortController = abortController;
    this.itr = itr;
    this.doneCallback = doneCallback;
  }
  abort() {
    this.abortController.abort();
  }
  async *[Symbol.asyncIterator]() {
    for await (const message of this.itr) {
      if ("error" in message) {
        throw new Error(message.error);
      }
      yield message;
      if (message.done || message.status === "success") {
        this.doneCallback();
        return;
      }
    }
    throw new Error("Did not receive done or success response in stream.");
  }
}
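// Consumption sketch (illustrative, not part of the bundle): streamed endpoints return an
// AbortableAsyncIterator, which can be consumed with for-await and cancelled via abort().
// Assumes a client built from the Ollama class defined later in this file and a locally
// available model named 'llama3':
//
//   const stream = await client.generate({ model: 'llama3', prompt: 'Hello', stream: true });
//   setTimeout(() => stream.abort(), 5000); // aborts the underlying fetch after 5 s
//   for await (const chunk of stream) process.stdout.write(chunk.response);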
const checkOk = async (response) => {
  if (response.ok) {
    return;
  }
  let message = `Error ${response.status}: ${response.statusText}`;
  let errorData = null;
  if (response.headers.get("content-type")?.includes("application/json")) {
    try {
      errorData = await response.json();
      message = errorData.error || message;
    } catch (error) {
      console.log("Failed to parse error response as JSON");
    }
  } else {
    try {
      console.log("Getting text from response");
      const textResponse = await response.text();
      message = textResponse || message;
    } catch (error) {
      console.log("Failed to get text from error response");
    }
  }
  throw new ResponseError(message, response.status);
};
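// Error-handling sketch (illustrative): checkOk throws ResponseError for any non-2xx
// response, preferring the JSON `error` field when present. This bundle does not re-export
// ResponseError, so callers can key off err.name; the client and model name are assumptions:
//
//   try {
//     await client.show({ model: 'missing-model' });
//   } catch (err) {
//     if (err.name === 'ResponseError') console.error(err.status_code, err.error);
//   }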
function getPlatform() {
  if (typeof window !== "undefined" && window.navigator) {
    return `${window.navigator.platform.toLowerCase()} Browser/${navigator.userAgent};`;
  } else if (typeof process !== "undefined") {
    return `${process.arch} ${process.platform} Node.js/${process.version}`;
  }
  return "";
}
const fetchWithHeaders = async (fetch, url, options = {}) => {
  const defaultHeaders = {
    "Content-Type": "application/json",
    Accept: "application/json",
    "User-Agent": `ollama-js/${version} (${getPlatform()})`
  };
  if (!options.headers) {
    options.headers = {};
  }
  options.headers = {
    ...defaultHeaders,
    ...options.headers
  };
  return fetch(url, options);
};
const get = async (fetch, host) => {
  const response = await fetchWithHeaders(fetch, host);
  await checkOk(response);
  return response;
};
const head = async (fetch, host) => {
  const response = await fetchWithHeaders(fetch, host, {
    method: "HEAD"
  });
  await checkOk(response);
  return response;
};
const post = async (fetch, host, data, options) => {
  const isRecord = (input) => {
    return input !== null && typeof input === "object" && !Array.isArray(input);
  };
  const formattedData = isRecord(data) ? JSON.stringify(data) : data;
  const response = await fetchWithHeaders(fetch, host, {
    method: "POST",
    body: formattedData,
    signal: options?.signal,
    headers: options?.headers
  });
  await checkOk(response);
  return response;
};
const del = async (fetch, host, data) => {
  const response = await fetchWithHeaders(fetch, host, {
    method: "DELETE",
    body: JSON.stringify(data)
  });
  await checkOk(response);
  return response;
};
const parseJSON = async function* (itr) {
  const decoder = new TextDecoder("utf-8");
  let buffer = "";
  const reader = itr.getReader();
  while (true) {
    const { done, value: chunk } = await reader.read();
    if (done) {
      break;
    }
    buffer += decoder.decode(chunk);
    const parts = buffer.split("\n");
    buffer = parts.pop() ?? "";
    for (const part of parts) {
      try {
        yield JSON.parse(part);
      } catch (error) {
        console.warn("invalid json: ", part);
      }
    }
  }
  for (const part of buffer.split("\n").filter((p) => p !== "")) {
    try {
      yield JSON.parse(part);
    } catch (error) {
      console.warn("invalid json: ", part);
    }
  }
};
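// Parsing sketch (illustrative): parseJSON treats the response body as newline-delimited
// JSON, buffering partial lines across chunks. For a body containing
// '{"response":"He"}\n{"response":"llo"}\n' it yields two objects:
//
//   for await (const obj of parseJSON(response.body)) console.log(obj.response);
//   // -> "He", then "llo"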
const formatHost = (host) => {
  if (!host) {
    return "http://127.0.0.1:11434";
  }
  let isExplicitProtocol = host.includes("://");
  if (host.startsWith(":")) {
    host = `http://127.0.0.1${host}`;
    isExplicitProtocol = true;
  }
  if (!isExplicitProtocol) {
    host = `http://${host}`;
  }
  const url = new URL(host);
  let port = url.port;
  if (!port) {
    if (!isExplicitProtocol) {
      port = "11434";
    } else {
      port = url.protocol === "https:" ? "443" : "80";
    }
  }
  let formattedHost = `${url.protocol}//${url.hostname}:${port}${url.pathname}`;
  if (formattedHost.endsWith("/")) {
    formattedHost = formattedHost.slice(0, -1);
  }
  return formattedHost;
};
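// Normalization sketch (example inputs are assumptions; outputs follow the logic above):
//   formatHost()                      -> "http://127.0.0.1:11434"
//   formatHost(":11434")              -> "http://127.0.0.1:11434"
//   formatHost("example.com")         -> "http://example.com:11434"
//   formatHost("https://example.com") -> "https://example.com:443"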
var __defProp = Object.defineProperty;
var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value;
var __publicField = (obj, key, value) => {
  __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value);
  return value;
};
let Ollama$1 = class Ollama {
  constructor(config) {
    __publicField(this, "config");
    __publicField(this, "fetch");
    __publicField(this, "ongoingStreamedRequests", []);
    this.config = {
      host: ""
    };
    if (!config?.proxy) {
      this.config.host = formatHost(config?.host ?? "http://127.0.0.1:11434");
    }
    this.fetch = fetch;
    if (config?.fetch != null) {
      this.fetch = config.fetch;
    }
  }
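  // Construction sketch (illustrative; the host value and custom fetch are assumptions):
  //   const client = new Ollama({ host: 'http://127.0.0.1:11434' });
  //   const proxied = new Ollama({ fetch: myFetch }); // any fetch-compatible implementation
  //   const defaulted = new Ollama();                 // falls back to http://127.0.0.1:11434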
  // Abort any ongoing streamed requests to Ollama
  abort() {
    for (const request of this.ongoingStreamedRequests) {
      request.abort();
    }
    this.ongoingStreamedRequests.length = 0;
  }
  /**
   * Processes a request to the Ollama server. If the request is streamable, it will return an
   * AbortableAsyncIterator that yields the response messages. Otherwise, it will return the response
   * object.
   * @param endpoint {string} - The endpoint to send the request to.
   * @param request {object} - The request object to send to the endpoint.
   * @protected {T | AbortableAsyncIterator<T>} - The response object or an AbortableAsyncIterator that yields
   * response messages.
   * @throws {Error} - If the response body is missing or if the response is an error.
   * @returns {Promise<T | AbortableAsyncIterator<T>>} - The response object or an AbortableAsyncIterator that yields the streamed response.
   */
  async processStreamableRequest(endpoint, request) {
    request.stream = request.stream ?? false;
    const host = `${this.config.host}/api/${endpoint}`;
    if (request.stream) {
      const abortController = new AbortController();
      const response2 = await post(this.fetch, host, request, {
        signal: abortController.signal,
        headers: this.config.headers
      });
      if (!response2.body) {
        throw new Error("Missing body");
      }
      const itr = parseJSON(response2.body);
      const abortableAsyncIterator = new AbortableAsyncIterator(
        abortController,
        itr,
        () => {
          const i = this.ongoingStreamedRequests.indexOf(abortableAsyncIterator);
          if (i > -1) {
            this.ongoingStreamedRequests.splice(i, 1);
          }
        }
      );
      this.ongoingStreamedRequests.push(abortableAsyncIterator);
      return abortableAsyncIterator;
    }
    const response = await post(this.fetch, host, request, {
      headers: this.config.headers
    });
    return await response.json();
  }
  /**
   * Encodes an image to base64 if it is a Uint8Array.
   * @param image {Uint8Array | string} - The image to encode.
   * @returns {Promise<string>} - The base64 encoded image.
   */
  async encodeImage(image) {
    if (typeof image !== "string") {
      const uint8Array = new Uint8Array(image);
      let byteString = "";
      const len = uint8Array.byteLength;
      for (let i = 0; i < len; i++) {
        byteString += String.fromCharCode(uint8Array[i]);
      }
      return btoa(byteString);
    }
    return image;
  }
  /**
   * Generates a response from a text prompt.
   * @param request {GenerateRequest} - The request object.
   * @returns {Promise<GenerateResponse | AbortableAsyncIterator<GenerateResponse>>} - The response object or
   * an AbortableAsyncIterator that yields response messages.
   */
  async generate(request) {
    if (request.images) {
      request.images = await Promise.all(request.images.map(this.encodeImage.bind(this)));
    }
    return this.processStreamableRequest("generate", request);
  }
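  // Usage sketch (illustrative; `client`, the model name, and the prompt are assumptions):
  //   const res = await client.generate({ model: 'llama3', prompt: 'Why is the sky blue?' });
  //   console.log(res.response);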
  /**
   * Chats with the model. The request object can contain messages with images that are either
   * Uint8Arrays or base64 encoded strings. The images will be base64 encoded before sending the
   * request.
   * @param request {ChatRequest} - The request object.
   * @returns {Promise<ChatResponse | AbortableAsyncIterator<ChatResponse>>} - The response object or an
   * AbortableAsyncIterator that yields response messages.
   */
  async chat(request) {
    if (request.messages) {
      for (const message of request.messages) {
        if (message.images) {
          message.images = await Promise.all(
            message.images.map(this.encodeImage.bind(this))
          );
        }
      }
    }
    return this.processStreamableRequest("chat", request);
  }
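  // Streaming usage sketch (illustrative; `client`, the model name, and messages are assumptions):
  //   const stream = await client.chat({
  //     model: 'llama3',
  //     messages: [{ role: 'user', content: 'Hello!' }],
  //     stream: true,
  //   });
  //   for await (const part of stream) process.stdout.write(part.message.content);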
  /**
   * Creates a new model from a stream of data.
   * @param request {CreateRequest} - The request object.
   * @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or a stream of progress responses.
   */
  async create(request) {
    return this.processStreamableRequest("create", {
      name: request.model,
      stream: request.stream,
      modelfile: request.modelfile,
      quantize: request.quantize
    });
  }
  /**
   * Pulls a model from the Ollama registry. The request object can contain a stream flag to indicate if the
   * response should be streamed.
   * @param request {PullRequest} - The request object.
   * @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or
   * an AbortableAsyncIterator that yields response messages.
   */
  async pull(request) {
    return this.processStreamableRequest("pull", {
      name: request.model,
      stream: request.stream,
      insecure: request.insecure
    });
  }
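  // Progress sketch (illustrative; `client` and the model name are assumptions):
  //   const progress = await client.pull({ model: 'llama3', stream: true });
  //   for await (const part of progress) console.log(part.status, part.completed, part.total);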
  /**
   * Pushes a model to the Ollama registry. The request object can contain a stream flag to indicate if the
   * response should be streamed.
   * @param request {PushRequest} - The request object.
   * @returns {Promise<ProgressResponse | AbortableAsyncIterator<ProgressResponse>>} - The response object or
   * an AbortableAsyncIterator that yields response messages.
   */
  async push(request) {
    return this.processStreamableRequest("push", {
      name: request.model,
      stream: request.stream,
      insecure: request.insecure
    });
  }
  /**
   * Deletes a model from the server. The request object should contain the name of the model to
   * delete.
   * @param request {DeleteRequest} - The request object.
   * @returns {Promise<StatusResponse>} - The response object.
   */
  async delete(request) {
    await del(this.fetch, `${this.config.host}/api/delete`, {
      name: request.model
    });
    return { status: "success" };
  }
  /**
   * Copies a model from one name to another. The request object should contain the name of the
   * model to copy and the new name.
   * @param request {CopyRequest} - The request object.
   * @returns {Promise<StatusResponse>} - The response object.
   */
  async copy(request) {
    await post(this.fetch, `${this.config.host}/api/copy`, { ...request });
    return { status: "success" };
  }
  /**
   * Lists the models on the server.
   * @returns {Promise<ListResponse>} - The response object.
   * @throws {Error} - If the response body is missing.
   */
  async list() {
    const response = await get(this.fetch, `${this.config.host}/api/tags`);
    return await response.json();
  }
  /**
   * Shows the metadata of a model. The request object should contain the name of the model.
   * @param request {ShowRequest} - The request object.
   * @returns {Promise<ShowResponse>} - The response object.
   */
  async show(request) {
    const response = await post(this.fetch, `${this.config.host}/api/show`, {
      ...request
    });
    return await response.json();
  }
  /**
   * Embeds text input into vectors.
   * @param request {EmbedRequest} - The request object.
   * @returns {Promise<EmbedResponse>} - The response object.
   */
  async embed(request) {
    const response = await post(this.fetch, `${this.config.host}/api/embed`, {
      ...request
    });
    return await response.json();
  }
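  // Usage sketch (illustrative; `client`, the model name, and the input are assumptions):
  //   const { embeddings } = await client.embed({ model: 'all-minilm', input: 'Hello world' });
  //   console.log(embeddings[0].length); // dimensionality of the embedding vector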
  /**
   * Embeds a text prompt into a vector.
   * @param request {EmbeddingsRequest} - The request object.
   * @returns {Promise<EmbeddingsResponse>} - The response object.
   */
  async embeddings(request) {
    const response = await post(this.fetch, `${this.config.host}/api/embeddings`, {
      ...request
    });
    return await response.json();
  }
  /**
   * Lists the running models on the server
   * @returns {Promise<ListResponse>} - The response object.
   * @throws {Error} - If the response body is missing.
   */
  async ps() {
    const response = await get(this.fetch, `${this.config.host}/api/ps`);
    return await response.json();
  }
};
const browser = new Ollama$1();

exports.Ollama = Ollama$1;
exports.browser = browser;
exports.head = head;
exports.post = post;
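// Consumption sketch for this CommonJS build (illustrative; the 'ollama' specifier assumes the
// package is installed under that name):
//   const { Ollama, browser } = require('ollama');
//   const client = new Ollama();  // explicit instance with its own config
//   await browser.list();         // or use the shared default instance exported above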