agsamantha/node_modules/@langchain/community/dist/llms/writer.d.ts
2024-10-02 15:15:21 -05:00

60 lines
1.9 KiB
TypeScript

import { type BaseLLMParams, LLM } from "@langchain/core/language_models/llms";
/**
 * Interface for the input parameters specific to the Writer model.
 * Extends BaseLLMParams, so common LLM options (callbacks, caching, etc.)
 * are also accepted alongside the Writer-specific fields below.
 */
export interface WriterInput extends BaseLLMParams {
/** Writer API key. Presumably read from the environment when omitted — verify against the implementation. */
apiKey?: string;
/** Writer organization ID. Accepts string or number here; note the class declares it as `number` only. */
orgId?: string | number;
/** Model to use for completions. */
model?: string;
/** Sampling temperature to use. */
temperature?: number;
/** Minimum number of tokens to generate. */
minTokens?: number;
/** Maximum number of tokens to generate in the completion. */
maxTokens?: number;
/** Generates this many completions server-side and returns the "best". */
bestOf?: number;
/** Penalizes repeated tokens according to frequency. */
frequencyPenalty?: number;
/** Number of top token log probabilities to return. NOTE(review): the original comment said "Whether", but the type is a count, not a boolean — confirm against the Writer API. */
logprobs?: number;
/** Number of completions to generate. */
n?: number;
/** Penalizes repeated tokens regardless of frequency. */
presencePenalty?: number;
/** Total probability mass of tokens to consider at each step (nucleus sampling). */
topP?: number;
}
/**
 * Class representing a Writer Large Language Model (LLM). It interacts
 * with the Writer API to generate text completions.
 * Declaration file only — the implementation lives in the sibling .js file.
 */
export declare class Writer extends LLM implements WriterInput {
static lc_name(): string;
/** Maps secret-holding properties (e.g. the API key) to env-var names for serialization. */
get lc_secrets(): {
[key: string]: string;
} | undefined;
/** Maps property names to their serialized aliases. */
get lc_aliases(): {
[key: string]: string;
} | undefined;
lc_serializable: boolean;
/** Resolved Writer API key (required at runtime — non-optional here, unlike WriterInput). */
apiKey: string;
/** Organization ID, normalized to a number. NOTE(review): WriterInput allows `string | number` — presumably coerced in the constructor; confirm in the implementation. */
orgId: number;
/** Model name used for completions. */
model: string;
temperature?: number;
minTokens?: number;
maxTokens?: number;
bestOf?: number;
frequencyPenalty?: number;
logprobs?: number;
n?: number;
presencePenalty?: number;
topP?: number;
constructor(fields?: WriterInput);
/** Returns the LLM type identifier used by LangChain. */
_llmType(): string;
/** @ignore */
_call(prompt: string, options: this["ParsedCallOptions"]): Promise<string>;
}