// Source: agsamantha/node_modules/langchain/dist/chains/graph_qa/cypher.d.ts
// (file-viewer metadata from extraction: 2024-10-02 15:15:21 -05:00, 54 lines, 1.8 KiB, TypeScript)

import type { BaseLanguageModelInterface } from "@langchain/core/language_models/base";
import { ChainValues } from "@langchain/core/utils/types";
import { BasePromptTemplate } from "@langchain/core/prompts";
import { CallbackManagerForChainRun } from "@langchain/core/callbacks/manager";
import { LLMChain } from "../llm_chain.js";
import { BaseChain, ChainInputs } from "../base.js";
export declare const INTERMEDIATE_STEPS_KEY = "intermediateSteps";
/**
 * Constructor input for `GraphCypherQAChain`. Callers supply the two inner
 * `LLMChain`s directly; to build them from a bare LLM, use the static
 * `GraphCypherQAChain.fromLLM` with `FromLLMInput` instead.
 */
export interface GraphCypherQAChainInput extends ChainInputs {
  /** Graph store the chain queries (typed `any` here; presumably a Neo4j graph wrapper — confirm with caller). */
  graph: any;
  /** Inner chain that produces the Cypher query text. */
  cypherGenerationChain: LLMChain;
  /** Inner chain that produces the final answer from query results. */
  qaChain: LLMChain;
  /** Name of the input field read from `ChainValues`; implementation default not visible here. */
  inputKey?: string;
  /** Name of the output field written to `ChainValues`; implementation default not visible here. */
  outputKey?: string;
  /** Presumably caps the number of graph results fed to the QA chain — TODO confirm. */
  topK?: number;
  /** When true, intermediate steps are included under `INTERMEDIATE_STEPS_KEY`. */
  returnIntermediateSteps?: boolean;
  /** When true, presumably returns the raw query result, skipping the QA chain — TODO confirm. */
  returnDirect?: boolean;
}
/**
 * Input for the static factory `GraphCypherQAChain.fromLLM`. Unlike
 * `GraphCypherQAChainInput`, this accepts bare language models and optional
 * prompts, from which the factory builds the two inner `LLMChain`s.
 */
export interface FromLLMInput {
  /** Graph store the chain will query (typed `any`; see `GraphCypherQAChainInput.graph`). */
  graph: any;
  /** Default model, presumably used for both Cypher generation and QA when the specific ones are omitted. */
  llm?: BaseLanguageModelInterface;
  /** Model used specifically for Cypher generation; presumably falls back to `llm`. */
  cypherLLM?: BaseLanguageModelInterface;
  /** Model used specifically for question answering; presumably falls back to `llm`. */
  qaLLM?: BaseLanguageModelInterface;
  /** Prompt for the QA step; implementation default not visible here. */
  qaPrompt?: BasePromptTemplate;
  /** Prompt for the Cypher-generation step; implementation default not visible here. */
  cypherPrompt?: BasePromptTemplate;
  /** When true, intermediate steps are included under `INTERMEDIATE_STEPS_KEY`. */
  returnIntermediateSteps?: boolean;
  /** When true, presumably returns the raw query result, skipping the QA chain — TODO confirm. */
  returnDirect?: boolean;
}
/**
 * Chain that answers questions over a graph database by first generating a
 * Cypher query with one `LLMChain` and then answering over the results with
 * a second `LLMChain`.
 *
 * NOTE(review): the previous example constructed the chain with
 * `new GraphCypherQAChain({ llm, graph })`, which does not type-check —
 * `GraphCypherQAChainInput` has no `llm` property and requires
 * `cypherGenerationChain`/`qaChain`. The `llm` shorthand belongs to the
 * static `fromLLM` factory, used below.
 *
 * @example
 * ```typescript
 * const chain = GraphCypherQAChain.fromLLM({
 *   llm: new ChatOpenAI({ temperature: 0 }),
 *   graph: new Neo4jGraph(),
 * });
 * const res = await chain.run("Who played in Pulp Fiction?");
 * ```
 */
export declare class GraphCypherQAChain extends BaseChain {
  /** Graph store queried with the generated Cypher. */
  private graph;
  /** Inner chain producing the Cypher query text. */
  private cypherGenerationChain;
  /** Inner chain producing the final natural-language answer. */
  private qaChain;
  /** `ChainValues` key the question is read from. */
  private inputKey;
  /** `ChainValues` key the answer is written to. */
  private outputKey;
  /** Presumably caps the number of graph results passed to the QA chain — TODO confirm. */
  private topK;
  /** When true, presumably skips the QA chain and returns raw query results — TODO confirm. */
  private returnDirect;
  /** When true, intermediate steps are included under `INTERMEDIATE_STEPS_KEY`. */
  private returnIntermediateSteps;
  constructor(props: GraphCypherQAChainInput);
  /** Literal chain-type identifier used by the LangChain serialization machinery. */
  _chainType(): "graph_cypher_chain";
  get inputKeys(): string[];
  get outputKeys(): string[];
  /** Factory: builds the two inner `LLMChain`s from bare LLMs/prompts. */
  static fromLLM(props: FromLLMInput): GraphCypherQAChain;
  /** Presumably extracts the Cypher statement from the generation-chain output — TODO confirm. */
  private extractCypher;
  _call(values: ChainValues, runManager?: CallbackManagerForChainRun): Promise<ChainValues>;
}