Interface BaseLanguageModelInterface<RunOutput, CallOptions>

Base interface implemented by all runnables. Used for cross-compatibility between different versions of LangChain core.

Should not change on patch releases.

/**
 * Base interface implemented by all runnables. Used for cross-compatibility
 * between different versions of LangChain core. Should not change on patch
 * releases.
 *
 * NOTE(review): the doc generator stripped all parameter types from these
 * signatures — confirm each against the original source declaration before
 * relying on them.
 */
interface BaseLanguageModelInterface<RunOutput, CallOptions> {
    /** Keys that callers may set in the call options object. */
    get callKeys(): string[];
    /** Runs the model over a batch of inputs, returning one output per input. */
    batch(inputs, options?, batchOptions?): Promise<RunOutput[]>;
    /**
     * Overload: returns per-input results where a failed input yields an
     * `Error` in place of its output rather than rejecting the whole batch.
     * (The original source had a third overload distinguished by its
     * `batchOptions` type; after type erasure it was an exact duplicate of
     * this one and has been removed — confirm against the source.)
     */
    batch(inputs, options?, batchOptions?): Promise<(Error | RunOutput)[]>;
    /** Generates model output from prompt values. */
    generatePrompt(promptValues, options?, callbacks?): Promise<LLMResult>;
    /** Returns the model's name, optionally with a suffix appended. */
    getName(suffix?): string;
    /** Counts the tokens in the given content. */
    getNumTokens(content): Promise<number>;
    /** Invokes the model on a single input. */
    invoke(input, options?): Promise<RunOutput>;
    /**
     * @deprecated Use {@link invoke} instead. Will be removed in 0.2.0.
     */
    predict(text, options?, callbacks?): Promise<string>;
    /**
     * @deprecated Use {@link invoke} instead. Will be removed in 0.2.0.
     */
    predictMessages(messages, options?, callbacks?): Promise<BaseMessage>;
    /** Serializes the model to a plain representation. */
    serialize(): SerializedLLM;
    /** Streams outputs for a single input as they are produced. */
    stream(input, options?): Promise<IterableReadableStreamInterface<RunOutput>>;
    /** Transforms an async generator of inputs into a generator of outputs. */
    transform(generator, options): AsyncGenerator<RunOutput, any, unknown>;
}

Type Parameters

Hierarchy (view full)

Implemented by

Accessors

Methods

  • Parameters

    Returns Promise<string>

    ⚠️ Deprecated ⚠️

    Use .invoke() instead. Will be removed in 0.2.0.

This feature is deprecated, is not recommended for use, and will be removed in a future release.

Generated using TypeDoc