Class: Anthropic

Anthropic LLM implementation

Hierarchy

  • BaseLLM

    Anthropic

Constructors

constructor

new Anthropic(init?): Anthropic

Parameters

| Name | Type |
| :------ | :------ |
| `init?` | `Partial<Anthropic>` |

Returns

Anthropic

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/LLM.ts:654
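
All fields are optional at construction time, since `init` is a `Partial<Anthropic>`; unset fields keep the defaults documented under Properties below. A minimal sketch, assuming the class is imported from the `llamaindex` package:

```typescript
import { Anthropic } from "llamaindex";

// Every init field is optional; e.g. apiKey defaults to undefined.
const llm = new Anthropic({
  apiKey: process.env.ANTHROPIC_API_KEY,
  model: "claude-3-opus",
  temperature: 0.1,
  maxTokens: 1024,
});
```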

Properties

apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/LLM.ts:647


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/LLM.ts:652


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/LLM.ts:648


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/LLM.ts:644


model

model: "claude-3-opus" | "claude-3-sonnet" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/core/src/llm/LLM.ts:641


session

session: AnthropicSession

Defined in

packages/core/src/llm/LLM.ts:650


temperature

temperature: number

Defined in

packages/core/src/llm/LLM.ts:642


timeout

Optional timeout: number

Defined in

packages/core/src/llm/LLM.ts:649


topP

topP: number

Defined in

packages/core/src/llm/LLM.ts:643

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :------ | :------ |
| `contextWindow` | `number` |
| `maxTokens` | `undefined` \| `number` |
| `model` | `"claude-3-opus"` \| `"claude-3-sonnet"` \| `"claude-2.1"` \| `"claude-instant-1.2"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/LLM.ts:679
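
The getter is handy for inspecting the effective configuration at runtime. A sketch, reusing the `llm` instance from the constructor example above:

```typescript
// Destructure the plain object returned by the metadata accessor.
const { model, contextWindow, temperature, topP, maxTokens } = llm.metadata;
console.log(`model=${model}, contextWindow=${contextWindow}, temperature=${temperature}`);
```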

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

| Name | Type |
| :------ | :------ |
| `params` | `LLMChatParamsStreaming` |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:710

chat(params): Promise<ChatResponse>

Parameters

| Name | Type |
| :------ | :------ |
| `params` | `LLMChatParamsNonStreaming` |

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/LLM.ts:713
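
The overload is selected by the params type; in LlamaIndex.TS the streaming variant is requested by passing `stream: true`. A sketch of both call shapes (the `message.content` and `delta` fields follow the library's `ChatResponse` and `ChatResponseChunk` shapes):

```typescript
// Non-streaming: LLMChatParamsNonStreaming -> Promise<ChatResponse>
const response = await llm.chat({
  messages: [{ role: "user", content: "What is a context window?" }],
});
console.log(response.message.content);

// Streaming: LLMChatParamsStreaming -> Promise<AsyncIterable<ChatResponseChunk>>
const stream = await llm.chat({
  messages: [{ role: "user", content: "What is a context window?" }],
  stream: true,
});
for await (const chunk of stream) {
  process.stdout.write(chunk.delta);
}
```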


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| :------ | :------ |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:18

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :------ | :------ |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:21
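
`complete` is inherited from `BaseLLM` and takes a single prompt string instead of a message list. A sketch, assuming the same `stream: true` discriminator as `chat` and the `text` field on `CompletionResponse`:

```typescript
// Non-streaming completion
const completion = await llm.complete({ prompt: "Write a haiku about the sea." });
console.log(completion.text);

// Streaming completion
const completionStream = await llm.complete({
  prompt: "Write a haiku about the sea.",
  stream: true,
});
for await (const chunk of completionStream) {
  process.stdout.write(chunk.text);
}
```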


formatMessages

formatMessages(messages): { content: any = message.content; role: "user" | "assistant" = message.role }[]

Parameters

| Name | Type |
| :------ | :------ |
| `messages` | `ChatMessage`[] |

Returns

{ content: any = message.content; role: "user" | "assistant" = message.role }[]

Defined in

packages/core/src/llm/LLM.ts:697
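
As the return type shows, this narrows each message's role to `"user" | "assistant"` for the Anthropic messages API. A sketch:

```typescript
const formatted = llm.formatMessages([
  { role: "user", content: "Hello" },
  { role: "assistant", content: "Hi, how can I help?" },
]);
// => [{ role: "user", content: "Hello" },
//     { role: "assistant", content: "Hi, how can I help?" }]
```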


getModelName

getModelName(model): string

Parameters

| Name | Type |
| :------ | :------ |
| `model` | `string` |

Returns

string

Defined in

packages/core/src/llm/LLM.ts:690
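
A sketch of the call shape; the exact mapping is an implementation detail, but the method takes the short model alias used by this class and returns the model name string sent to the Anthropic API:

```typescript
// Resolve the alias to the concrete API model name.
const apiModelName = llm.getModelName("claude-3-opus");
console.log(apiModelName);
```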


streamChat

streamChat(messages, parentEvent?, systemPrompt?): AsyncIterable<ChatResponseChunk>

Parameters

| Name | Type |
| :------ | :------ |
| `messages` | `ChatMessage`[] |
| `parentEvent?` | `Event` |
| `systemPrompt?` | `null` \| `string` |

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/LLM.ts:754
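
`streamChat` is the lower-level generator that the streaming `chat` overload delegates to; calling it directly lets you pass the system prompt and a parent `Event` explicitly. A sketch:

```typescript
const chunks = llm.streamChat(
  [{ role: "user", content: "Stream one short sentence." }],
  undefined,             // parentEvent: no tracing event in this sketch
  "You answer tersely.", // systemPrompt
);
for await (const chunk of chunks) {
  process.stdout.write(chunk.delta);
}
```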


tokens

tokens(messages): number

Parameters

| Name | Type |
| :------ | :------ |
| `messages` | `ChatMessage`[] |

Returns

number

Overrides

BaseLLM.tokens

Defined in

packages/core/src/llm/LLM.ts:675
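
A sketch of the call shape only; note that `metadata` above reports `tokenizer: undefined`, so this override may not support token counting for every model and could throw at runtime:

```typescript
// Count tokens for a message list (support is an assumption, not documented here).
const count = llm.tokens([{ role: "user", content: "How many tokens is this?" }]);
console.log(count);
```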