Skip to main content

Class: Anthropic

Hierarchy

  • BaseLLM

    Anthropic

Constructors

constructor

new Anthropic(init?): Anthropic

Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial<Anthropic>` |

Returns

Anthropic

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/anthropic.ts:97

Properties

apiKey

Optional apiKey: string = undefined

Defined in

packages/core/src/llm/anthropic.ts:92


maxRetries

maxRetries: number

Defined in

packages/core/src/llm/anthropic.ts:93


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/anthropic.ts:89


model

model: "claude-3-opus" | "claude-3-sonnet" | "claude-3-haiku" | "claude-2.1" | "claude-instant-1.2"

Defined in

packages/core/src/llm/anthropic.ts:86


session

session: AnthropicSession

Defined in

packages/core/src/llm/anthropic.ts:95


temperature

temperature: number

Defined in

packages/core/src/llm/anthropic.ts:87


timeout

Optional timeout: number

Defined in

packages/core/src/llm/anthropic.ts:94


topP

topP: number

Defined in

packages/core/src/llm/anthropic.ts:88

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined` \| `number` |
| `model` | `"claude-3-opus"` \| `"claude-3-sonnet"` \| `"claude-3-haiku"` \| `"claude-2.1"` \| `"claude-instant-1.2"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/anthropic.ts:116

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming<Record<string, unknown>>` |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/anthropic.ts:147

chat(params): Promise<ChatResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming<Record<string, unknown>>` |

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/anthropic.ts:150


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:23

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:26


formatMessages

formatMessages(messages): { content: string ; role: "user" | "assistant" = message.role }[]

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

{ content: string ; role: "user" | "assistant" = message.role }[]

Defined in

packages/core/src/llm/anthropic.ts:134


getModelName

getModelName(model): string

Parameters

| Name | Type |
| :--- | :--- |
| `model` | `string` |

Returns

string

Defined in

packages/core/src/llm/anthropic.ts:127


streamChat

streamChat(messages, systemPrompt?): AsyncIterable<ChatResponseChunk>

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |
| `systemPrompt?` | `null` \| `string` |

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/anthropic.ts:192