Skip to main content

Class: MistralAI

MistralAI LLM implementation

Hierarchy

  • BaseLLM

    MistralAI

Constructors

constructor

new MistralAI(init?): MistralAI

Parameters

Name: Type
init?: Partial<MistralAI>

Returns

MistralAI

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/mistral.ts:58

Properties

apiKey

Optional apiKey: string

Defined in

packages/core/src/llm/mistral.ts:52


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/mistral.ts:51


model

model: "mistral-tiny" | "mistral-small" | "mistral-medium"

Defined in

packages/core/src/llm/mistral.ts:48


randomSeed

Optional randomSeed: number

Defined in

packages/core/src/llm/mistral.ts:54


safeMode

safeMode: boolean

Defined in

packages/core/src/llm/mistral.ts:53


session

Private session: MistralAISession

Defined in

packages/core/src/llm/mistral.ts:56


temperature

temperature: number

Defined in

packages/core/src/llm/mistral.ts:49


topP

topP: number

Defined in

packages/core/src/llm/mistral.ts:50

Accessors

metadata

get metadata(): Object

Returns

Object

Name: Type
contextWindow: number
maxTokens: undefined | number
model: "mistral-tiny" | "mistral-small" | "mistral-medium"
temperature: number
tokenizer: undefined
topP: number

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/mistral.ts:69

Methods

buildParams

buildParams(messages): any

Parameters

Name: Type
messages: ChatMessage[]

Returns

any

Defined in

packages/core/src/llm/mistral.ts:80


chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

Name: Type
params: LLMChatParamsStreaming<Record<string, unknown>>

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:92

chat(params): Promise<ChatResponse>

Parameters

Name: Type
params: LLMChatParamsNonStreaming<Record<string, unknown>>

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:95


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name: Type
params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:23

complete(params): Promise<CompletionResponse>

Parameters

Name: Type
params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:26


streamChat

streamChat(«destructured»): AsyncIterable<ChatResponseChunk>

Parameters

Name: Type
«destructured»: LLMChatParamsStreaming<Record<string, unknown>>

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/mistral.ts:113