Skip to main content

Class: MistralAI

MistralAI LLM implementation

Hierarchy

  • BaseLLM

    MistralAI

Constructors

constructor

new MistralAI(init?)

Parameters

| Name | Type |
| :--- | :--- |
| `init?` | `Partial<MistralAI>` |

Overrides

BaseLLM.constructor

Defined in

packages/core/src/llm/mistral.ts:64

Properties

apiKey

Optional apiKey: string

Defined in

packages/core/src/llm/mistral.ts:57


callbackManager

Optional callbackManager: CallbackManager

Defined in

packages/core/src/llm/mistral.ts:58


maxTokens

Optional maxTokens: number

Defined in

packages/core/src/llm/mistral.ts:56


model

model: "mistral-tiny" | "mistral-small" | "mistral-medium"

Defined in

packages/core/src/llm/mistral.ts:53


randomSeed

Optional randomSeed: number

Defined in

packages/core/src/llm/mistral.ts:60


safeMode

safeMode: boolean

Defined in

packages/core/src/llm/mistral.ts:59


session

Private session: MistralAISession

Defined in

packages/core/src/llm/mistral.ts:62


temperature

temperature: number

Defined in

packages/core/src/llm/mistral.ts:54


topP

topP: number

Defined in

packages/core/src/llm/mistral.ts:55

Accessors

metadata

get metadata(): Object

Returns

Object

| Name | Type |
| :--- | :--- |
| `contextWindow` | `number` |
| `maxTokens` | `undefined` \| `number` |
| `model` | `"mistral-tiny"` \| `"mistral-small"` \| `"mistral-medium"` |
| `temperature` | `number` |
| `tokenizer` | `undefined` |
| `topP` | `number` |

Overrides

BaseLLM.metadata

Defined in

packages/core/src/llm/mistral.ts:76

Methods

buildParams

Private buildParams(messages): any

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

any

Defined in

packages/core/src/llm/mistral.ts:91


chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsStreaming` |

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:103

chat(params): Promise<ChatResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMChatParamsNonStreaming` |

Returns

Promise<ChatResponse>

Overrides

BaseLLM.chat

Defined in

packages/core/src/llm/mistral.ts:106


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsStreaming` |

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:18

complete(params): Promise<CompletionResponse>

Parameters

| Name | Type |
| :--- | :--- |
| `params` | `LLMCompletionParamsNonStreaming` |

Returns

Promise<CompletionResponse>

Inherited from

BaseLLM.complete

Defined in

packages/core/src/llm/base.ts:21


streamChat

Protected streamChat(«destructured»): AsyncIterable<ChatResponseChunk>

Parameters

| Name | Type |
| :--- | :--- |
| `«destructured»` | `LLMChatParamsStreaming` |

Returns

AsyncIterable<ChatResponseChunk>

Defined in

packages/core/src/llm/mistral.ts:124


tokens

tokens(messages): number

Parameters

| Name | Type |
| :--- | :--- |
| `messages` | `ChatMessage[]` |

Returns

number

Overrides

BaseLLM.tokens

Defined in

packages/core/src/llm/mistral.ts:87