Skip to main content

Class: TogetherLLM

Together AI LLM implementation, built on the OpenAI LLM class

Hierarchy

Constructors

constructor

new TogetherLLM(init?)

Parameters

Name | Type
init? | Partial<OpenAI>

Overrides

OpenAI.constructor

Defined in

packages/core/src/llm/together.ts:4

Properties

additionalChatOptions

Optional additionalChatOptions: Omit<Partial<ChatCompletionCreateParams>, "stream" | "max_tokens" | "messages" | "model" | "temperature" | "top_p">

Inherited from

OpenAI.additionalChatOptions

Defined in

packages/core/src/llm/LLM.ts:71


additionalSessionOptions

Optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">

Inherited from

OpenAI.additionalSessionOptions

Defined in

packages/core/src/llm/LLM.ts:81


apiKey

Optional apiKey: string = undefined

Inherited from

OpenAI.apiKey

Defined in

packages/core/src/llm/LLM.ts:77


callbackManager

Optional callbackManager: CallbackManager

Inherited from

OpenAI.callbackManager

Defined in

packages/core/src/llm/LLM.ts:86


maxRetries

maxRetries: number

Inherited from

OpenAI.maxRetries

Defined in

packages/core/src/llm/LLM.ts:78


maxTokens

Optional maxTokens: number

Inherited from

OpenAI.maxTokens

Defined in

packages/core/src/llm/LLM.ts:70


model

model: string

Inherited from

OpenAI.model

Defined in

packages/core/src/llm/LLM.ts:67


session

session: OpenAISession

Inherited from

OpenAI.session

Defined in

packages/core/src/llm/LLM.ts:80


temperature

temperature: number

Inherited from

OpenAI.temperature

Defined in

packages/core/src/llm/LLM.ts:68


timeout

Optional timeout: number

Inherited from

OpenAI.timeout

Defined in

packages/core/src/llm/LLM.ts:79


topP

topP: number

Inherited from

OpenAI.topP

Defined in

packages/core/src/llm/LLM.ts:69

Accessors

metadata

get metadata(): Object

Returns

Object

Name | Type
contextWindow | number
maxTokens | undefined | number
model | string
temperature | number
tokenizer | CL100K_BASE
topP | number

Inherited from

OpenAI.metadata

Defined in

packages/core/src/llm/LLM.ts:143

Methods

chat

chat(params): Promise<AsyncIterable<ChatResponseChunk>>

Parameters

Name | Type
params | LLMChatParamsStreaming

Returns

Promise<AsyncIterable<ChatResponseChunk>>

Inherited from

OpenAI.chat

Defined in

packages/core/src/llm/LLM.ts:190

chat(params): Promise<ChatResponse>

Parameters

Name | Type
params | LLMChatParamsNonStreaming

Returns

Promise<ChatResponse>

Inherited from

OpenAI.chat

Defined in

packages/core/src/llm/LLM.ts:193


complete

complete(params): Promise<AsyncIterable<CompletionResponse>>

Parameters

Name | Type
params | LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse>>

Inherited from

OpenAI.complete

Defined in

packages/core/src/llm/base.ts:18

complete(params): Promise<CompletionResponse>

Parameters

Name | Type
params | LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

OpenAI.complete

Defined in

packages/core/src/llm/base.ts:21


mapMessageType

mapMessageType(messageType): "function" | "user" | "assistant" | "system"

Parameters

Name | Type
messageType | MessageType

Returns

"function" | "user" | "assistant" | "system"

Inherited from

OpenAI.mapMessageType

Defined in

packages/core/src/llm/LLM.ts:173


streamChat

Protected streamChat(«destructured»): AsyncIterable<ChatResponseChunk>

Parameters

Name | Type
«destructured» | LLMChatParamsStreaming

Returns

AsyncIterable<ChatResponseChunk>

Inherited from

OpenAI.streamChat

Defined in

packages/core/src/llm/LLM.ts:228


tokens

tokens(messages): number

Parameters

Name | Type
messages | ChatMessage[]

Returns

number

Inherited from

OpenAI.tokens

Defined in

packages/core/src/llm/LLM.ts:158