# Class: OpenAI

OpenAI LLM implementation
## Hierarchy

- `BaseLLM`

  ↳ **`OpenAI`**

    ↳↳ `TogetherLLM`
## Constructors

### constructor

• **new OpenAI**(`init?`)

#### Parameters

Name | Type |
---|---|
`init?` | `Partial<OpenAI> & { azure?: AzureOpenAIConfig }` |
#### Overrides

BaseLLM.constructor

#### Defined in

packages/core/src/llm/LLM.ts:88
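A minimal construction sketch, assuming the class is exported from the top-level `llamaindex` package and that `OPENAI_API_KEY` is set in the environment when `apiKey` is omitted:

```typescript
import { OpenAI } from "llamaindex";

// Every field of `init` is optional; unset fields fall back to the
// class defaults (model, temperature, topP, etc. listed below).
const llm = new OpenAI({
  model: "gpt-4",
  temperature: 0.1,
  maxTokens: 256,
});
```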
## Properties

### additionalChatOptions

• `Optional` **additionalChatOptions**: `Omit<Partial<ChatCompletionCreateParams>, "stream" | "max_tokens" | "messages" | "model" | "temperature" | "top_p">`

#### Defined in

packages/core/src/llm/LLM.ts:71

### additionalSessionOptions

• `Optional` **additionalSessionOptions**: `Omit<Partial<ClientOptions>, "apiKey" | "timeout" | "maxRetries">`

#### Defined in

packages/core/src/llm/LLM.ts:81

### apiKey

• `Optional` **apiKey**: `string` = `undefined`

#### Defined in

packages/core/src/llm/LLM.ts:77

### callbackManager

• `Optional` **callbackManager**: `CallbackManager`

#### Defined in

packages/core/src/llm/LLM.ts:86

### maxRetries

• **maxRetries**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:78

### maxTokens

• `Optional` **maxTokens**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:70

### model

• **model**: `string`

#### Defined in

packages/core/src/llm/LLM.ts:67

### session

• **session**: `OpenAISession`

#### Defined in

packages/core/src/llm/LLM.ts:80

### temperature

• **temperature**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:68

### timeout

• `Optional` **timeout**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:79

### topP

• **topP**: `number`

#### Defined in

packages/core/src/llm/LLM.ts:69
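The two `additional*` option bags are pass-throughs to the underlying `openai` client. A sketch of how they might be combined with the class's own fields (the field names mirror the `openai` package's `ClientOptions` and `ChatCompletionCreateParams`; the concrete values are illustrative):

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
  maxRetries: 5,
  timeout: 60_000, // milliseconds, forwarded to the client session
  // Per-request options the class does not manage itself:
  additionalChatOptions: { presence_penalty: 0.5 },
  // Client options other than the excluded apiKey/timeout/maxRetries:
  additionalSessionOptions: { organization: "org-id-here" }, // hypothetical org id
});
```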
## Accessors

### metadata

• `get` **metadata**(): `Object`

#### Returns

`Object`

Name | Type |
---|---|
contextWindow | `number` |
maxTokens | `undefined` \| `number` |
model | `string` |
temperature | `number` |
tokenizer | `CL100K_BASE` |
topP | `number` |

#### Overrides

BaseLLM.metadata

#### Defined in

packages/core/src/llm/LLM.ts:143
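For example (the concrete numbers depend on the configured model):

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI({ model: "gpt-3.5-turbo" });
const { contextWindow, tokenizer } = llm.metadata;
console.log(contextWindow, tokenizer); // context size in tokens, and the tokenizer
```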
## Methods

### chat

▸ **chat**(`params`): `Promise<AsyncIterable<ChatResponseChunk>>`

#### Parameters

Name | Type |
---|---|
`params` | `LLMChatParamsStreaming` |

#### Returns

`Promise<AsyncIterable<ChatResponseChunk>>`

#### Overrides

BaseLLM.chat

#### Defined in

packages/core/src/llm/LLM.ts:190
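A streaming sketch, assuming `LLMChatParamsStreaming` takes a `messages` array plus `stream: true`, and that each `ChatResponseChunk` carries a `delta` string (both match the library's usual shapes, but are stated here as assumptions):

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
const stream = await llm.chat({
  messages: [{ role: "user", content: "Tell me a joke." }],
  stream: true, // selects this streaming overload
});

for await (const chunk of stream) {
  process.stdout.write(chunk.delta); // incremental text
}
```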
▸ **chat**(`params`): `Promise<ChatResponse>`

#### Parameters

Name | Type |
---|---|
`params` | `LLMChatParamsNonStreaming` |

#### Returns

`Promise<ChatResponse>`

#### Overrides

BaseLLM.chat

#### Defined in

packages/core/src/llm/LLM.ts:193
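And the non-streaming counterpart, assuming `ChatResponse` wraps the reply as a `message` with `content`:

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
const response = await llm.chat({
  messages: [
    { role: "system", content: "You are a terse assistant." },
    { role: "user", content: "What is an LLM?" },
  ],
});
console.log(response.message.content);
```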
### complete

▸ **complete**(`params`): `Promise<AsyncIterable<CompletionResponse>>`

#### Parameters

Name | Type |
---|---|
`params` | `LLMCompletionParamsStreaming` |

#### Returns

`Promise<AsyncIterable<CompletionResponse>>`

#### Inherited from

BaseLLM.complete

#### Defined in

packages/core/src/llm/base.ts:18
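A streaming completion sketch, assuming the params take a `prompt` string plus `stream: true` and that each yielded `CompletionResponse` exposes a `text` field (assumptions, not guarantees):

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
const stream = await llm.complete({
  prompt: "The capital of France is",
  stream: true,
});

for await (const chunk of stream) {
  process.stdout.write(chunk.text);
}
```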
▸ **complete**(`params`): `Promise<CompletionResponse>`

#### Parameters

Name | Type |
---|---|
`params` | `LLMCompletionParamsNonStreaming` |

#### Returns

`Promise<CompletionResponse>`

#### Inherited from

BaseLLM.complete

#### Defined in

packages/core/src/llm/base.ts:21
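The non-streaming form, under the same assumptions:

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
const response = await llm.complete({ prompt: "The capital of France is" });
console.log(response.text);
```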
### mapMessageType

▸ **mapMessageType**(`messageType`): `"function" | "user" | "assistant" | "system"`

#### Parameters

Name | Type |
---|---|
`messageType` | `MessageType` |

#### Returns

`"function" | "user" | "assistant" | "system"`

#### Defined in

packages/core/src/llm/LLM.ts:173
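This translates LlamaIndex `MessageType` values onto OpenAI chat roles. The four roles in the return type presumably map to themselves; how other `MessageType` values are handled is not captured by this signature. For example:

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
llm.mapMessageType("assistant"); // "assistant"
llm.mapMessageType("system"); // "system"
// MessageType values outside the OpenAI role set must fall back to some
// default role; which one is an implementation detail of the source.
```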
### streamChat

▸ `Protected` **streamChat**(`«destructured»`): `AsyncIterable<ChatResponseChunk>`

#### Parameters

Name | Type |
---|---|
`«destructured»` | `LLMChatParamsStreaming` |

#### Returns

`AsyncIterable<ChatResponseChunk>`

#### Defined in

packages/core/src/llm/LLM.ts:228
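Being `Protected`, this method is not called directly by consumers; it is the internal streaming path reached through the `chat({ ..., stream: true })` overload shown above.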
### tokens

▸ **tokens**(`messages`): `number`

#### Parameters

Name | Type |
---|---|
`messages` | `ChatMessage[]` |

#### Returns

`number`

#### Overrides

BaseLLM.tokens
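A quick sketch of counting tokens for a prospective chat call (the count presumably uses the `CL100K_BASE` tokenizer reported by `metadata`):

```typescript
import { OpenAI } from "llamaindex";

const llm = new OpenAI();
const count = llm.tokens([
  { role: "user", content: "How many tokens is this message?" },
]);
console.log(count);
```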