Class: OpenAI
Unified language model interface
Extends
Extended by
Constructors
new OpenAI()
new OpenAI(init?): OpenAI
Parameters
• init?: Omit<Partial<OpenAI>, "session"> & object
Returns
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:247
Properties
additionalChatOptions?
optional
additionalChatOptions: OpenAIAdditionalChatOptions
Defined in
packages/providers/openai/dist/index.d.ts:240
additionalSessionOptions?
optional
additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">
Defined in
packages/providers/openai/dist/index.d.ts:244
apiKey?
optional
apiKey: string
Defined in
packages/providers/openai/dist/index.d.ts:241
lazySession()
lazySession: () => Promise<LLMInstance$1>
Returns
Promise<LLMInstance$1>
Defined in
packages/providers/openai/dist/index.d.ts:245
maxRetries
maxRetries: number
Defined in
packages/providers/openai/dist/index.d.ts:242
maxTokens?
optional
maxTokens: number
Defined in
packages/providers/openai/dist/index.d.ts:239
model
model: string & object | ChatModel
Defined in
packages/providers/openai/dist/index.d.ts:236
temperature
temperature: number
Defined in
packages/providers/openai/dist/index.d.ts:237
timeout?
optional
timeout: number
Defined in
packages/providers/openai/dist/index.d.ts:243
topP
topP: number
Defined in
packages/providers/openai/dist/index.d.ts:238
Accessors
metadata
Get Signature
get metadata(): LLMMetadata & object
Returns
LLMMetadata & object
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:252
session
Get Signature
get session(): Promise<LLMInstance$1>
Returns
Promise<LLMInstance$1>
Defined in
packages/providers/openai/dist/index.d.ts:246
supportToolCall
Get Signature
get supportToolCall(): boolean
Returns
boolean
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:251
Methods
chat()
chat(params)
chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Get a chat response from the LLM
Parameters
• params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>
Returns
Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>
Overrides
Defined in
packages/providers/openai/dist/index.d.ts:255
chat(params)
chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>
Parameters
• params: LLMChatParamsNonStreaming
<OpenAIAdditionalChatOptions
, ToolCallLLMMessageOptions
>