Skip to main content

Class: OpenAI

Unified language model interface

Extends

ToolCallLLM
Extended by

Constructors

new OpenAI()

new OpenAI(init?): OpenAI

Parameters

init?: Partial<OpenAI> & object

Returns

OpenAI

Overrides

ToolCallLLM.constructor

Defined in

packages/llm/openai/dist/index.d.ts:230

Properties

additionalChatOptions?

optional additionalChatOptions: OpenAIAdditionalChatOptions

Defined in

packages/llm/openai/dist/index.d.ts:224


additionalSessionOptions?

optional additionalSessionOptions: Omit<Partial<ClientOptions>, "apiKey" | "maxRetries" | "timeout">

Defined in

packages/llm/openai/dist/index.d.ts:229


apiKey?

optional apiKey: string

Defined in

packages/llm/openai/dist/index.d.ts:225


maxRetries

maxRetries: number

Defined in

packages/llm/openai/dist/index.d.ts:226


maxTokens?

optional maxTokens: number

Defined in

packages/llm/openai/dist/index.d.ts:223


model

model: (string & object) | ChatModel

Defined in

packages/llm/openai/dist/index.d.ts:220


session

session: OpenAISession

Defined in

packages/llm/openai/dist/index.d.ts:228


temperature

temperature: number

Defined in

packages/llm/openai/dist/index.d.ts:221


timeout?

optional timeout: number

Defined in

packages/llm/openai/dist/index.d.ts:227


topP

topP: number

Defined in

packages/llm/openai/dist/index.d.ts:222

Accessors

metadata

get metadata(): LLMMetadata

Returns

LLMMetadata

Overrides

ToolCallLLM.metadata

Defined in

packages/llm/openai/dist/index.d.ts:234


supportToolCall

get supportToolCall(): boolean

Returns

boolean

Overrides

ToolCallLLM.supportToolCall

Defined in

packages/llm/openai/dist/index.d.ts:233

Methods

chat()

chat(params)

chat(params): Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>>

Overrides

ToolCallLLM.chat

Defined in

packages/llm/openai/dist/index.d.ts:237

chat(params)

chat(params): Promise<ChatResponse<ToolCallLLMMessageOptions>>

Get a chat response from the LLM

Parameters

params: LLMChatParamsNonStreaming<OpenAIAdditionalChatOptions, ToolCallLLMMessageOptions>

Returns

Promise<ChatResponse<ToolCallLLMMessageOptions>>

Overrides

ToolCallLLM.chat

Defined in

packages/llm/openai/dist/index.d.ts:238


complete()

complete(params)

complete(params): Promise<AsyncIterable<CompletionResponse, any, any>>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsStreaming

Returns

Promise<AsyncIterable<CompletionResponse, any, any>>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:168

complete(params)

complete(params): Promise<CompletionResponse>

Get a prompt completion from the LLM

Parameters

params: LLMCompletionParamsNonStreaming

Returns

Promise<CompletionResponse>

Inherited from

ToolCallLLM.complete

Defined in

packages/core/dist/llms/index.d.ts:169


streamChat()

protected streamChat(baseRequestParams): AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Parameters

baseRequestParams: ChatCompletionCreateParams

Returns

AsyncIterable<ChatResponseChunk<ToolCallLLMMessageOptions>, any, any>

Defined in

packages/llm/openai/dist/index.d.ts:239


toOpenAIMessage()

static toOpenAIMessage(messages): ChatCompletionMessageParam[]

Parameters

messages: ChatMessage<ToolCallLLMMessageOptions>[]

Returns

ChatCompletionMessageParam[]

Defined in

packages/llm/openai/dist/index.d.ts:236


toOpenAIRole()

static toOpenAIRole(messageType): ChatCompletionRole

Parameters

messageType: MessageType

Returns

ChatCompletionRole

Defined in

packages/llm/openai/dist/index.d.ts:235


toTool()

static toTool(tool): ChatCompletionTool

Parameters

tool: BaseTool<any>

Returns

ChatCompletionTool

Defined in

packages/llm/openai/dist/index.d.ts:240