Skip to main content

Class: abstract AgentRunner<AI, Store, AdditionalMessageOptions>

The runner manages task execution and provides a high-level API for the user.

Extended by

Type Parameters

AI extends LLM

Store extends object = object

AdditionalMessageOptions extends object = AI extends LLM<object, infer AdditionalMessageOptions> ? AdditionalMessageOptions : never

Implements

Constructors

new AgentRunner()

protected new AgentRunner<AI, Store, AdditionalMessageOptions>(params): AgentRunner<AI, Store, AdditionalMessageOptions>

Parameters

params: AgentRunnerParams<AI, Store, AdditionalMessageOptions>

Returns

AgentRunner<AI, Store, AdditionalMessageOptions>

Defined in

packages/llamaindex/src/agent/base.ts:259

Properties

defaultTaskHandler

static defaultTaskHandler: TaskHandler<LLM<object, object>>

Defined in

packages/llamaindex/src/agent/base.ts:228

Accessors

chatHistory

get chatHistory(): ChatMessage<AdditionalMessageOptions>[]

Returns

ChatMessage<AdditionalMessageOptions>[]

Defined in

packages/llamaindex/src/agent/base.ts:277


llm

get llm(): AI

Returns

AI

Defined in

packages/llamaindex/src/agent/base.ts:273


verbose

get verbose(): boolean

Returns

boolean

Defined in

packages/llamaindex/src/agent/base.ts:281

Methods

chat()

chat(params)

chat(params): Promise<EngineResponse>

Sends a message, along with the class's current chat history, to the LLM.

Parameters

params: ChatEngineParamsNonStreaming

Returns

Promise<EngineResponse>

Implementation of

ChatEngine.chat

Defined in

packages/llamaindex/src/agent/base.ts:348

chat(params)

chat(params): Promise<ReadableStream<EngineResponse>>

Sends a message, along with the class's current chat history, to the LLM.

Parameters

params: ChatEngineParamsStreaming

Returns

Promise<ReadableStream<EngineResponse>>

Implementation of

ChatEngine.chat

Defined in

packages/llamaindex/src/agent/base.ts:349


createStore()

abstract createStore(): Store

Returns

Store

Defined in

packages/llamaindex/src/agent/base.ts:222


createTask()

createTask(message, stream, verbose, chatHistory?): ReadableStream<TaskStepOutput<AI, Store, AdditionalMessageOptions>>

Parameters

message: MessageContent

stream: boolean = false

verbose: undefined | boolean = undefined

chatHistory?: ChatMessage<AdditionalMessageOptions>[]

Returns

ReadableStream<TaskStepOutput<AI, Store, AdditionalMessageOptions>>

Defined in

packages/llamaindex/src/agent/base.ts:308


getTools()

getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Parameters

query: MessageContent

Returns

BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Defined in

packages/llamaindex/src/agent/base.ts:289


reset()

reset(): void

Resets the chat history so that it's empty.

Returns

void

Implementation of

ChatEngine.reset

Defined in

packages/llamaindex/src/agent/base.ts:285


defaultCreateStore()

static defaultCreateStore(): object

Returns

object

Defined in

packages/llamaindex/src/agent/base.ts:224


shouldContinue()

static shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean

Type Parameters

AI extends LLM<object, object>

Store extends object = object

AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never

Parameters

task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>

Returns

boolean

Defined in

packages/llamaindex/src/agent/base.ts:295