
Class: OpenAIAgent

The agent runner manages task execution and provides a high-level API for the user.

Extends

LLMAgent

Constructors

new OpenAIAgent()

new OpenAIAgent(params): OpenAIAgent

Parameters

params: LLMAgentParams

Returns

OpenAIAgent

Overrides

LLMAgent.constructor

Defined in

packages/llm/openai/dist/index.d.ts:14
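
A minimal construction sketch. It assumes the `llamaindex` package re-exports OpenAIAgent and FunctionTool (adjust the import to `@llamaindex/openai` if your build splits packages); the `sumNumbers` tool and its schema are purely illustrative values for LLMAgentParams.

```ts
import { OpenAIAgent, FunctionTool } from "llamaindex";

// Illustrative tool; any BaseToolWithCall works as an LLMAgentParams entry.
const sumNumbers = FunctionTool.from(
  ({ a, b }: { a: number; b: number }) => `${a + b}`,
  {
    name: "sumNumbers",
    description: "Add two numbers and return the result as a string",
    parameters: {
      type: "object",
      properties: {
        a: { type: "number", description: "The first number" },
        b: { type: "number", description: "The second number" },
      },
      required: ["a", "b"],
    },
  },
);

// LLMAgentParams: at minimum a list of tools; llm, verbose, etc. are optional.
const agent = new OpenAIAgent({ tools: [sumNumbers] });
```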

Properties

createStore()

createStore: () => object

Returns

object

Inherited from

LLMAgent.createStore

Defined in

packages/core/agent/dist/agent/index.d.ts:102


taskHandler

taskHandler: TaskHandler<LLM<object, object>, object, object>

Inherited from

LLMAgent.taskHandler

Defined in

packages/core/agent/dist/agent/index.d.ts:103


defaultTaskHandler

static defaultTaskHandler: TaskHandler<LLM<object, object>, object, object>

Inherited from

LLMAgent.defaultTaskHandler

Defined in

packages/core/agent/dist/agent/index.d.ts:76

Accessors

chatHistory

get chatHistory(): ChatMessage<AdditionalMessageOptions>[]

Returns

ChatMessage<AdditionalMessageOptions>[]

Inherited from

LLMAgent.chatHistory

Defined in

packages/core/agent/dist/agent/index.d.ts:79


llm

get llm(): AI

Returns

AI

Inherited from

LLMAgent.llm

Defined in

packages/core/agent/dist/agent/index.d.ts:78


verbose

get verbose(): boolean

Returns

boolean

Inherited from

LLMAgent.verbose

Defined in

packages/core/agent/dist/agent/index.d.ts:80
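
A short sketch of the read-only accessors, continuing from the construction sketch above; `llm.metadata.model` assumes the LLMMetadata shape of recent llamaindex releases.

```ts
console.log(agent.verbose);             // boolean flag set at construction
console.log(agent.chatHistory.length);  // ChatMessage[] accumulated so far
console.log(agent.llm.metadata.model);  // underlying OpenAI LLM (assumed metadata field)
```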

Methods

chat()

chat(params)

chat(params): Promise<EngineResponse>

Parameters

params: NonStreamingChatEngineParams<object>

Returns

Promise<EngineResponse>

Inherited from

LLMAgent.chat

Defined in

packages/core/agent/dist/agent/index.d.ts:85

chat(params)

chat(params): Promise<ReadableStream<EngineResponse>>

Parameters

params: StreamingChatEngineParams<object>

Returns

Promise<ReadableStream<EngineResponse>>

Inherited from

LLMAgent.chat

Defined in

packages/core/agent/dist/agent/index.d.ts:86
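
A sketch of both chat() overloads, reusing `agent` from the construction sketch above. The `message.content` and `delta` accessors follow the EngineResponse shape in recent llamaindex releases, and `for await` over a ReadableStream assumes a runtime where web streams are async iterable (Node 18+); verify both against your version.

```ts
async function main() {
  // Non-streaming overload: resolves to a single EngineResponse.
  const response = await agent.chat({ message: "What is 5 + 7?" });
  console.log(response.message.content);

  // Streaming overload: resolves to a ReadableStream of EngineResponse chunks.
  const stream = await agent.chat({
    message: "Now explain how you got that result.",
    stream: true,
  });
  for await (const chunk of stream) {
    process.stdout.write(chunk.delta);
  }
}

main();
```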


createTask()

createTask(message, stream?, verbose?, chatHistory?): ReadableStream<TaskStepOutput<LLM<object, object>, object, object>>

Parameters

message: MessageContent

stream?: boolean

verbose?: boolean

chatHistory?: ChatMessage<object>[]

Returns

ReadableStream<TaskStepOutput<LLM<object, object>, object, object>>

Inherited from

LLMAgent.createTask

Defined in

packages/core/agent/dist/agent/index.d.ts:84
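
A sketch of driving the agent loop step by step with createTask(), reusing `agent` from the construction sketch above. The `isLast` and `output` fields are assumptions about the TaskStepOutput shape; check the typings in your installed version.

```ts
async function runTask() {
  // createTask(message, stream?, verbose?, chatHistory?) yields one
  // TaskStepOutput per agent step instead of a single final answer.
  const steps = agent.createTask("What is 5 + 7?", false, true);
  for await (const stepOutput of steps) {
    if (stepOutput.isLast) {
      // Assumed: the last step carries the agent's final output.
      console.log(stepOutput.output);
    }
  }
}

runTask();
```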


getTools()

getTools(query): BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Parameters

query: MessageContent

Returns

BaseToolWithCall[] | Promise<BaseToolWithCall[]>

Inherited from

LLMAgent.getTools

Defined in

packages/core/agent/dist/agent/index.d.ts:82
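
A sketch of getTools(), reusing `agent` from the construction sketch above; with a static tool list it simply returns that list, while tool-retriever setups may resolve asynchronously, hence the await.

```ts
// Run inside an async context.
const tools = await agent.getTools("add two numbers");
// BaseToolWithCall exposes its name and description via metadata.
console.log(tools.map((tool) => tool.metadata.name));
```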


reset()

reset(): void

Returns

void

Inherited from

LLMAgent.reset

Defined in

packages/core/agent/dist/agent/index.d.ts:81
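
A sketch of reset(), reusing `agent` from the construction sketch above; it assumes reset() clears the accumulated chat history, matching the agent runner semantics.

```ts
// Run inside an async context.
await agent.chat({ message: "Remember that my name is Ada." });
agent.reset(); // assumed to clear the accumulated chat history
console.log(agent.chatHistory.length); // expected to be 0 after reset
```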


defaultCreateStore()

static defaultCreateStore(): object

Returns

object

Inherited from

LLMAgent.defaultCreateStore

Defined in

packages/core/agent/dist/agent/index.d.ts:75


shouldContinue()

static shouldContinue<AI, Store, AdditionalMessageOptions>(task): boolean

Type Parameters

AI extends LLM<object, object>

Store extends object = object

AdditionalMessageOptions extends object = AI extends LLM<object, AdditionalMessageOptions> ? AdditionalMessageOptions : never

Parameters

task: Readonly<TaskStep<AI, Store, AdditionalMessageOptions>>

Returns

boolean

Inherited from

LLMAgent.shouldContinue

Defined in

packages/core/agent/dist/agent/index.d.ts:83