- Name: Agent identifier
- Instructions: Behavior instructions (system prompt)
- Model: LLM model to use (OpenAI, Anthropic, Bedrock)
- Tools: Available tools for the agent
- Memory: Memory configuration
- RAG: Knowledge base search configuration
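A minimal sketch using just the first three fields (assuming the remaining fields are optional, as the complete example below suggests):

import { Agent, openai } from '@runflow-ai/sdk';

const minimalAgent = new Agent({
  name: 'Support Agent',
  instructions: 'You are a helpful customer support agent.',
  model: openai('gpt-4o-mini'),
});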
Complete Agent Configuration
import { Agent, openai, anthropic } from '@runflow-ai/sdk';

const agent = new Agent({
  name: 'Advanced Support Agent',
  instructions: `You are an expert customer support agent.
- Always be polite and helpful
- Solve problems efficiently
- Use tools when needed`,

  // Model
  model: anthropic('claude-3-5-sonnet-20241022'),

  // Model configuration
  modelConfig: {
    temperature: 0.7,
    maxTokens: 4000,
    topP: 0.9,
    frequencyPenalty: 0,
    presencePenalty: 0,
  },

  // Memory
  memory: {
    maxTurns: 20,
    summarizeAfter: 50,
    summarizePrompt: 'Create a concise summary highlighting key points and decisions',
    summarizeModel: openai('gpt-4o-mini'), // Cheaper model for summaries
  },

  // RAG (Agentic - LLM decides when to search)
  rag: {
    vectorStore: 'support-docs',
    k: 5,
    threshold: 0.7,
    searchPrompt: 'Use for technical questions',
  },

  // Tools (ticketTool and orderTool are defined elsewhere)
  tools: {
    createTicket: ticketTool,
    searchOrders: orderTool,
  },

  // Tool iteration limit
  maxToolIterations: 10,

  // Streaming
  streaming: {
    enabled: true,
  },

  // Debug mode
  debug: true,
});
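With this configuration in place, a turn is processed by passing the user message and a session id (the same input shape used in the supervisor example further down). A rough usage sketch; the exact AgentOutput shape is not shown here:

const result = await agent.process({
  message: 'My order never arrived, can you help?',
  sessionId: 'session_123',
});

console.log(result); // AgentOutput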
Supported Models
import { openai, anthropic, bedrock } from '@runflow-ai/sdk';
// OpenAI
const gpt4 = openai('gpt-4o');
const gpt4mini = openai('gpt-4o-mini');
const gpt4turbo = openai('gpt-4-turbo');
const gpt35 = openai('gpt-3.5-turbo');
// Anthropic (Claude)
const claude35 = anthropic('claude-3-5-sonnet-20241022');
const claude3opus = anthropic('claude-3-opus-20240229');
const claude3sonnet = anthropic('claude-3-sonnet-20240229');
const claude3haiku = anthropic('claude-3-haiku-20240307');
// AWS Bedrock
const claudeBedrock = bedrock('anthropic.claude-3-sonnet-20240229-v1:0');
const titan = bedrock('amazon.titan-text-express-v1');
Agent Methods
// Process a message
process(input: AgentInput): Promise<AgentOutput>

// Stream a message
processStream(input: AgentInput): AsyncIterable<ChunkType>

// Simple generation (without full agent context)
generate(input: string | Message[]): Promise<{ text: string }>

// Streaming generation
generateStream(prompt: string): AsyncIterable<ChunkType>

// Generation with tools
generateWithTools(input): Promise<{ text: string }>
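Since processStream yields an async iterable, a streamed turn can be consumed with for await...of. A minimal sketch; the exact ChunkType shape is not documented here, so each chunk is simply logged:

const stream = await agent.processStream({
  message: 'Where is my order?',
  sessionId: 'session_123',
});

for await (const chunk of stream) {
  console.log(chunk); // ChunkType: inspect to see text and tool-call deltas
}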
Multi-Agent Systems (Supervisor Pattern)
const supervisor = new Agent({
  name: 'Supervisor',
  instructions: 'Route tasks to appropriate agents.',
  model: openai('gpt-4o'),

  agents: {
    support: {
      name: 'Support Agent',
      instructions: 'Handle support requests.',
      model: openai('gpt-4o-mini'),
    },
    sales: {
      name: 'Sales Agent',
      instructions: 'Handle sales inquiries.',
      model: openai('gpt-4o-mini'),
    },
  },
});

// The supervisor automatically routes each request to the appropriate agent
await supervisor.process({
  message: 'I want to buy your product',
  sessionId: 'session_123',
});
Debug Mode
const agent = new Agent({
  name: 'Debug Agent',
  instructions: 'Help users',
  model: openai('gpt-4o'),

  // Simple debug (all logs enabled):
  // debug: true,

  // Or a detailed debug configuration:
  debug: {
    enabled: true,
    logMessages: true,   // Log messages
    logLLMCalls: true,   // Log LLM API calls
    logToolCalls: true,  // Log tool executions
    logRAG: true,        // Log RAG searches
    logMemory: true,     // Log memory operations
    truncateAt: 1000,    // Truncate logs at N characters
  },
});