Instructions
Instructions are the scaffolding that steers every run. When you build an agent, you can pass plain strings, context-aware functions, or async callbacks; right before each session starts, the library resolves them (in order) into a single system prompt. Because the resolver receives the run context, you can tailor tone, policy, or knowledge base links per tenant without rebuilding the agent.
Keep instructions concise and layered: use static lines for global policy, then dynamic functions for request-specific details. If you need to defer heavy lookups (for example, loading account limits), perform that work inside the instruction function so the result is cached for the duration of the session.
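To make the layered pattern concrete, here is a minimal TypeScript sketch. The tenant-oriented context fields and the loadAccountLimits method are illustrative assumptions rather than part of the library; the Agent options and the getModel helper mirror the example further down.

import { Agent } from "@hoangvvo/llm-agent";
import { getModel } from "./get-model.ts";

interface TenantContext {
  // Illustrative per-tenant fields; the library does not prescribe a context shape.
  tenantName: string;
  knowledgeBaseUrl: string;
  // Hypothetical heavy lookup deferred until instruction resolution.
  loadAccountLimits(): Promise<string>;
}

const supportAgent = new Agent<TenantContext>({
  name: "Support",
  instructions: [
    // Static line: global policy shared by every tenant.
    "Answer politely and never promise features that are not documented.",
    // Dynamic function: resolved with the run context before the session starts.
    (ctx) =>
      `You are assisting the ${ctx.tenantName} tenant. Their knowledge base lives at ${ctx.knowledgeBaseUrl}.`,
    // Async callback: do the heavier lookup here so its result lands in the system prompt.
    async (ctx) => `Current account limits: ${await ctx.loadAccountLimits()}.`,
  ],
  model: getModel("openai", "gpt-4o"),
});

Running supportAgent with a different TenantContext value then produces a different system prompt without rebuilding the agent.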
type InstructionParam<TContext> =
  | string
  | ((ctx: TContext) => string)
  | ((ctx: TContext) => Promise<string>);
pub enum InstructionParam<TCtx> {
    String(String),
    Func(Box<dyn Fn(&TCtx) -> Result<String, BoxedError> + Send + Sync>),
    AsyncFunc(
        Box<
            dyn Fn(
                    &TCtx,
                ) -> Pin<Box<dyn futures::Future<Output = Result<String, BoxedError>> + Send>>
                + Send
                + Sync,
        >,
    ),
}
type InstructionParam[C any] struct {
    String *string
    Func   func(ctx context.Context, contextVal C) (string, error)
}
Example
Below is a small agent that mixes static and dynamic instructions across the three SDKs.
import { Agent, getResponseText } from "@hoangvvo/llm-agent";
import { getModel } from "./get-model.ts";

interface DungeonRunContext {
  dungeonMaster: string;
  partyName: string;
  currentQuest: string;
  highlightPlayerClass: string;
  getOracleWhisper(): Promise<string>;
}
const model = getModel("openai", "gpt-4o");
const dungeonCoach = new Agent<DungeonRunContext>({
  name: "Torch",
  instructions: [
    "You are Torch, a supportive guide who keeps tabletop role-playing sessions moving. Offer concrete options instead of long monologues.",
    (context) =>
      `You are helping ${context.dungeonMaster}, the Dungeon Master for the ${context.partyName}. They are running the quest "${context.currentQuest}" and need a quick nudge that favors the party's ${context.highlightPlayerClass}.`,
    async (context) => {
      const whisper = await context.getOracleWhisper();
      return `Weave in the oracle whisper: "${whisper}" so it feels like an in-world hint.`;
    },
  ],
  model,
});
const context: DungeonRunContext = {
  dungeonMaster: "Rowan",
  partyName: "Lanternbearers",
  currentQuest: "Echoes of the Sunken Keep",
  highlightPlayerClass: "ranger",
  async getOracleWhisper() {
    await new Promise((resolve) => setTimeout(resolve, 25));
    return "the moss remembers every secret step";
  },
};

const response = await dungeonCoach.run({
  context,
  input: [
    {
      type: "message",
      role: "user",
      content: [
        {
          type: "text",
          text: "The party is stuck at a collapsed bridge. What should happen next?",
        },
      ],
    },
  ],
});
console.log(getResponseText(response));
use dotenvy::dotenv;
use llm_agent::{Agent, AgentRequest, InstructionParam};
use llm_sdk::{
    openai::{OpenAIModel, OpenAIModelOptions},
    Message, Part,
};
use std::{env, error::Error, sync::Arc};
use tokio::time::{sleep, Duration};

#[derive(Clone)]
struct DungeonRunContext {
    dungeon_master: String,
    party_name: String,
    current_quest: String,
    highlight_player_class: String,
    oracle_hint: String,
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn Error>> {
    dotenv().ok();

    let model = Arc::new(OpenAIModel::new(
        "gpt-4o",
        OpenAIModelOptions {
            api_key: env::var("OPENAI_API_KEY")
                .expect("OPENAI_API_KEY environment variable must be set"),
            ..Default::default()
        },
    ));

    let dungeon_coach = Agent::<DungeonRunContext>::builder("Torch", model)
        .add_instruction(
            "You are Torch, a supportive guide who keeps tabletop role-playing sessions moving. \
             Offer concrete options instead of long monologues.",
        )
        .add_instruction(|ctx: &DungeonRunContext| {
            Ok(format!(
                "You are helping {}, the Dungeon Master for the {}. They are running the quest \
                 \"{}\" and need a quick nudge that favors the party's {}.",
                ctx.dungeon_master, ctx.party_name, ctx.current_quest, ctx.highlight_player_class
            ))
        })
        .add_instruction(InstructionParam::AsyncFunc(Box::new(
            |ctx: &DungeonRunContext| {
                let hint = ctx.oracle_hint.clone();
                Box::pin(async move {
                    sleep(Duration::from_millis(25)).await;
                    Ok(format!(
                        "Weave in the oracle whisper: \"{hint}\" so it feels like an in-world \
                         hint."
                    ))
                })
            },
        )))
        .build();
    let context = DungeonRunContext {
        dungeon_master: "Rowan".into(),
        party_name: "Lanternbearers".into(),
        current_quest: "Echoes of the Sunken Keep".into(),
        highlight_player_class: "ranger".into(),
        oracle_hint: "the moss remembers every secret step".into(),
    };

    let response = dungeon_coach
        .run(AgentRequest {
            context,
            input: vec![llm_agent::AgentItem::Message(Message::user(vec![
                Part::text("The party is stuck at a collapsed bridge. What should happen next?"),
            ]))],
        })
        .await?;

    println!("{}", response.text());

    Ok(())
}
package main
import (
    "context"
    "fmt"
    "log"
    "os"
    "time"

    llmagent "github.com/hoangvvo/llm-sdk/agent-go"
    llmsdk "github.com/hoangvvo/llm-sdk/sdk-go"
    "github.com/hoangvvo/llm-sdk/sdk-go/openai"
    "github.com/joho/godotenv"
)
type DungeonRunContext struct {
    DungeonMaster        string
    PartyName            string
    CurrentQuest         string
    HighlightPlayerClass string
    OracleHint           string
}

func (c *DungeonRunContext) GetOracleWhisper(ctx context.Context) (string, error) {
    select {
    case <-time.After(25 * time.Millisecond):
        return c.OracleHint, nil
    case <-ctx.Done():
        return "", ctx.Err()
    }
}
func main() {
    godotenv.Load("../.env")

    apiKey := os.Getenv("OPENAI_API_KEY")
    if apiKey == "" {
        log.Fatal("OPENAI_API_KEY environment variable must be set")
    }

    model := openai.NewOpenAIModel("gpt-4o", openai.OpenAIModelOptions{APIKey: apiKey})

    staticInstruction := "You are Torch, a supportive guide who keeps tabletop role-playing sessions moving. Offer concrete options instead of long monologues."

    dynamicInstruction := func(ctx context.Context, ctxVal *DungeonRunContext) (string, error) {
        return fmt.Sprintf(
            "You are helping %s, the Dungeon Master for the %s. They are running the quest \"%s\" and need a quick nudge that favors the party's %s.",
            ctxVal.DungeonMaster, ctxVal.PartyName, ctxVal.CurrentQuest, ctxVal.HighlightPlayerClass,
        ), nil
    }

    asyncInstruction := func(ctx context.Context, ctxVal *DungeonRunContext) (string, error) {
        whisper, err := ctxVal.GetOracleWhisper(ctx)
        if err != nil {
            return "", err
        }
        return fmt.Sprintf("Weave in the oracle whisper: \"%s\" so it feels like an in-world hint.", whisper), nil
    }

    dungeonCoach := llmagent.NewAgent[*DungeonRunContext]("Torch", model,
        llmagent.WithInstructions(
            llmagent.InstructionParam[*DungeonRunContext]{String: &staticInstruction},
            llmagent.InstructionParam[*DungeonRunContext]{Func: dynamicInstruction},
            llmagent.InstructionParam[*DungeonRunContext]{Func: asyncInstruction},
        ),
    )

    ctx := &DungeonRunContext{
        DungeonMaster:        "Rowan",
        PartyName:            "Lanternbearers",
        CurrentQuest:         "Echoes of the Sunken Keep",
        HighlightPlayerClass: "ranger",
        OracleHint:           "the moss remembers every secret step",
    }

    prompt := "The party is stuck at a collapsed bridge. What should happen next?"

    response, err := dungeonCoach.Run(context.Background(), llmagent.AgentRequest[*DungeonRunContext]{
        Input: []llmagent.AgentItem{
            llmagent.NewAgentItemMessage(
                llmsdk.NewUserMessage(
                    llmsdk.NewTextPart(prompt),
                ),
            ),
        },
        Context: ctx,
    })
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(response.Text())
}