diff --git a/README.md b/README.md
index 9652e47..c970017 100644
--- a/README.md
+++ b/README.md
@@ -82,13 +82,16 @@ A flow is JSON. No custom syntax, no new language — just structured data that
{
"flow": "support-triage",
"description": "Classify a ticket, draft a reply, get approval, send it",
- "trigger": { "on": "webhook", "from": "helpdesk" },
+ "inputs": {
+ "body": { "type": "string", "required": true },
+ "id": { "type": "string", "required": true }
+ },
"nodes": [
{
"name": "classify",
"do": "ai",
"prompt": "Classify this ticket as billing, technical, or general",
- "input": "trigger.body",
+ "input": "inputs.body",
"schema": {
"category": "billing | technical | general",
"urgency": "low | medium | high",
@@ -106,7 +109,7 @@ A flow is JSON. No custom syntax, no new language — just structured data that
{
"name": "handle-billing",
"do": "agent",
- "task": "Draft a billing support reply for: {{ trigger.body }}",
+ "task": "Draft a billing support reply for: {{ inputs.body }}",
"output": "draft"
}
],
@@ -114,7 +117,7 @@ A flow is JSON. No custom syntax, no new language — just structured data that
{
"name": "handle-technical",
"do": "agent",
- "task": "Draft a technical support reply for: {{ trigger.body }}",
+ "task": "Draft a technical support reply for: {{ inputs.body }}",
"output": "draft"
}
]
@@ -123,7 +126,7 @@ A flow is JSON. No custom syntax, no new language — just structured data that
{
"name": "handle-general",
"do": "agent",
- "task": "Draft a general support reply for: {{ trigger.body }}",
+ "task": "Draft a general support reply for: {{ inputs.body }}",
"output": "draft"
}
]
@@ -139,7 +142,7 @@ A flow is JSON. No custom syntax, no new language — just structured data that
"do": "http",
"url": "https://helpdesk.example.com/api/reply",
"method": "POST",
- "body": { "message": "{{ draft }}", "ticketId": "{{ trigger.id }}" },
+ "body": { "message": "{{ draft }}", "ticketId": "{{ inputs.id }}" },
"retry": { "limit": 3, "delay": "2s", "backoff": "exponential" }
}
]
@@ -161,7 +164,7 @@ The most important node. A single LLM call that returns structured or freeform o
"name": "classify",
"do": "ai",
"prompt": "Classify this support ticket",
- "input": "trigger.body",
+ "input": "inputs.body",
"schema": {
"category": "billing | technical | general",
"confidence": "number",
@@ -225,15 +228,15 @@ Routes the flow to a sub-flow based on a value in state. Each path is an array o
"on": "classification.category",
"paths": {
"billing": [
- { "name": "lookup-invoice", "do": "http", "url": "https://api.example.com/invoice/{{ trigger.id }}", "output": "invoice" },
+ { "name": "lookup-invoice", "do": "http", "url": "https://api.example.com/invoice/{{ inputs.id }}", "output": "invoice" },
{ "name": "draft-reply", "do": "ai", "prompt": "Draft billing reply for: {{ invoice }}", "output": "draft" }
],
"technical": [
- { "name": "draft-reply", "do": "agent", "task": "Research and draft technical reply for: {{ trigger.body }}", "output": "draft" }
+ { "name": "draft-reply", "do": "agent", "task": "Research and draft technical reply for: {{ inputs.body }}", "output": "draft" }
]
},
"default": [
- { "name": "draft-reply", "do": "ai", "prompt": "Draft a general reply for: {{ trigger.body }}", "output": "draft" }
+ { "name": "draft-reply", "do": "ai", "prompt": "Draft a general reply for: {{ inputs.body }}", "output": "draft" }
]
}
```
@@ -272,7 +275,7 @@ Supports comparison and logical operators:
```
"classification.priority == 'urgent'"
"validation.valid && items.length > 0"
-"trigger.amount > 1000 || trigger.vip == true"
+"inputs.amount > 1000 || inputs.vip == true"
```
---
@@ -341,7 +344,7 @@ Runs multiple nodes at the same time. Waits for all to complete (`mode: "all"`)
{
"name": "notify-slack",
"do": "http",
- "url": "https://hooks.slack.com/services/{{ trigger.slackWebhook }}",
+ "url": "https://hooks.slack.com/services/{{ inputs.slackWebhook }}",
"method": "POST",
"body": { "text": "Ticket resolved: {{ classification.summary }}" },
"retry": { "limit": 3, "delay": "1s", "backoff": "exponential" },
@@ -358,9 +361,9 @@ All fields support `{{ templates }}`. Retry is strongly recommended for any outb
Read, write, or delete values that persist across flow runs.
```json
-{ "name": "save-result", "do": "memory", "action": "write", "key": "ticket-{{ trigger.id }}", "value": "{{ classification.category }}" }
-{ "name": "load-history", "do": "memory", "action": "read", "key": "ticket-{{ trigger.id }}", "output": "previous_category" }
-{ "name": "cleanup", "do": "memory", "action": "delete", "key": "ticket-{{ trigger.id }}" }
+{ "name": "save-result", "do": "memory", "action": "write", "key": "ticket-{{ inputs.id }}", "value": "{{ classification.category }}" }
+{ "name": "load-history", "do": "memory", "action": "read", "key": "ticket-{{ inputs.id }}", "output": "previous_category" }
+{ "name": "cleanup", "do": "memory", "action": "delete", "key": "ticket-{{ inputs.id }}" }
```
In the OpenClaw plugin, memory persists to `~/.openclaw/flow-memory/`. In Cloudflare, it maps to KV or D1. Keys support templates.
@@ -420,7 +423,7 @@ Duration syntax: `30s`, `5m`, `2h`, `1d`. Maps directly to Cloudflare's `step.sl
{
"name": "format-date",
"do": "code",
- "input": "trigger.timestamp",
+ "input": "inputs.timestamp",
"run": "new Date(input).toLocaleDateString('en-GB')",
"output": "formatted_date"
}
@@ -432,7 +435,7 @@ Single expressions are returned automatically. Multi-statement bodies (containin
{
"name": "calc",
"do": "code",
- "input": "trigger",
+ "input": "inputs",
"run": "const total = input.price * input.qty;\nconst tax = total * 0.22;\nreturn { total, tax, grand: total + tax };",
"output": "invoice"
}
@@ -447,15 +450,17 @@ No imports, no async, no filesystem access. `state` and `input` are frozen — r
Any string field supports `{{ path.to.value }}` interpolation resolved against flow state:
```
-{{ trigger.body }} # initial input
+{{ inputs.body }} # initial input payload (always available)
{{ classification.category }} # node with output: "classification" → access .category
-{{ trigger.user.email }} # nested dotted path
+{{ inputs.user.email }} # nested dotted path
{{ research.web_results }} # array or object (serialized to JSON string)
```
**Important:** templates reference the **`output` key**, not the node name. If a node has `"name": "get_data", "output": "api"`, reference it as `{{ api }}` — not `{{ get_data }}`.
-Flow state starts as `{ trigger: <payload> }` and grows as nodes complete.
+Flow state starts as `{ inputs: <payload> }` and grows as nodes complete. The
+caller (CLI, webhook server, parent flow, dashboard) is responsible for
+producing that payload — the flow itself is trigger-agnostic.
---
@@ -567,7 +572,7 @@ clawflow install lead-enrichment
```
Every flow in the registry is:
-- Parameterized (inputs declared in `trigger`)
+- Parameterized (inputs declared in the `inputs:` block)
- Runtime-agnostic (runs on OpenClaw or Cloudflare)
- LLM-editable (agents can fork and modify them)
@@ -659,7 +664,7 @@ Rules:
│ │ │ (coming soon) │
│ 11 tools │ │ │
│ versioning │ │ REST API │
- │ webhooks │ │ Webhook recv │
+ │ run server │ │ /run endpoint │
└──────┬──────┘ └────────────────┘
│
┌──────▼──────────────────────────────┐
diff --git a/openclaw.plugin.json b/openclaw.plugin.json
index 79d48e9..f548ddf 100644
--- a/openclaw.plugin.json
+++ b/openclaw.plugin.json
@@ -34,6 +34,6 @@
"memoryDir": { "label": "Memory Directory" },
"stateDir": { "label": "Flow State Directory" },
"maxNodeDurationMs": { "label": "Node Timeout (ms)", "placeholder": "30000" },
- "serve": { "label": "Webhook Server", "help": "Start an HTTP server for triggering flows via webhook. Requires at least port." }
+ "serve": { "label": "Flow HTTP Server", "help": "Start an HTTP server that runs flows on POST. Requires at least port. Endpoint: POST ///run with the JSON body as the flow's inputs." }
}
}
diff --git a/package.json b/package.json
index 76cbe1e..c5a759c 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@clawnify/clawflow",
- "version": "0.9.7",
+ "version": "1.0.0",
"description": "The n8n for agents. A declarative, AI-native workflow format that agents can read, write, and run.",
"type": "module",
"main": "./dist/index.js",
@@ -20,6 +20,7 @@
"test": "node --import tsx --test tests/*.test.ts",
"lint": "tsc --noEmit",
"clean": "rm -rf dist",
+ "migrate": "tsx scripts/migrate-to-1.0.ts",
"prepublishOnly": "npm run build"
},
"openclaw": {
@@ -41,6 +42,7 @@
"dist/**/*.d.ts.map",
"index.ts",
"src/**/*.ts",
+ "scripts/migrate-to-1.0.ts",
"skills/**/*",
"openclaw.plugin.json",
"README.md",
diff --git a/scripts/migrate-to-1.0.ts b/scripts/migrate-to-1.0.ts
new file mode 100644
index 0000000..4449b6b
--- /dev/null
+++ b/scripts/migrate-to-1.0.ts
@@ -0,0 +1,245 @@
+#!/usr/bin/env node
+/**
+ * ClawFlow 0.x → 1.0 migration.
+ *
+ * Rewrites flow definitions in-place:
+ * - Removes the top-level `trigger` block (now a Clawnify-side concern)
+ * - Renames `{{ trigger.X }}` → `{{ inputs.X }}` in every string field
+ * - Renames bare path values `"trigger.X"` → `"inputs.X"` in node.input,
+ * loop.over, branch.on, condition.if (which take dotted state paths)
+ *
+ * Does not auto-generate an `inputs:` declaration block. Declared inputs are
+ * left as a deliberate, human-curated upgrade — extras pass through at
+ * runtime, and missing-required checks only kick in when you opt in.
+ *
+ * Usage:
+ * npx tsx scripts/migrate-to-1.0.ts <path> [--dry]
+ * tsx scripts/migrate-to-1.0.ts ~/.openclaw/workspace/flows
+ * tsx scripts/migrate-to-1.0.ts path/to/flow.json --dry
+ */
+
+import * as fs from "fs";
+import * as path from "path";
+
+interface MigrateResult {
+ file: string;
+ changed: boolean;
+ removedTrigger: boolean;
+ templateRewrites: number;
+ pathRewrites: number;
+}
+
+function rewriteString(s: string, counters: { templates: number; paths: number }): string {
+ let out = s;
+
+ // {{ trigger.X.Y | filter }} → {{ inputs.X.Y | filter }}
+ // Only rewrite when "trigger" is the top-level identifier inside the {{ }}.
+ out = out.replace(
+ /\{\{\s*trigger(\.[\w.]+)?(\s*\|\s*\w+)?\s*\}\}/g,
+ (_match: string, tail: string | undefined, filter: string | undefined) => {
+ counters.templates += 1;
+ return `{{ inputs${tail ?? ""}${filter ?? ""} }}`;
+ },
+ );
+
+ // {{ trigger[*].field }} → {{ inputs[*].field }}
+ out = out.replace(
+ /\{\{\s*trigger(\[\*\](?:\.[\w.]+)?)\s*\}\}/g,
+ (_match: string, tail: string) => {
+ counters.templates += 1;
+ return `{{ inputs${tail} }}`;
+ },
+ );
+
+ return out;
+}
+
+/**
+ * Bare dotted-path fields that the runtime resolves against state. These take
+ * raw paths like "trigger.x" (no template braces). Found on: AiNode.input,
+ * AgentNode.input, CodeNode.input, LoopNode.over, BranchNode.on,
+ * ConditionNode.if (within JS expressions). We rewrite the leading `trigger.`
+ * → `inputs.` and bare `trigger` → `inputs`.
+ */
+function rewriteBarePath(s: string, counters: { templates: number; paths: number }): string {
+ // Quick reject: if the string contains `{{`, treat as template-only — those
+ // are handled by rewriteString. Bare-path fields are pure dotted paths.
+ if (s.includes("{{")) return s;
+ if (s === "trigger") {
+ counters.paths += 1;
+ return "inputs";
+ }
+ if (s.startsWith("trigger.")) {
+ counters.paths += 1;
+ return "inputs." + s.slice("trigger.".length);
+ }
+ return s;
+}
+
+/**
+ * In ConditionNode.if, the body is a JS-ish expression that may reference
+ * `trigger.X` as an identifier. Rewrite identifier-prefixed `trigger.` → `inputs.`.
+ */
+function rewriteExpression(s: string, counters: { templates: number; paths: number }): string {
+ // Match `trigger` as a whole word followed by `.` or end. Avoids hitting
+ // strings inside literals — but for safety we also skip quoted regions.
+ const regions: { start: number; end: number }[] = [];
+ const literalRe = /'[^']*'|"[^"]*"/g;
+ let m: RegExpExecArray | null;
+ while ((m = literalRe.exec(s)) !== null) {
+ regions.push({ start: m.index, end: m.index + m[0].length });
+ }
+ const inLiteral = (i: number) =>
+ regions.some((r) => i >= r.start && i < r.end);
+
+ return s.replace(/\btrigger\b/g, (match: string, offset: number) => {
+ if (inLiteral(offset)) return match;
+ counters.paths += 1;
+ return "inputs";
+ });
+}
+
+const BARE_PATH_FIELDS = new Set(["input", "over", "on"]);
+const EXPRESSION_FIELDS = new Set(["if"]);
+
+function migrateValue(
+ value: unknown,
+ counters: { templates: number; paths: number },
+ fieldName?: string,
+): unknown {
+ if (typeof value === "string") {
+ if (fieldName && BARE_PATH_FIELDS.has(fieldName)) {
+ return rewriteString(rewriteBarePath(value, counters), counters);
+ }
+ if (fieldName && EXPRESSION_FIELDS.has(fieldName)) {
+ return rewriteString(rewriteExpression(value, counters), counters);
+ }
+ return rewriteString(value, counters);
+ }
+ if (Array.isArray(value)) {
+ return value.map((v) => migrateValue(v, counters, fieldName));
+ }
+ if (value !== null && typeof value === "object") {
+ const out: Record<string, unknown> = {};
+ for (const [k, v] of Object.entries(value)) {
+ out[k] = migrateValue(v, counters, k);
+ }
+ return out;
+ }
+ return value;
+}
+
+function migrateFlowDef(def: Record<string, unknown>): {
+ next: Record<string, unknown>;
+ removedTrigger: boolean;
+ templateRewrites: number;
+ pathRewrites: number;
+} {
+ const counters = { templates: 0, paths: 0 };
+ const removedTrigger = "trigger" in def;
+
+ // Drop top-level trigger field. Don't migrate it into `inputs:` — declarations
+ // are an opt-in upgrade.
+ const { trigger: _trigger, ...rest } = def;
+
+ const migratedNodes = migrateValue(rest.nodes, counters);
+
+ const next: Record<string, unknown> = { ...rest, nodes: migratedNodes };
+
+ return {
+ next,
+ removedTrigger,
+ templateRewrites: counters.templates,
+ pathRewrites: counters.paths,
+ };
+}
+
+function migrateFile(file: string, dry: boolean): MigrateResult {
+ const raw = fs.readFileSync(file, "utf8");
+ const def = JSON.parse(raw) as Record<string, unknown>;
+
+ const { next, removedTrigger, templateRewrites, pathRewrites } = migrateFlowDef(def);
+ const changed = removedTrigger || templateRewrites > 0 || pathRewrites > 0;
+
+ if (changed && !dry) {
+ fs.writeFileSync(file, JSON.stringify(next, null, 2) + "\n");
+ }
+
+ return { file, changed, removedTrigger, templateRewrites, pathRewrites };
+}
+
+function walk(target: string): string[] {
+ const stat = fs.statSync(target);
+ if (stat.isFile()) return target.endsWith(".json") ? [target] : [];
+ const out: string[] = [];
+ for (const entry of fs.readdirSync(target)) {
+ const p = path.join(target, entry);
+ const s = fs.statSync(p);
+ if (s.isDirectory()) {
+ // Skip versions directory — those are immutable snapshots; users should
+ // re-publish from a migrated draft.
+ if (entry === ".clawflow" || entry === "node_modules") continue;
+ out.push(...walk(p));
+ } else if (entry.endsWith(".json")) {
+ out.push(p);
+ }
+ }
+ return out;
+}
+
+function main(): void {
+ const args = process.argv.slice(2);
+ const dry = args.includes("--dry");
+ const targets = args.filter((a) => !a.startsWith("--"));
+
+ if (targets.length === 0) {
+ console.error("usage: migrate-to-1.0 ... [--dry]");
+ process.exit(2);
+ }
+
+ const files: string[] = [];
+ for (const t of targets) {
+ const abs = path.resolve(t);
+ if (!fs.existsSync(abs)) {
+ console.error(`not found: ${abs}`);
+ process.exit(2);
+ }
+ files.push(...walk(abs));
+ }
+
+ const results: MigrateResult[] = [];
+ for (const file of files) {
+ try {
+ results.push(migrateFile(file, dry));
+ } catch (err) {
+ console.error(`skip ${file}: ${err instanceof Error ? err.message : String(err)}`);
+ }
+ }
+
+ const changed = results.filter((r) => r.changed);
+ const totalTemplates = changed.reduce((s, r) => s + r.templateRewrites, 0);
+ const totalPaths = changed.reduce((s, r) => s + r.pathRewrites, 0);
+ const triggersDropped = changed.filter((r) => r.removedTrigger).length;
+
+ console.log(`${dry ? "DRY RUN — " : ""}migrated ${changed.length}/${results.length} flow files`);
+ console.log(` template rewrites: ${totalTemplates}`);
+ console.log(` path rewrites: ${totalPaths}`);
+ console.log(` trigger blocks dropped: ${triggersDropped}`);
+
+ if (changed.length > 0) {
+ console.log("\nfiles changed:");
+ for (const r of changed) {
+ const parts: string[] = [];
+ if (r.removedTrigger) parts.push("removed trigger");
+ if (r.templateRewrites > 0) parts.push(`${r.templateRewrites} template${r.templateRewrites === 1 ? "" : "s"}`);
+ if (r.pathRewrites > 0) parts.push(`${r.pathRewrites} path${r.pathRewrites === 1 ? "" : "s"}`);
+ console.log(` ${r.file} — ${parts.join(", ")}`);
+ }
+ }
+
+ if (dry) {
+ console.log("\n(dry run — no files written. re-run without --dry to apply.)");
+ }
+}
+
+main();
diff --git a/skills/flow/SKILL.md b/skills/flow/SKILL.md
index e8511fe..535fd56 100644
--- a/skills/flow/SKILL.md
+++ b/skills/flow/SKILL.md
@@ -58,6 +58,37 @@ A flow is JSON with a `flow` name, an optional `env` block, and a `nodes` array.
- `do: condition` for boolean if/else, `do: branch` for multi-way value matching — both run inline sub-flows and reconverge
- Model shorthands: `fast` (Gemini 3 Flash), `smart` (Claude Sonnet 4.6), `best` (Minimax M2.5)
+### Declaring inputs
+
+Flows can declare the input fields they expect via the optional `inputs` block.
+A flow is **trigger-agnostic** — the runtime payload is just JSON that the
+caller (CLI, webhook server, parent flow, dashboard) supplies. Inside the flow,
+that payload is reachable as `{{ inputs.* }}` and `state.inputs` (in code nodes).
+
+```json
+{
+ "flow": "support-triage",
+ "inputs": {
+ "body": { "type": "string", "required": true, "description": "Ticket body text" },
+ "id": { "type": "string", "required": true },
+ "vip": { "type": "boolean" }
+ },
+ "nodes": [ ... ]
+}
+```
+
+Rules:
+
+- The `inputs` block is **optional**. When omitted, the flow accepts any
+ payload (anything-goes mode). Templates `{{ inputs.X }}` resolve at runtime.
+- When present, every entry marked `required: true` must be in the payload at
+ flow start. Missing required inputs fail the flow before any node executes.
+- Extra keys in the payload that aren't declared **pass through** and remain
+ reachable via `{{ inputs.* }}` — webhook envelopes can evolve without forcing
+ every flow to declare every new field.
+- When you **do** declare inputs, the validator catches typos like
+ `{{ inputs.email_too }}` (when only `email_to` is declared) at load time.
+
### Environment variables
Flows can declare required and optional env vars via the `env` field. Three value types:
@@ -94,10 +125,10 @@ Access via `{{ env.VAR_NAME }}` in any template field. `process.env` always take
Any string field supports `{{ path.to.value }}` interpolation. The top-level key is always the **`output` field value**, not the node name:
```
-{{ trigger.body }} — initial input (trigger is always available)
+{{ inputs.body }} — initial payload (inputs is always available)
{{ env.API_KEY }} — environment variable (env is always available)
{{ classification.category }} — node with output: "classification" → access .category
-{{ trigger.user.email }} — nested dotted path from trigger
+{{ inputs.user.email }} — nested dotted path from the input payload
```
**Filters:** Use `{{ value | filter }}` to transform values inline:
@@ -138,7 +169,7 @@ AI nodes support an `attachments` field — an array of file paths or URLs sent
"name": "analyze-receipt",
"do": "ai",
"prompt": "Extract the total and vendor name from this receipt",
- "attachments": ["{{ trigger.receiptPath }}"],
+ "attachments": ["{{ inputs.receiptPath }}"],
"schema": { "total": "number", "vendor": "string" },
"model": "smart",
"output": "extracted"
@@ -333,7 +364,7 @@ EOF
"name": "research_topic",
"do": "agent",
"agentId": "clawflow",
- "task": "Research the latest trends in {{ trigger.topic }}",
+ "task": "Research the latest trends in {{ inputs.topic }}",
"timeout": "240s",
"output": "research"
}
@@ -353,7 +384,7 @@ EOF
{
"name": "draft",
"do": "ai",
- "prompt": "Write a LinkedIn thought leadership post about: {{ trigger.topic }}\n\nTone: professional but conversational. Include a hook, 3-5 key points, and a question to drive engagement. Add 3 relevant hashtags.",
+ "prompt": "Write a LinkedIn thought leadership post about: {{ inputs.topic }}\n\nTone: professional but conversational. Include a hook, 3-5 key points, and a question to drive engagement. Add 3 relevant hashtags.",
"schema": {
"post": "string",
"hook": "string",
@@ -375,7 +406,7 @@ EOF
{
"name": "research",
"do": "ai",
- "prompt": "Research the topic '{{ trigger.topic }}' and list 5 key insights",
+ "prompt": "Research the topic '{{ inputs.topic }}' and list 5 key insights",
"schema": { "insights": "string[]" },
"model": "smart",
"output": "research"
@@ -415,7 +446,7 @@ EOF
{
"name": "parse",
"do": "ai",
- "prompt": "Extract items from: {{ trigger.text }}",
+ "prompt": "Extract items from: {{ inputs.text }}",
"schema": { "items": [{ "type": "string", "name": "string" }] },
"model": "smart",
"output": "parsed"
@@ -444,7 +475,7 @@ EOF
{
"name": "notify",
"do": "agent",
- "task": "Send email to {{ trigger.email }} with these attachments:\n{{ process_items[*].outPath }}"
+ "task": "Send email to {{ inputs.email }} with these attachments:\n{{ process_items[*].outPath }}"
}
]
}
@@ -460,7 +491,7 @@ EOF
"name": "extract",
"do": "ai",
"prompt": "Extract all line items, totals, and vendor info from this invoice",
- "attachments": ["{{ trigger.invoicePath }}"],
+ "attachments": ["{{ inputs.invoicePath }}"],
"schema": {
"vendor": "string",
"date": "string",
@@ -533,8 +564,8 @@ flows/
## Reading and discovering flows
-- `flow_list` — lists all flows with their description, expected inputs, trigger config, and published version info
-- `flow_read file: "my-flow"` — full definition with expected trigger inputs (extracted from `{{ trigger.* }}` templates) and available versions
+- `flow_list` — lists all flows with their description, declared `inputs:` block, and published version info
+- `flow_read file: "my-flow"` — full definition; response includes the declared `inputs:` block plus a best-effort `_expectedInputs` list extracted from `{{ inputs.* }}` templates, and available versions
- `flow_read file: "my-flow" node: "classify"` — inspect a single node (searches nested structures)
**Always use `flow_read` before running an unfamiliar flow** to understand what inputs it expects.
diff --git a/src/core/runner.ts b/src/core/runner.ts
index 2f4de73..97a1cb0 100644
--- a/src/core/runner.ts
+++ b/src/core/runner.ts
@@ -110,14 +110,32 @@ export class FlowRunner {
status: "failed",
flowName: flow.flow ?? "unknown",
instanceId: id,
- state: { trigger: input },
+ state: { inputs: input },
trace: [],
error: `Validation failed:\n${messages.join("\n")}`,
};
}
const id = instanceId ?? crypto.randomUUID();
- const state: FlowState = { trigger: input };
+ const state: FlowState = { inputs: input };
+
+ // Validate declared inputs against the actual payload before any node runs.
+ if (flow.inputs && typeof flow.inputs === "object") {
+ const payload = (input && typeof input === "object" ? input : {}) as Record<string, unknown>;
+ const missing: string[] = [];
+ for (const [k, spec] of Object.entries(flow.inputs)) {
+ if (spec?.required && !(k in payload)) missing.push(k);
+ }
+ if (missing.length > 0) {
+ const error = `Missing required input${missing.length > 1 ? "s" : ""}: ${missing.join(", ")}`;
+ this.store.create(id, flow.flow, state);
+ this.store.update(id, { status: "failed", error });
+ return {
+ ok: false, status: "failed", flowName: flow.flow ?? "unknown",
+ instanceId: id, state, trace: [], error,
+ };
+ }
+ }
// Create state record first — ensures a state file exists even if env resolution fails
this.store.create(id, flow.flow, state);
@@ -1378,7 +1396,7 @@ export class FlowRunner {
/**
* Wrap an object-style input with a Proxy that throws a descriptive error
* when the code accesses a property that doesn't exist, showing what keys
- * ARE available in input and in state.trigger.
+ * ARE available in input and in state.inputs.
*/
private wrapWithDiagnosticProxy(
obj: Record<string, unknown>,
@@ -1386,9 +1404,9 @@ export class FlowRunner {
state: FlowState,
): Record<string, unknown> {
const inputKeys = Object.keys(obj);
- const triggerKeys =
- state.trigger && typeof state.trigger === "object"
- ? Object.keys(state.trigger as Record<string, unknown>)
+ const flowInputKeys =
+ state.inputs && typeof state.inputs === "object"
+ ? Object.keys(state.inputs as Record<string, unknown>)
: [];
const stateKeys = Object.keys(state);
@@ -1404,10 +1422,10 @@ export class FlowRunner {
}
const parts: string[] = [];
parts.push(`'${p}' is not a key in input. Input keys: [${inputKeys.join(", ")}].`);
- if (triggerKeys.includes(p)) {
- parts.push(`Did you mean state.trigger.${p}? It exists in trigger.`);
- } else if (triggerKeys.length > 0) {
- parts.push(`Trigger keys: [${triggerKeys.join(", ")}].`);
+ if (flowInputKeys.includes(p)) {
+ parts.push(`Did you mean state.inputs.${p}? It exists in inputs.`);
+ } else if (flowInputKeys.length > 0) {
+ parts.push(`Flow input keys: [${flowInputKeys.join(", ")}].`);
}
if (stateKeys.length > 0) {
parts.push(`State keys: [${stateKeys.join(", ")}].`);
@@ -1427,12 +1445,12 @@ export class FlowRunner {
} else {
parts.push("input is " + (input === undefined ? "undefined" : String(input)) + ".");
}
- const triggerKeys =
- state.trigger && typeof state.trigger === "object"
- ? Object.keys(state.trigger as Record<string, unknown>)
+ const flowInputKeys =
+ state.inputs && typeof state.inputs === "object"
+ ? Object.keys(state.inputs as Record<string, unknown>)
: [];
- if (triggerKeys.length > 0) {
- parts.push(`Trigger keys: [${triggerKeys.join(", ")}].`);
+ if (flowInputKeys.length > 0) {
+ parts.push(`Flow input keys: [${flowInputKeys.join(", ")}].`);
}
parts.push(`State keys: [${Object.keys(state).join(", ")}].`);
return parts.join(" ");
diff --git a/src/core/serve.ts b/src/core/serve.ts
index 6eb9312..7600134 100644
--- a/src/core/serve.ts
+++ b/src/core/serve.ts
@@ -4,15 +4,16 @@ import * as path from "path";
import type { FlowDefinition, ServeConfig } from "./types.js";
import type { FlowRunner } from "./runner.js";
-// ---- Webhook Server -------------------------------------------------------------
-// Lightweight HTTP server for triggering flows via POST requests.
-// Follows the same serve.port + serve.path pattern as clawvoice.
+// ---- Flow Server ----------------------------------------------------------------
+// Lightweight HTTP server that runs flows on POST. Trigger semantics (webhooks,
+// cron, manual UI) live in the calling platform, not the flow format — this
+// server is a generic invocation endpoint.
//
// Endpoints:
-// POST /:basePath/:flowName/webhook — trigger a flow with JSON body as input
-// GET /:basePath/health — health check
+// POST /:basePath/:flowName/run — run a flow with the JSON body as inputs
+// GET /:basePath/health — health check
-export interface WebhookServerOpts {
+export interface FlowServerOpts {
runner: FlowRunner;
serve: ServeConfig;
logger?: {
function readBody(req: http.IncomingMessage): Promise<string> {
});
}
-export function startWebhookServer(opts: WebhookServerOpts): http.Server {
+export function startFlowServer(opts: FlowServerOpts): http.Server {
if (activeServer) return activeServer;
const { runner, serve, logger } = opts;
@@ -103,11 +104,11 @@ export function startWebhookServer(opts: WebhookServerOpts): http.Server {
return;
}
- // Webhook trigger: POST /:basePath/:flowName/webhook
- const webhookPattern = new RegExp(
- `^${basePath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}/([a-zA-Z0-9_-]+)/webhook$`,
+ // Run a flow: POST /:basePath/:flowName/run
+ const runPattern = new RegExp(
+ `^${basePath.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}/([a-zA-Z0-9_-]+)/run$`,
);
- const match = pathname.match(webhookPattern);
+ const match = pathname.match(runPattern);
if (!match || req.method !== "POST") {
json(res, 404, { error: "Not found" });
@@ -123,20 +124,12 @@ export function startWebhookServer(opts: WebhookServerOpts): http.Server {
return;
}
- // Validate that the flow declares a webhook trigger
- if (!flowDef.trigger || flowDef.trigger.on !== "webhook") {
- json(res, 400, {
- error: `Flow "${flowName}" does not declare trigger.on: "webhook"`,
- });
- return;
- }
-
- // Parse request body
- let input: unknown = {};
+ // Parse request body — entire body becomes the flow's inputs payload.
+ let inputs: unknown = {};
const rawBody = await readBody(req);
if (rawBody) {
try {
- input = JSON.parse(rawBody);
+ inputs = JSON.parse(rawBody);
} catch {
json(res, 400, { error: "Invalid JSON body" });
return;
@@ -145,13 +138,13 @@ export function startWebhookServer(opts: WebhookServerOpts): http.Server {
// Fire-and-forget: start the flow, return immediately with instanceId
const instanceId = crypto.randomUUID();
- log.info(`[clawflow] webhook → ${flowName} (${instanceId})`);
+ log.info(`[clawflow] run → ${flowName} (${instanceId})`);
// Respond 202 before the flow runs
json(res, 202, { ok: true, instanceId, flow: flowName });
// Run asynchronously — don't block the response
- runner.run(flowDef, input, instanceId).then((result) => {
+ runner.run(flowDef, inputs, instanceId).then((result) => {
if (!result.ok) {
log.error(
`[clawflow] flow "${flowName}" (${instanceId}) failed: ${result.error ?? "unknown error"}`,
@@ -166,7 +159,7 @@ export function startWebhookServer(opts: WebhookServerOpts): http.Server {
});
} catch (err) {
log.error(
- `[clawflow] webhook error: ${err instanceof Error ? err.message : String(err)}`,
+ `[clawflow] run error: ${err instanceof Error ? err.message : String(err)}`,
);
json(res, 500, { error: "Internal server error" });
}
@@ -187,7 +180,7 @@ export function startWebhookServer(opts: WebhookServerOpts): http.Server {
server.listen(serve.port, () => {
log.info(
- `[clawflow] webhook server listening on :${serve.port}${basePath}/:flowName/webhook`,
+ `[clawflow] flow server listening on :${serve.port}${basePath}/:flowName/run`,
);
});
diff --git a/src/core/transpile.ts b/src/core/transpile.ts
index 2a081d0..448ddaa 100644
--- a/src/core/transpile.ts
+++ b/src/core/transpile.ts
@@ -122,7 +122,7 @@ function resolveTemplate(state: Record, template: string): stri
export class ${className} extends WorkflowEntrypoint {
async run(event: WorkflowEvent, step: WorkflowStep) {
- const state: Record<string, unknown> = { trigger: event.payload };
+ const state: Record<string, unknown> = { inputs: event.payload };
${nodeCode}
diff --git a/src/core/types.ts b/src/core/types.ts
index 9b2518e..91c3b32 100644
--- a/src/core/types.ts
+++ b/src/core/types.ts
@@ -13,16 +13,24 @@ export interface FlowDefinition {
flow: string; // unique name, e.g. "triage-support-ticket"
version?: string; // semver, e.g. "1.0.0"
description?: string;
- trigger?: FlowTrigger;
+ /**
+ * Declared inputs the flow expects. Optional: when omitted, the flow accepts
+ * any payload (anything-goes mode). When present, required inputs must be
+ * supplied at runtime or the flow fails before any node executes. Extra
+ * undeclared keys in the payload pass through and are reachable via
+ * {{ inputs.* }} but are not statically checked.
+ */
+ inputs?: Record<string, InputSpec>;
/** Environment variables the flow expects. Values are defaults; null means required (runtime must provide). */
env?: Record<string, string | null>;
nodes: FlowNode[];
}
-export interface FlowTrigger {
- on: "webhook" | "cron" | "manual" | "event" | string;
- from?: string; // source label e.g. "helpdesk"
- schedule?: string; // cron expression if on: cron
+export interface InputSpec {
+ type?: "string" | "number" | "boolean" | "object" | "array";
+ required?: boolean;
+ description?: string;
+ default?: unknown;
}
// ---- Retry Policy ---------------------------------------------------------------
@@ -265,7 +273,7 @@ export const NODE_KEYS: Record<string, Set<string>> = {
// ---- Runtime Types --------------------------------------------------------------
export interface FlowState {
- trigger?: unknown;
+ inputs?: unknown;
[key: string]: unknown;
}
@@ -377,7 +385,7 @@ export interface PluginConfig {
inferenceFn?: InferenceFn;
/** OpenClaw agent ID for do:agent nodes (e.g. "ops"). Falls back to --local if unset. */
defaultAgent?: string;
- /** Webhook server config — starts an HTTP server for triggering flows externally */
+ /** Optional HTTP server config — exposes a generic run endpoint per flow. */
serve?: ServeConfig;
}
diff --git a/src/core/validate.ts b/src/core/validate.ts
index 2bc91de..31a34c7 100644
--- a/src/core/validate.ts
+++ b/src/core/validate.ts
@@ -129,9 +129,9 @@ function validateNodes(
parentAvailable: Set,
errors: ValidationError[],
): void {
- // Available keys: trigger and env are always available + anything from parent scope
+ // Available keys: inputs and env are always available + anything from parent scope
const available = new Set(parentAvailable);
- available.add("trigger");
+ available.add("inputs");
available.add("env");
for (const node of nodes) {
diff --git a/src/index.ts b/src/index.ts
index 60476f6..d8d00c3 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -10,7 +10,7 @@ export type {
FlowState,
FlowResult,
FlowStatus,
- FlowTrigger,
+ InputSpec,
TraceEntry,
NodeStatus,
RetryPolicy,
@@ -33,6 +33,6 @@ export type {
ExecNode,
} from "./core/types.js";
export { parseDuration, MODEL_MAP, DEFAULT_MODEL, NODE_KEYS } from "./core/types.js";
-export { startWebhookServer } from "./core/serve.js";
-export type { WebhookServerOpts } from "./core/serve.js";
+export { startFlowServer } from "./core/serve.js";
+export type { FlowServerOpts } from "./core/serve.js";
export type { ServeConfig } from "./core/types.js";
diff --git a/src/plugin/index.ts b/src/plugin/index.ts
index a567e14..762a172 100644
--- a/src/plugin/index.ts
+++ b/src/plugin/index.ts
@@ -1,6 +1,6 @@
import { FlowRunner, sendEvent } from "../core/runner.js";
-import { startWebhookServer } from "../core/serve.js";
+import { startFlowServer } from "../core/serve.js";
import { validateFlow } from "../core/validate.js";
import type { FlowDefinition, FlowNode, PluginConfig, BranchNode, ConditionNode, LoopNode, ParallelNode } from "../core/types.js";
@@ -64,10 +64,10 @@ function register(api: PluginApi) {
const runner = new FlowRunner(pluginCfg);
const store = runner.getStore();
- // ---- Webhook server (optional) ------------------------------------------------
+ // ---- Flow server (optional) ---------------------------------------------------
// Skip when spawned as a child agent (CLAWFLOW_NO_SERVE) to avoid port conflicts.
if (pluginCfg.serve && !process.env.CLAWFLOW_NO_SERVE) {
- startWebhookServer({
+ startFlowServer({
runner,
serve: pluginCfg.serve,
logger: api.logger,
@@ -182,11 +182,11 @@ All nodes require "name" and "do". Templates: {{ outputKey.field }}.`,
items: { type: "object", additionalProperties: true },
description: "Array of node definitions",
},
- trigger: {
+ inputs: {
type: "object",
additionalProperties: true,
description:
- 'Trigger configuration: { on: "webhook"|"cron"|"manual"|"event", schedule?, from? }',
+ 'Declared inputs the flow expects. Map of name → { type?, required?, description?, default? }. Optional: when omitted, the flow accepts any payload. When present, required inputs are enforced before any node runs.',
},
env: {
type: "object",
@@ -207,7 +207,7 @@ All nodes require "name" and "do". Templates: {{ outputKey.field }}.`,
flow: string;
description?: string;
nodes: FlowNode[];
- trigger?: FlowDefinition["trigger"];
+ inputs?: FlowDefinition["inputs"];
env?: Record;
version?: string;
},
@@ -243,7 +243,7 @@ All nodes require "name" and "do". Templates: {{ outputKey.field }}.`,
flow: params.flow,
...(params.version && { version: params.version }),
...(params.description && { description: params.description }),
- ...(params.trigger && { trigger: params.trigger }),
+ ...(params.inputs && { inputs: params.inputs }),
...(params.env && { env: params.env }),
nodes: params.nodes,
};
@@ -470,13 +470,13 @@ directory. If the flow file already exists, the restore is rejected.`,
description: `Run an agentic workflow in the clawflow format.
State model:
- The "input" parameter becomes "trigger" in flow state.
- Flow state = { trigger, env?, ...nodeOutputs }.
+ The "input" parameter becomes "inputs" in flow state (i.e. state.inputs).
+ Flow state = { inputs, env?, ...nodeOutputs }.
Each node with "output" adds its result to state (e.g. output: "result" → state.result).
In code nodes: fn(input, state) — "input" is the resolved node.input field, "state" is the full flow state.
- IMPORTANT: trigger contains ALL initial data. If you need different parts of trigger in a code node,
- use object-style input: { "payload": "trigger.payload", "email": "trigger.email_to" }
- or access via state.trigger.field inside the code.
+ IMPORTANT: inputs contains ALL initial data. If you need different parts of inputs in a code node,
+ use object-style input: { "payload": "inputs.payload", "email": "inputs.email_to" }
+ or access via state.inputs.field inside the code.
Node types:
ai — LLM call, structured or freeform. Use schema: for typed output.
@@ -521,7 +521,7 @@ Set version to run a specific published version.`,
input: {
type: "object",
additionalProperties: true,
- description: "Input data, available as trigger.* in the flow",
+ description: "Input data, available as inputs.* in the flow (and at state.inputs in code nodes).",
},
draft: {
type: "boolean",
@@ -836,7 +836,7 @@ Status values: running | completed | paused | waiting | failed | cancelled`,
description: `List all saved flow definitions in the workspace.
Scans the flows directory for .json files and returns a summary of each flow
-including its name, description, trigger, version, node count, and file path.
+including its name, description, declared inputs, version, node count, and file path.
Use this to discover available flows before running or editing them.`,
parameters: {
@@ -896,7 +896,7 @@ Use this to discover available flows before running or editing them.`,
file: string;
flow: string;
description?: string;
- trigger?: unknown;
+ inputs?: FlowDefinition["inputs"];
expectedInputs?: string[];
publishedVersion?: number;
totalVersions?: number;
@@ -909,15 +909,15 @@ Use this to discover available flows before running or editing them.`,
const raw = fs.readFileSync(abs, "utf-8");
const def = JSON.parse(raw) as FlowDefinition;
if (!def.flow || !Array.isArray(def.nodes)) continue;
- const inputs = extractTriggerInputs(def.nodes);
+ const referenced = extractInputRefs(def.nodes);
const flowName = file.replace(/\.json$/, "");
const versions = listVersions(flowName);
flows.push({
file: abs,
flow: def.flow,
...(def.description && { description: def.description }),
- ...(def.trigger && { trigger: def.trigger }),
- ...(inputs.length > 0 && { expectedInputs: inputs }),
+ ...(def.inputs && { inputs: def.inputs }),
+ ...(referenced.length > 0 && { expectedInputs: referenced }),
...(versions.length > 0 && {
publishedVersion: versions[versions.length - 1],
totalVersions: versions.length,
@@ -954,12 +954,13 @@ Use this to discover available flows before running or editing them.`,
// ---- flow_read ----------------------------------------------------------------
/**
- * Recursively extract unique trigger.* paths from any string values in an
- * object tree. This tells agents what input fields a flow expects.
+ * Recursively extract unique inputs.* paths from any string values in an
+ * object tree. This tells agents what input fields a flow references — a
+ * best-effort hint reported alongside (or in the absence of) a declared
+ * inputs block.
*/
- function extractTriggerInputs(obj: unknown): string[] {
+ function extractInputRefs(obj: unknown): string[] {
const paths = new Set();
- const re = /\{\{\s*trigger\.(\w+(?:\.\w+)*)/g;
+ const re = /\{\{\s*inputs\.(\w+(?:\.\w+)*)/g;
function walk(val: unknown): void {
if (typeof val === "string") {
@@ -1012,9 +1013,11 @@ Use this to discover available flows before running or editing them.`,
name: "flow_read",
description: `Read a flow definition from file and return its contents.
-Returns the full flow definition (or a single node if specified) along with
-the list of expected trigger inputs extracted from templates. Use this to
-inspect a flow before running it or to understand what inputs it needs.
+Returns the full flow definition (or a single node if specified). The response
+includes the declared "inputs" block when present, plus a best-effort list of
+input fields referenced by templates (extracted from {{ inputs.* }} usages).
+Use this to inspect a flow before running it or to understand what inputs it
+needs.
Versioning: by default reads the draft (working copy). Set version to read
a specific published version. The response includes available version numbers.`,
@@ -1090,7 +1093,7 @@ a specific published version. The response includes available version numbers.`,
source = "draft";
}
- const inputs = extractTriggerInputs(flowDef.nodes);
+ const referenced = extractInputRefs(flowDef.nodes);
// Single node mode
if (params.node) {
@@ -1105,7 +1108,7 @@ a specific published version. The response includes available version numbers.`,
],
};
}
- const nodeInputs = extractTriggerInputs(found);
+ const nodeInputs = extractInputRefs(found);
const result = {
flow: flowDef.flow,
_source: source,
@@ -1126,7 +1129,7 @@ a specific published version. The response includes available version numbers.`,
_source: source,
_file: abs,
...(versions.length > 0 && { _versions: versions }),
- ...(inputs.length > 0 && { _expectedInputs: inputs }),
+ ...(referenced.length > 0 && { _expectedInputs: referenced }),
};
return {
content: [{ type: "text", text: JSON.stringify(result, null, 2) }],
@@ -1242,7 +1245,7 @@ modify the draft without affecting published versions.`,
description: `Edit nodes in a clawflow definition. Operates on a file or inline flow.
Actions:
- set — set top-level flow fields (description, trigger, env, version)
+ set — set top-level flow fields (description, inputs, env, version)
update — update a node entirely or patch specific fields by node name
add — insert a new node at a position (default: end)
remove — remove a node by name
@@ -1265,7 +1268,7 @@ edit is rejected and errors are returned. For file-based flows, the file is
overwritten with the updated definition on success.
Examples:
- Set flow fields: { action: "set", fields: { description: "New desc", trigger: { on: "webhook" } } }
+ Set flow fields: { action: "set", fields: { description: "New desc", inputs: { ticket_id: { type: "string", required: true } } } }
Update one field: { action: "update", node: "classify", fields: { prompt: "New prompt" } }
Replace full node: { action: "update", node: "classify", replace: { name: "classify", do: "ai", prompt: "..." } }
Add at position: { action: "add", position: 2, nodeDefinition: { name: "step3", do: "code", run: "..." } }
@@ -1307,7 +1310,7 @@ Examples:
fields: {
type: "object",
additionalProperties: true,
- description: "For action=set: top-level flow fields to set (description, trigger, env, version). For action=update: partial field updates to merge into the node.",
+ description: "For action=set: top-level flow fields to set (description, inputs, env, version). For action=update: partial field updates to merge into the node.",
},
replace: {
type: "object",
@@ -1575,7 +1578,7 @@ Examples:
if (!params.fields)
return fail('action "set" requires "fields".');
- const allowed = ["description", "trigger", "env", "version"];
+ const allowed = ["description", "inputs", "env", "version"];
const backup = { ...flowDef };
for (const [key, value] of Object.entries(params.fields)) {
diff --git a/tests/core.test.ts b/tests/core.test.ts
index 614cedc..78f8715 100644
--- a/tests/core.test.ts
+++ b/tests/core.test.ts
@@ -36,7 +36,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code",
nodes: [
- { name: "double", do: "code" as const, run: "input * 2", input: "trigger.x", output: "result" },
+ { name: "double", do: "code" as const, run: "input * 2", input: "inputs.x", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -50,7 +50,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code-state",
nodes: [
- { name: "greet", do: "code" as const, run: "`Hello ${state.trigger.name}`", input: "trigger", output: "greeting" },
+ { name: "greet", do: "code" as const, run: "`Hello ${state.inputs.name}`", input: "inputs", output: "greeting" },
],
};
const runner = new FlowRunner(cfg);
@@ -62,7 +62,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code-multi",
nodes: [
- { name: "calc", do: "code" as const, run: "const x = input + 1;\nreturn x * 2;", input: "trigger.x", output: "result" },
+ { name: "calc", do: "code" as const, run: "const x = input + 1;\nreturn x * 2;", input: "inputs.x", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -75,7 +75,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code-semi",
nodes: [
- { name: "calc", do: "code" as const, run: "const a = input.x; const b = input.y; return a + b;", input: "trigger", output: "result" },
+ { name: "calc", do: "code" as const, run: "const a = input.x; const b = input.y; return a + b;", input: "inputs", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -88,7 +88,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code-frozen-state",
nodes: [
- { name: "mutate", do: "code" as const, run: "state.trigger.x = 999; return 1;", input: "trigger", output: "result" },
+ { name: "mutate", do: "code" as const, run: "state.inputs.x = 999; return 1;", input: "inputs", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -102,7 +102,7 @@ describe("FlowRunner — code node", () => {
const flow: FlowDefinition = {
flow: "test-code-frozen-input",
nodes: [
- { name: "mutate", do: "code" as const, run: "input.x = 999; return 1;", input: "trigger", output: "result" },
+ { name: "mutate", do: "code" as const, run: "input.x = 999; return 1;", input: "inputs", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -171,7 +171,7 @@ describe("FlowRunner — code node diagnostics", () => {
name: "bad_access",
do: "code" as const,
run: "input.email_to",
- input: "trigger.payload",
+ input: "inputs.payload",
output: "result",
},
],
@@ -185,7 +185,7 @@ describe("FlowRunner — code node diagnostics", () => {
assert.match(result.error!, /email_to/);
assert.match(result.error!, /Input keys:/);
assert.match(result.error!, /client/);
- assert.match(result.error!, /state\.trigger\.email_to/);
+ assert.match(result.error!, /state\.inputs\.email_to/);
});
it("catches chained access on missing property via Proxy", async () => {
@@ -196,7 +196,7 @@ describe("FlowRunner — code node diagnostics", () => {
name: "bad_chain",
do: "code" as const,
run: "input.nested.deep",
- input: "trigger.payload",
+ input: "inputs.payload",
output: "result",
},
],
@@ -220,7 +220,7 @@ describe("FlowRunner — code node diagnostics", () => {
name: "good_access",
do: "code" as const,
run: "input.client",
- input: "trigger.payload",
+ input: "inputs.payload",
output: "result",
},
],
@@ -258,7 +258,7 @@ describe("FlowRunner — exec node", () => {
const flow: FlowDefinition = {
flow: "test-exec-template",
nodes: [
- { name: "run-it", do: "exec" as const, command: "echo '{{ trigger.msg }}'", output: "result" },
+ { name: "run-it", do: "exec" as const, command: "echo '{{ inputs.msg }}'", output: "result" },
],
};
const runner = new FlowRunner(cfg);
@@ -463,7 +463,7 @@ describe("FlowRunner — condition node", () => {
nodes: [
{
name: "check", do: "condition" as const,
- if: "trigger.user.role == 'admin'",
+ if: "inputs.user.role == 'admin'",
then: [
{ name: "admin-msg", do: "code" as const, run: "'admin panel'", output: "view" },
],
@@ -492,7 +492,7 @@ describe("FlowRunner — loop node", () => {
flow: "test-loop",
nodes: [
{
- name: "process", do: "loop" as const, over: "trigger.items", as: "item",
+ name: "process", do: "loop" as const, over: "inputs.items", as: "item",
nodes: [
{ name: "transform", do: "code" as const, run: "input.toUpperCase()", input: "item", output: "transformed" },
],
@@ -514,7 +514,7 @@ describe("FlowRunner — loop node", () => {
flow: "test-loop-wildcard",
nodes: [
{
- name: "process_sheets", do: "loop" as const, over: "trigger.sheets", as: "sheet",
+ name: "process_sheets", do: "loop" as const, over: "inputs.sheets", as: "sheet",
nodes: [
{ name: "build-path", do: "code" as const, run: "`/output/foglio_${state.sheet.type}.pdf`", output: "pdfPath" },
],
@@ -570,7 +570,7 @@ describe("FlowRunner — wait for approval (enhanced)", () => {
{ name: "prep", do: "code" as const, run: "({ files: ['/a.pdf', '/b.pdf'] })", output: "data" },
{
name: "review", do: "wait" as const, for: "approval",
- prompt: "Review files for {{ trigger.client }}",
+ prompt: "Review files for {{ inputs.client }}",
preview: "data.files",
output: "approval",
},
@@ -716,7 +716,7 @@ describe("FlowRunner — memory node", () => {
const flow: FlowDefinition = {
flow: "test-memory",
nodes: [
- { name: "save", do: "memory" as const, action: "write", key: "test-key", value: "{{ trigger.data }}" },
+ { name: "save", do: "memory" as const, action: "write", key: "test-key", value: "{{ inputs.data }}" },
{ name: "load", do: "memory" as const, action: "read", key: "test-key", output: "loaded" },
],
};
@@ -847,7 +847,7 @@ describe("template filters", () => {
after(cleanup);
const state = {
- trigger: { body: "hello world" },
+ inputs: { body: "hello world" },
plan: { title: "My Plan", tags: ["a", "b", "c"] },
data: { count: 42, nested: { x: 1, y: 2 }, text: " padded " },
};
@@ -1132,11 +1132,11 @@ describe("validateFlow", () => {
assert.ok(result.error?.includes("Validation failed"));
});
- it("allows trigger refs without prior nodes", () => {
+ it("allows inputs refs without prior nodes", () => {
const flow: FlowDefinition = {
- flow: "trigger-ref",
+ flow: "inputs-ref",
nodes: [
- { name: "greet", do: "ai" as const, prompt: "Hello {{ trigger.name }}", output: "msg" },
+ { name: "greet", do: "ai" as const, prompt: "Hello {{ inputs.name }}", output: "msg" },
],
};
const result = validateFlow(flow);
@@ -1159,7 +1159,7 @@ describe("validateFlow", () => {
const flow: FlowDefinition = {
flow: "exec-ref",
nodes: [
- { name: "run", do: "exec" as const, command: "echo '{{ trigger.x }}'", output: "out" },
+ { name: "run", do: "exec" as const, command: "echo '{{ inputs.x }}'", output: "out" },
],
};
const result = validateFlow(flow);
@@ -1395,7 +1395,7 @@ describe("attachments — unit tests", () => {
name: "analyze",
do: "ai" as const,
prompt: "Analyze",
- attachments: ["{{ trigger.imgPath }}"],
+ attachments: ["{{ inputs.imgPath }}"],
output: "result",
},
],
@@ -1469,7 +1469,7 @@ describe("attachments — unit tests", () => {
name: "analyze",
do: "ai" as const,
prompt: "Check",
- attachments: ["{{ trigger.path }}"],
+ attachments: ["{{ inputs.path }}"],
output: "result",
},
],