Skip to content

Commit 7351675

Browse files
Merge pull request aden-hive#222 from Chrishabh2002/feat/manual-agent-codefirst
Add minimal code-first agent example and isolate core dependencies
2 parents fa5d505 + 715df54 commit 7351675

7 files changed

Lines changed: 166 additions & 7 deletions

File tree

.gitignore

98 Bytes
Binary file not shown.

core/examples/manual_agent.py

Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,122 @@
1+
"""
2+
Minimal Manual Agent Example
3+
----------------------------
4+
This example demonstrates how to build and run an agent programmatically
5+
without using the Claude Code CLI or external LLM APIs.
6+
7+
It uses 'function' nodes to define logic in pure Python, making it perfect
8+
for understanding the core runtime loop:
9+
Setup -> Graph definition -> Execution -> Result
10+
11+
Run with:
12+
PYTHONPATH=core python core/examples/manual_agent.py
13+
"""
14+
15+
import asyncio
16+
import logging
17+
from framework.graph import Goal, NodeSpec, EdgeSpec, GraphSpec, EdgeCondition
18+
from framework.graph.executor import GraphExecutor
19+
from framework.runtime.core import Runtime
20+
21+
# 1. Define Node Logic (Pure Python Functions)
22+
def greet(name: str) -> str:
    """Build a friendly greeting addressed to *name*."""
    return "Hello, {}!".format(name)
25+
26+
def uppercase(greeting: str) -> str:
    """Return *greeting* with every character upper-cased."""
    shouted = greeting.upper()
    return shouted
29+
30+
async def main():
    """Build a two-node function graph and execute it end-to-end.

    Demonstrates the core runtime loop without any LLM calls:
    define a Goal, describe nodes/edges as specs, wire up the Runtime
    and GraphExecutor, register the Python implementations, and run.
    """
    print("🚀 Setting up Manual Agent...")

    # 2. Define the Goal
    # Every agent needs a goal with success criteria.
    goal = Goal(
        id="greet-user",
        name="Greet User",
        description="Generate a friendly uppercase greeting",
        success_criteria=[
            {
                "id": "greeting_generated",
                "description": "Greeting produced",
                "metric": "custom",
                "target": "any",
            }
        ],
    )

    # 3. Define Nodes
    # Nodes describe steps in the process; `function` is the lookup key
    # the executor uses to resolve the Python implementation.
    node1 = NodeSpec(
        id="greeter",
        name="Greeter",
        description="Generates a simple greeting",
        node_type="function",
        function="greet",
        input_keys=["name"],
        output_keys=["greeting"],
    )

    node2 = NodeSpec(
        id="uppercaser",
        name="Uppercaser",
        description="Converts greeting to uppercase",
        node_type="function",
        function="uppercase",
        input_keys=["greeting"],
        output_keys=["final_greeting"],
    )

    # 4. Define Edges
    # Edges define the flow between nodes.
    edge1 = EdgeSpec(
        id="greet-to-upper",
        source="greeter",
        target="uppercaser",
        condition=EdgeCondition.ON_SUCCESS,
    )

    # 5. Create Graph
    # The graph works like a blueprint connecting nodes and edges.
    graph = GraphSpec(
        id="greeting-agent",
        goal_id="greet-user",
        entry_node="greeter",
        terminal_nodes=["uppercaser"],
        nodes=[node1, node2],
        edges=[edge1],
    )

    # 6. Initialize Runtime & Executor
    # Runtime handles state/memory; Executor runs the graph.
    from pathlib import Path
    runtime = Runtime(storage_path=Path("./agent_logs"))
    executor = GraphExecutor(runtime=runtime)

    # 7. Register Function Implementations
    # NOTE(review): the original example registered only under the *node
    # ids* ("greeter"/"uppercaser"), while the NodeSpec `function` fields
    # reference "greet"/"uppercase". Register under both keys so lookup
    # succeeds regardless of which convention the executor uses — confirm
    # the intended key against GraphExecutor and drop the redundant pair.
    executor.register_function("greeter", greet)
    executor.register_function("uppercaser", uppercase)
    executor.register_function("greet", greet)
    executor.register_function("uppercase", uppercase)

    # 8. Execute Agent
    # (Plain string: no placeholders, so no f-prefix needed.)
    print("▶ Executing agent with input: name='Alice'...")

    result = await executor.execute(
        graph=graph,
        goal=goal,
        input_data={"name": "Alice"},
    )

    # 9. Verify Results
    if result.success:
        print("\n✅ Success!")
        print(f"Path taken: {' -> '.join(result.path)}")
        print(f"Final output: {result.output.get('final_greeting')}")
    else:
        print(f"\n❌ Failed: {result.error}")
118+
119+
if __name__ == "__main__":
120+
# Optional: Enable logging to see internal decision flow
121+
# logging.basicConfig(level=logging.INFO)
122+
asyncio.run(main())

core/framework/graph/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
from framework.graph.goal import Goal, SuccessCriterion, Constraint, GoalStatus
44
from framework.graph.node import NodeSpec, NodeContext, NodeResult, NodeProtocol
5-
from framework.graph.edge import EdgeSpec, EdgeCondition
5+
from framework.graph.edge import EdgeSpec, EdgeCondition, GraphSpec
66
from framework.graph.executor import GraphExecutor
77

88
# Flexible execution (Worker-Judge pattern)
@@ -42,6 +42,7 @@
4242
# Edge
4343
"EdgeSpec",
4444
"EdgeCondition",
45+
"GraphSpec",
4546
# Executor (fixed graph)
4647
"GraphExecutor",
4748
# Plan (flexible execution)

core/framework/graph/node.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1123,9 +1123,13 @@ async def execute(self, ctx: NodeContext) -> NodeResult:
11231123
)
11241124

11251125
# Write to output keys
1126-
output = {"result": result}
1126+
output = {}
11271127
if ctx.node_spec.output_keys:
1128-
ctx.memory.write(ctx.node_spec.output_keys[0], result)
1128+
key = ctx.node_spec.output_keys[0]
1129+
output[key] = result
1130+
ctx.memory.write(key, result)
1131+
else:
1132+
output = {"result": result}
11291133

11301134
return NodeResult(success=True, output=output, latency_ms=latency_ms)
11311135

core/framework/llm/__init__.py

Lines changed: 13 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,17 @@
11
"""LLM provider abstraction."""
22

33
from framework.llm.provider import LLMProvider, LLMResponse
4-
from framework.llm.anthropic import AnthropicProvider
5-
from framework.llm.litellm import LiteLLMProvider
64

7-
__all__ = ["LLMProvider", "LLMResponse", "AnthropicProvider", "LiteLLMProvider"]
5+
__all__ = ["LLMProvider", "LLMResponse"]
6+
7+
try:
8+
from framework.llm.anthropic import AnthropicProvider
9+
__all__.append("AnthropicProvider")
10+
except ImportError:
11+
pass
12+
13+
try:
14+
from framework.llm.litellm import LiteLLMProvider
15+
__all__.append("LiteLLMProvider")
16+
except ImportError:
17+
pass

core/framework/llm/litellm.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,10 @@
1010
import json
1111
from typing import Any
1212

13-
import litellm
13+
try:
14+
import litellm
15+
except ImportError:
16+
litellm = None
1417

1518
from framework.llm.provider import LLMProvider, LLMResponse, Tool, ToolUse
1619

@@ -72,6 +75,11 @@ def __init__(
7275
self.api_base = api_base
7376
self.extra_kwargs = kwargs
7477

78+
if litellm is None:
79+
raise ImportError(
80+
"LiteLLM is not installed. Please install it with: pip install litellm"
81+
)
82+
7583
def complete(
7684
self,
7785
messages: list[dict[str, Any]],

docs/getting-started.md

Lines changed: 14 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,20 @@ cd exports/my_agent
5959
PYTHONPATH=core:exports python -m my_agent validate
6060
```
6161

62+
### Option 3: Manual Code-First (Minimal Example)
63+
64+
If you prefer to start with code rather than CLI wizards, check out the manual agent example:
65+
66+
```bash
67+
# View the minimal example
68+
cat core/examples/manual_agent.py
69+
70+
# Run it (no API keys required)
71+
PYTHONPATH=core python core/examples/manual_agent.py
72+
```
73+
74+
This demonstrates the core runtime loop using pure Python functions, skipping the complexity of LLM setup and file-based configuration.
75+
6276
## Project Structure
6377

6478
```

0 commit comments

Comments
 (0)