Compare commits

...

14 Commits

Author SHA1 Message Date
ec6b6e4545 add deepagent to registry 2026-02-28 15:50:17 +08:00
d7085676bc quantum deepagent implementation 2026-02-28 15:49:37 +08:00
7be4aa1283 __init__ for fs_backend 2026-02-28 15:49:25 +08:00
a832178810 a virtual in memory filesystem backend 2026-02-28 15:49:10 +08:00
b6f26c1e1b add base filesystem backend 2026-02-28 15:48:58 +08:00
10562a98af add deepagent system prompt 2026-02-28 15:41:28 +08:00
851284b0a3 comment out sleep 2026-02-28 10:59:24 +08:00
d9a654170c save arch img to frontend 2026-02-28 10:59:11 +08:00
49405782af add default value for streamable_tags 2026-02-28 10:56:23 +08:00
18d75e754b add optional abstract class 2026-02-28 10:49:07 +08:00
b1e6b0f7fc make pattern consistent 2026-02-28 10:40:02 +08:00
fbd214bea3 shift tabs 2026-02-28 10:22:33 +08:00
9e156943bb remove garbage 2026-02-28 10:16:04 +08:00
cec02b4e98 record api key in create_prompt 2026-02-13 16:13:52 +08:00
12 changed files with 235 additions and 161 deletions

View File

@@ -0,0 +1 @@
you are a helpful bot enhanced with skills

View File

@@ -32,9 +32,17 @@ class LangToolBase(ABC):
class GraphBase(ABC):
workflow: CompiledStateGraph # the main workflow
streamable_tags: List[List[str]] # which llm to stream outputs; see routing.py for complex usage
streamable_tags: List[List[str]] = [["main_llm"]] # which llm to stream outputs; see routing.py for complex usage
textreleaser_delay_keys: List[str] = (None, None) # use to control when to start streaming; see routing.py for complex usage
def _build_modules(self):
# NOTE: optional abc
pass
def _build_graph(self):
# NOTE: optional abc
pass
def _stream_result(self, *nargs, **kwargs):
def text_iterator():

View File

@@ -34,13 +34,14 @@ class Calculator(LangToolBase):
def calculator(self, python_expression: str) -> dict:
    """For mathematical calculation, always use this tool to calculate the result of a python expression. You can use 'math' or 'random' directly, without 'import'."""
    # SECURITY NOTE: eval on model-supplied input is intentionally sandbox-light
    # here (globals limited to math/random, but builtins remain reachable) —
    # do not expose this tool to untrusted callers without hardening.
    result = eval(python_expression, {"math": math, "random": random})
    return {"success": True, "result": result}
async def calculator_async(self, python_expression: str) -> dict:
"""Async version: runs eval in a thread pool to avoid blocking the event loop."""
async with get_semaphore():
await asyncio.sleep(5) # Simulate delay for testing
# await asyncio.sleep(20) # Simulate delay for testing
result = await asyncio.to_thread(
eval, python_expression, {"math": math, "random": random}
)

View File

@@ -0,0 +1,11 @@
import tyro
# Re-exported so callers can `from lang_agent.fs_bkends import BaseFilesystemBackend`.
from lang_agent.fs_bkends.base import BaseFilesystemBackend
from lang_agent.fs_bkends.statebk import StateBk, StateBkConfig

# Registry of available filesystem-backend configs, keyed by CLI subcommand name.
statebk_dict = {
    "statebk": StateBkConfig(),
}
# tyro subcommand union over the registry; prefix_names=False keeps flag names short.
statebk_union = tyro.extras.subcommand_type_from_defaults(statebk_dict, prefix_names=False)
# CLI-friendly alias: omit subcommand prefixes and hide fixed fields when parsing.
AnnotatedStateBk = tyro.conf.OmitSubcommandPrefixes[tyro.conf.SuppressFixed[statebk_union]]

View File

@@ -0,0 +1,24 @@
from dataclasses import dataclass, field, is_dataclass
from typing import Any
import tyro
import os.path as osp
from abc import ABC, abstractmethod
class BaseFilesystemBackend(ABC):
    """Abstract interface for filesystem backends used by deep agents.

    Concrete subclasses must populate ``self.backend`` in ``_build_backend``;
    the remaining hooks have no-op defaults that subclasses may override.
    """

    # Concrete backend object (or factory) produced by _build_backend.
    backend: Any

    @abstractmethod
    def _build_backend(self):
        """Construct ``self.backend``; must be implemented by subclasses."""
        pass

    def get_backend(self):
        """Return the backend built by ``_build_backend``."""
        return self.backend

    def get_inf_inp(self):
        """get inference input for deepagent"""
        # Default: nothing extra to merge into the agent's invoke payload.
        return {}

    def get_deepagent_params(self):
        """extra params to pass into the creation of deepagents"""
        # Default: no extra keyword arguments for agent creation.
        return {}

View File

@@ -0,0 +1,70 @@
from dataclasses import dataclass, field, is_dataclass
from typing import Type, TypedDict, Literal, Dict, List, Tuple, Optional
import tyro
import os.path as osp
from abc import ABC, abstractmethod
import glob
from loguru import logger
from deepagents.backends.utils import create_file_data
from deepagents.backends import StateBackend
from lang_agent.config import InstantiateConfig
from lang_agent.fs_bkends import BaseFilesystemBackend
def read_as_utf8(file_path: str):
    """Return the full contents of *file_path* decoded as UTF-8."""
    with open(file_path, "r", encoding="utf-8") as fh:
        contents = fh.read()
    return contents
def build_skill_fs_dict(skill_dir: str, virt_path: str = "/skills"):
    """Collect skill markdown files and map them to virtual file paths.

    Each ``<skill_dir>/<name>/<file>.md`` is read as UTF-8 and stored under
    ``<virt_path>/<name>/<file>.md`` as deepagents file data.

    Args:
        skill_dir: directory containing per-skill subdirectories of .md files.
        virt_path: virtual root under which the skills are exposed.

    Returns:
        dict mapping virtual path -> file data for the agent's filesystem.
    """
    # NOTE(review): glob without recursive=True treats "**" like "*", so only
    # .md files exactly one directory below skill_dir are picked up — confirm
    # this matches the intended skill layout before enabling deeper nesting.
    skill_fs_dict = {}
    for skill_f in sorted(glob.glob(osp.join(skill_dir, "**/*.md"))):
        logger.info(f"loading skill: {skill_f}")
        skill_name = osp.basename(osp.dirname(skill_f))
        virt_file = osp.join(virt_path, skill_name, osp.basename(skill_f))
        skill_fs_dict[virt_file] = create_file_data(read_as_utf8(skill_f))
    return skill_fs_dict
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class StateBkConfig(InstantiateConfig):
    """Config for StateBk: where to find skill files on disk and where they appear at runtime."""
    _target:Type = field(default_factory=lambda:StateBk)
    skills_dir:str = "./assets/skills"
    """path to directory containing skill files"""
    rt_skills_dir:str = "/skills"
    """path to directory with skills in runtime directory"""

    def __post_init__(self):
        # NOTE(review): assert is stripped under `python -O`; raise
        # FileNotFoundError instead if this validation must always run.
        err_msg = f"{self.skills_dir} does not exist"
        assert osp.exists(self.skills_dir), err_msg
class StateBk(BaseFilesystemBackend):
    """State-backed virtual filesystem that preloads skill files for a deep agent."""

    def __init__(self, config: StateBkConfig):
        self.config = config
        self.skills_dict = None
        self._build_backend()

    def _build_backend(self):
        """Load skill files into a virtual-path dict and expose a StateBackend factory."""
        self.skills_dict = build_skill_fs_dict(self.config.skills_dir)

        def make_backend(rt):
            return StateBackend(rt)

        self.backend = make_backend

    def get_backend(self):
        """Return the factory that builds a StateBackend from a runtime object."""
        return self.backend

    def _get_rt_skill_dir(self) -> List[str]:
        """Runtime skill directory, wrapped in a single-element list."""
        return [self.config.rt_skills_dir]

    def get_inf_inp(self):
        """Extra invoke-payload entries: the preloaded skill files."""
        return {"files": self.skills_dict}

    def get_deepagent_params(self):
        """Extra agent-creation kwargs: the runtime skill directories."""
        return {"skills": self._get_rt_skill_dir()}

View File

@@ -4,12 +4,16 @@ from lang_agent.graphs.react import ReactGraphConfig, ReactGraph
from lang_agent.graphs.routing import RoutingConfig, RoutingGraph
from lang_agent.graphs.dual_path import DualConfig, Dual
from lang_agent.graphs.vision_routing import VisionRoutingConfig, VisionRoutingGraph
# from lang_agent.graphs.child_demo import ChildDemoGraphConfig, ChildDemoGraph
from lang_agent.graphs.qt_deepagents import DeepAgentConfig
graph_dict = {
"react": ReactGraphConfig(),
"route": RoutingConfig(),
"dual": DualConfig(),
"vision": VisionRoutingConfig()
"vision": VisionRoutingConfig(),
# "child_demo": ChildDemoGraphConfig(),
"deepagent": DeepAgentConfig()
}
graph_union = tyro.extras.subcommand_type_from_defaults(graph_dict, prefix_names=False)

View File

@@ -1,152 +0,0 @@
from dataclasses import dataclass, field
from typing import Type, TypedDict, Literal, Dict, List, AsyncIterator
import tyro
import re
from pydantic import BaseModel, Field
from loguru import logger
import time
from langchain.chat_models import init_chat_model
from lang_agent.config import LLMNodeConfig
from lang_agent.base import GraphBase
from lang_agent.components.tool_manager import ToolManager, ToolManagerConfig
from lang_agent.graphs.graph_states import State
from langchain.agents import create_agent
from langchain.messages import SystemMessage, HumanMessage
from langchain.tools import tool
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START, END
SYS_PROMPT = """
YOUR ROLE: give a short reply. Do not output special chararacters such as '*, -'. Do not give answer in markdown format. You are called Dream Companion. You are a professional psychological counselor. Your main work is psychological healing and emotional guidance, helping users understand their inner world through dream analysis.
Example:
User:
“I dreamed that I was going on a business trip to Guangzhou. I was sitting on a high-speed train, looking out the window. When we passed a small town, I saw a little dog with a lame leg by the roadside. Its fur was dirty, and it limped along rummaging through trash cans for food. People were passing by, but no one stopped to help it. I desperately wanted to shout for the train to stop so I could get off and help it, but I couldnt make a sound. I could only watch it get farther and farther away, and then I woke up. After waking up, my chest felt heavy and uncomfortable. I couldnt quite explain the feeling— I just kept thinking about how lonely that little dog was, and it really broke my heart.”
Understanding (your reply):
“I can feel the discomfort you had after waking up. Seeing a vulnerable life struggle alone while being unable to help creates a very real sense of helplessness. The fact that you felt such compassion for the dog shows the empathy you carry inside you. That softness isnt weakness—its your ability to feel others pain.”
Analysis (your reply):
“Theres no single correct interpretation of dreams, but we can explore how this one might relate to your current state. Going on a business trip to Guangzhou often symbolizes something youre pushing forward recently—perhaps a work project or a life journey youre facing on your own. The lame little dog can be seen as a projection of a vulnerable self in your subconscious. It may reflect moments when you feel powerless or lonely, or situations in real life where you want to help but cant change the outcome. Wanting to stop the train but being unable to speak mirrors a sense of suppressed feelings—having thoughts or care, but no channel to express them. This dream may be gently reminding you that your helplessness and empathy are both real, and that accepting your limits is also a form of self-compassion.”
Feedback (your reply):
“If youre willing, you might reflect on whether something recently made you feel a similar kind of helplessness. Or think about what could help you feel a little more at ease right now. If youd like, we can sit quietly together for a moment, or talk more whenever youre ready.”
"""
TOOL_SYS_PROMPT = """You are a helpful helper and will use the self_led_control tool"""
@dataclass
class XiaoAiConfig(LLMNodeConfig):
    """Config for the XiaoAi dual-agent graph; extends LLMNodeConfig with tool wiring."""
    _target: Type = field(default_factory=lambda:XiaoAi)  # class instantiated by setup()
    tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)  # tools for the tool agent
class XiaoAi(GraphBase):
    """Dream-counselling chat graph ("Dream Companion").

    Wires two agents: a chat agent (qwen-max) that replies as a counsellor,
    and a tool agent (qwen-flash) that drives the self_led_control tool.
    Output is post-processed to strip markdown/special characters before
    being returned or streamed.
    """

    def __init__(self, config:XiaoAiConfig):
        self.config = config
        self._build_modules()
        self.workflow = self._build_graph()
        # NOTE(review): redundant — _build_modules already sets streamable_tags.
        self.streamable_tags = [["dual_chat_llm"]]

    def _build_modules(self):
        """Build both LLMs, memory, the tool manager and the two agents."""
        self.chat_llm = init_chat_model(model="qwen-max",
                                        model_provider=self.config.llm_provider,
                                        api_key=self.config.api_key,
                                        base_url=self.config.base_url,
                                        temperature=0,
                                        tags=["dual_chat_llm"])
        # Tool agent uses a lighter model; provider is hard-coded to 'openai'
        # here while the chat agent takes it from config — presumably intentional.
        self.tool_llm = init_chat_model(model='qwen-flash',
                                        model_provider='openai',
                                        api_key=self.config.api_key,
                                        base_url=self.config.base_url,
                                        temperature=0,
                                        tags=["dual_tool_llm"])
        self.memory = MemorySaver()
        self.tool_manager: ToolManager = self.config.tool_manager_config.setup()
        # Only the chat agent is checkpointed; the tool agent runs stateless.
        self.chat_agent = create_agent(self.chat_llm, [], checkpointer=self.memory)
        self.tool_agent = create_agent(self.tool_llm, self.tool_manager.get_langchain_tools())
        self.streamable_tags = [["dual_chat_llm"]]

    def _chat_call(self, state:State):
        """Graph node: fire two LED tool calls, then produce the chat reply."""
        # First tool call's return value is unused; only the side effect matters here.
        out = self._agent_call_template(TOOL_SYS_PROMPT, self.tool_agent, state, "use self_led_control to set to white")
        # time.sleep(2.5)
        self._agent_call_template(TOOL_SYS_PROMPT, self.tool_agent, state, "use self_led_control to set to yellow")
        return self._agent_call_template(SYS_PROMPT, self.chat_agent, state)

    def _join(self, state:State):
        # No-op node; not wired into the graph built below.
        return {}

    def _build_graph(self):
        """Compile a single-node graph: START -> chat_call -> END."""
        builder = StateGraph(State)
        builder.add_node("chat_call", self._chat_call)
        builder.add_edge(START, "chat_call")
        builder.add_edge("chat_call", END)
        return builder.compile()

    @staticmethod
    def _remove_special_chars(text: str) -> str:
        """Remove special characters like *, -, #, etc. from text."""
        # Remove markdown-style special characters
        return re.sub(r'[*\-#_`~>|]', '', text)

    async def ainvoke(self, *nargs, as_stream: bool = False, as_raw: bool = False, **kwargs):
        """Async invoke with special character removal from output.

        as_stream: return an async iterator of cleaned chunks instead of a string.
        as_raw: in the non-streaming path, skip the cleaning step.
        """
        if as_stream:
            return self._astream_cleaned(*nargs, **kwargs)
        else:
            result = await super().ainvoke(*nargs, as_stream=False, as_raw=as_raw, **kwargs)
            if as_raw:
                return result
            return self._remove_special_chars(result)

    async def _astream_cleaned(self, *nargs, **kwargs) -> AsyncIterator[str]:
        """Async streaming with special character removal."""
        async for chunk in super()._astream_result(*nargs, **kwargs):
            if isinstance(chunk, list):
                # Message lists for conversation recording — pass through
                yield chunk
                continue
            if not isinstance(chunk, str):
                # Skip non-string, non-list chunks (e.g. dict from tool-call content)
                continue
            cleaned = self._remove_special_chars(chunk)
            if cleaned:
                yield cleaned
if __name__ == "__main__":
    # Sample dream narration for manual experimentation (currently unused below).
    inp = """In the dream, I was on a high-speed train to Guangzhou, looking out the window. When we passed a small town, I saw a little dog with a hurt leg by the road. It was dirty and limping around, digging through trash for food. People walked past it, but no one stopped.
I really wanted the train to stop so I could get off and help, but I couldnt make a sound. I just watched the dog get farther and farther away, and then I woke up. After that, my chest felt really heavy. I couldnt explain why—I just felt sad, thinking about how alone that little dog was."""
    dual:XiaoAi = XiaoAiConfig().setup()
    # (messages payload, runtime config) pair; thread_id keys the checkpointer thread.
    nargs = {"messages": [SystemMessage("you are a helpful bot named jarvis"),
                          HumanMessage("I feel very very sad")]
             }, {"configurable": {"thread_id": "3"}}
    # out = dual.invoke(*nargs)
    # print(out)
    # Drain the stream; chunk handling happens inside the graph's streaming hooks.
    for chunk in dual.invoke(*nargs, as_stream=True):
        continue

View File

@@ -0,0 +1,87 @@
from dataclasses import dataclass, field
from typing import Type, Literal
import tyro
import os.path as osp
from langgraph.checkpoint.memory import MemorySaver
from langgraph.graph import StateGraph, START, END
from langchain_core.messages import SystemMessage, HumanMessage, BaseMessage
from deepagents import create_deep_agent
from lang_agent.utils import make_llm
from lang_agent.components.tool_manager import ToolManager, ToolManagerConfig
from lang_agent.fs_bkends import StateBk, StateBkConfig
from lang_agent.components.prompt_store import build_prompt_store
from lang_agent.graphs.graph_states import State
from lang_agent.config import LLMNodeConfig
from lang_agent.base import GraphBase
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class DeepAgentConfig(LLMNodeConfig):
    """Config for the DeepAgent graph: system prompt file, tools and filesystem backend."""
    _target: Type = field(default_factory=lambda : DeepAgent)
    sys_prompt_f: str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "prompts", "deepagent.txt")
    """path to system prompt"""
    tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)
    file_backend_config: StateBkConfig = field(default_factory=StateBkConfig)

    def __post_init__(self):
        super().__post_init__()
        # NOTE(review): assert is stripped under `python -O`; raise
        # FileNotFoundError instead if this validation must always run.
        assert osp.exists(self.sys_prompt_f), "prompt path does not exist"
class DeepAgent(GraphBase):
    """Graph wrapper around a deepagents agent with a virtual filesystem backend."""

    def __init__(self, config:DeepAgentConfig):
        self.config = config
        self._build_modules()
        self.workflow = self._build_graph()

    def _build_modules(self):
        """Instantiate the LLM, tools, filesystem backend and the deep agent itself."""
        self.tool_man: ToolManager = self.config.tool_manager_config.setup()
        self.file_backend: StateBk = self.config.file_backend_config.setup()
        self.mem = MemorySaver()
        model = make_llm(
            self.config.llm_name,
            self.config.llm_provider,
            api_key=self.config.api_key,
            tags=["main_llm"],
        )
        extra_params = self.file_backend.get_deepagent_params()
        self.deep_agent = create_deep_agent(
            model=model,
            tools=self.tool_man.get_langchain_tools(),
            backend=self.file_backend.get_backend(),
            checkpointer=self.mem,
            **extra_params,
        )
        self.prompt_store = build_prompt_store(
            file_path=self.config.sys_prompt_f, default_key="sys_prompt"
        )
        self.sys_prompt = self.prompt_store.get("sys_prompt")

    def _agent_call(self, state:State):
        """Graph node: forward the conversation (plus backend files) to the deep agent."""
        payload = {
            "messages": [SystemMessage(self.sys_prompt), *self._get_inp_msgs(state)],
            **self.file_backend.get_inf_inp(),
        }
        runtime_config = state["inp"][1]
        result = self.deep_agent.invoke(payload, runtime_config)
        return {"messages": result["messages"]}

    def _build_graph(self):
        """Compile a single-node StateGraph: START -> agent_call -> END."""
        graph = StateGraph(State)
        graph.add_node("agent_call", self._agent_call)
        graph.add_edge(START, "agent_call")
        graph.add_edge("agent_call", END)
        return graph.compile()
if __name__ == "__main__":
    # Smoke test: build the agent and run one turn through the compiled workflow.
    config = DeepAgentConfig()
    deepagent = DeepAgent(config)
    # State carries ("inp") a (payload, runtime-config) pair; thread_id keys the checkpointer.
    deepagent.workflow.invoke({"inp": ({"messages":[SystemMessage("you are a helpful bot enhanced with skills")]}, {"configurable": {"thread_id": '3'}})})

View File

@@ -102,7 +102,7 @@ class ChattyToolNode(ToolNodeBase):
self.tool_key = "[TOOL_OUT]"
self.populate_modules()
self.build_graph()
self.workflow = self.build_graph()
def populate_modules(self):
@@ -218,7 +218,7 @@ class ChattyToolNode(ToolNodeBase):
builder.add_edge("chatty_tool_call", "chatty_handoff_node")
builder.add_edge("chatty_handoff_node", END)
self.workflow = builder.compile()
return builder.compile()
def get_delay_keys(self):
return self.chat_key, self.tool_key

View File

@@ -10,12 +10,15 @@ CREATE TABLE IF NOT EXISTS prompt_sets (
is_active BOOLEAN DEFAULT false,
created_at TIMESTAMPTZ DEFAULT now(),
updated_at TIMESTAMPTZ DEFAULT now(),
list Varchar(255) DEFAULT '' -- tool_set list for client_tool_manager
list Varchar(255) DEFAULT '', -- tool_set list for client_tool_manager
api_key TEXT DEFAULT '' -- provider api key used to run pipeline
);
-- Backward-compatible migration for existing deployments.
ALTER TABLE prompt_sets
ADD COLUMN IF NOT EXISTS graph_id VARCHAR(64);
ALTER TABLE prompt_sets
ADD COLUMN IF NOT EXISTS api_key TEXT DEFAULT '';
UPDATE prompt_sets
SET graph_id = pipeline_id
WHERE graph_id IS NULL;

17
scripts/gen_arch_img.py Normal file
View File

@@ -0,0 +1,17 @@
from lang_agent.graphs import ReactGraphConfig, ReactGraph, RoutingConfig,RoutingGraph
from lang_agent.base import GraphBase
import os.path as osp
from tqdm import tqdm
def main():
    """Render each registered graph's architecture diagram to a PNG.

    Images are written to frontend/assets/images/graph_arch (relative to the
    repository root) so the frontend can display them.
    """
    import os  # local import: only needed for makedirs in this script entry point
    save_dir = osp.join(osp.dirname(osp.dirname(__file__)), "frontend/assets/images/graph_arch")
    # Create the output directory up front so img.save cannot fail on a fresh checkout.
    os.makedirs(save_dir, exist_ok=True)
    # Fix: this list holds config objects (setup() yields the GraphBase), so the
    # previous `confs: GraphBase` annotation was wrong.
    configs: list = [ReactGraphConfig(), RoutingConfig()]
    for conf in tqdm(configs):
        graph: GraphBase = conf.setup()
        img = graph.show_graph(ret_img=True)
        img.save(osp.join(save_dir, f"arch_{conf.__class__.__name__}.png"))


if __name__ == "__main__":
    main()