use LLMKeyConfig in place of KeyConfig; drop per-config llm_name/llm_provider/base_url fields now provided by the base class

This commit is contained in:
2026-02-12 14:35:27 +08:00
parent 43dad177ab
commit c2cc2628dd
6 changed files with 14 additions and 41 deletions

View File

@@ -4,7 +4,7 @@ import tyro
import os.path as osp
from loguru import logger
from lang_agent.config import KeyConfig
from lang_agent.config import LLMKeyConfig
from lang_agent.components.tool_manager import ToolManager, ToolManagerConfig
from lang_agent.components.prompt_store import build_prompt_store
from lang_agent.base import GraphBase
@@ -20,21 +20,12 @@ from langgraph.graph import StateGraph, START, END
# NOTE: maybe make this into a base_graph_config?
@tyro.conf.configure(tyro.conf.SuppressFixed)
@dataclass
class ReactGraphConfig(KeyConfig):
class ReactGraphConfig(LLMKeyConfig):
_target: Type = field(default_factory=lambda: ReactGraph)
llm_name: str = "qwen-plus"
"""name of llm"""
llm_provider:str = "openai"
"""provider of the llm"""
sys_prompt_f:str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "prompts", "blueberry.txt")
"""path to system prompt"""
base_url:str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
"""base url; could be used to overwrite the baseurl in llm provider"""
pipeline_id: Optional[str] = None
"""If set, load prompts from database (with file fallback)"""

View File

@@ -6,7 +6,7 @@ import time
import asyncio
from loguru import logger
from lang_agent.config import InstantiateConfig, KeyConfig
from lang_agent.config import InstantiateConfig, LLMKeyConfig
from lang_agent.components.tool_manager import ToolManager
from lang_agent.components.prompt_store import build_prompt_store
from lang_agent.components.reit_llm import ReitLLM
@@ -85,18 +85,9 @@ class ToolNode(ToolNodeBase):
@dataclass
class ChattyToolNodeConfig(KeyConfig, ToolNodeConfig):
class ChattyToolNodeConfig(LLMKeyConfig, ToolNodeConfig):
_target: Type = field(default_factory=lambda: ChattyToolNode)
llm_name: str = "qwen-plus"
"""name of llm"""
llm_provider:str = "openai"
"""provider of the llm"""
base_url:str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
"""base url; could be used to overwrite the baseurl in llm provider"""
chatty_sys_prompt_f:str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "route_sys_prompts", "chatty_prompt.txt")
"""path to chatty system prompt"""