remove pipeline_id and set_id since using LLMNodeConfig
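
The duplicated prompt-loading (and LLM connection) fields are dropped from the individual graph and pipeline configs because they are now expected to come from the shared base class. LLMNodeConfig itself is not part of this diff, so the following is only a minimal sketch of what it is assumed to declare, with field names and docstrings taken from the lines removed below:

    from dataclasses import dataclass
    from typing import Optional

    @dataclass
    class LLMNodeConfig:
        # Assumed shape of lang_agent.config.LLMNodeConfig; the real class is not shown in this commit.
        llm_name: str = "qwen-plus"
        """name of llm"""

        llm_provider: str = "openai"
        """provider of the llm"""

        base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
        """base url; could be used to overwrite the base url in the llm provider"""

        pipeline_id: Optional[str] = None
        """If set, load prompts from database (with file fallback)"""

        prompt_set_id: Optional[str] = None
        """If set, load from this specific prompt set instead of the active one"""

Subclasses such as DualConfig, ReactGraphConfig, RoutingConfig, VisionRoutingConfig and PipelineConfig then inherit these fields instead of redeclaring them, which is what the hunks below remove.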
@@ -3,7 +3,7 @@ from typing import Type, Callable, List
 import tyro
 import random

-from lang_agent.config import LLMNodeConfig
+from lang_agent.config import LLMKeyConfig
 from lang_agent.pipeline import Pipeline, PipelineConfig

 from langchain.chat_models import init_chat_model
@@ -11,7 +11,7 @@ from langchain_core.messages import BaseMessage, ToolMessage

 @tyro.conf.configure(tyro.conf.SuppressFixed)
 @dataclass
-class ValidatorConfig(LLMNodeConfig):
+class ValidatorConfig(LLMKeyConfig):
     _target: Type = field(default_factory=lambda:Validator)

@@ -51,12 +51,6 @@ TOOL_SYS_PROMPT = """根据用户的心情使用self_led_control改变灯的颜
 class DualConfig(LLMNodeConfig):
     _target: Type = field(default_factory=lambda:Dual)

-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)

 from langchain.tools import tool
@@ -26,12 +26,6 @@ class ReactGraphConfig(LLMNodeConfig):
     sys_prompt_f:str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "prompts", "blueberry.txt")
     """path to system prompt"""

-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)

     def __post_init__(self):
@@ -30,24 +30,9 @@ from langgraph.checkpoint.memory import MemorySaver
 class RoutingConfig(LLMNodeConfig):
     _target: Type = field(default_factory=lambda: RoutingGraph)

-    llm_name: str = "qwen-plus"
-    """name of llm"""
-
-    llm_provider:str = "openai"
-    """provider of the llm"""
-
-    base_url:str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url; could be used to overwrite the baseurl in llm provider"""
-
     sys_promp_dir: str = osp.join(osp.dirname(osp.dirname(osp.dirname(__file__))), "configs", "route_sys_prompts")
     """path to directory or json contantaining system prompt for graphs; Will overwrite systemprompt from xiaozhi if 'chat_prompt' is provided"""

-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ToolManagerConfig)

     tool_node_config: AnnotatedToolNode = field(default_factory=ToolNodeConfig)
@@ -99,18 +99,6 @@ class VisionRoutingConfig(LLMNodeConfig):
     vision_llm_name: str = "qwen-vl-max"
     """LLM for vision/image analysis"""

-    llm_provider: str = "openai"
-    """provider of the llm"""
-
-    base_url: str = "https://dashscope.aliyuncs.com/compatible-mode/v1"
-    """base url for API"""
-
-    pipeline_id: Optional[str] = None
-    """If set, load prompts from database (with hardcoded fallback)"""
-
-    prompt_set_id: Optional[str] = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     tool_manager_config: ToolManagerConfig = field(default_factory=ClientToolManagerConfig)

@@ -64,12 +64,6 @@ class PipelineConfig(LLMNodeConfig):
     port:int = 8588
     """what is my port"""

-    pipeline_id: str = None
-    """If set, load prompts from database (with file fallback)"""
-
-    prompt_set_id: str = None
-    """If set, load from this specific prompt set instead of the active one"""
-
     # graph_config: AnnotatedGraph = field(default_factory=ReactGraphConfig)
     graph_config: AnnotatedGraph = field(default_factory=RoutingConfig)

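
Because these configs are parsed with tyro, the inherited fields should still be settable from the CLI even though the subclasses no longer declare them. A rough usage sketch, assuming LLMNodeConfig provides pipeline_id and prompt_set_id as outlined above (flag spelling follows tyro's default underscore-to-hyphen conversion):

    import tyro

    from lang_agent.pipeline import PipelineConfig  # same import as in the first hunk above

    # Hypothetical invocation; the flags come from the inherited LLMNodeConfig fields,
    # e.g.  python serve.py --pipeline-id abc123 --prompt-set-id demo
    cfg = tyro.cli(PipelineConfig)
    print(cfg.port, cfg.pipeline_id, cfg.prompt_set_id)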