+
{{ error }}
-
-
-
-
{{ tool.name }}
-
{{ tool.description || '暂无描述' }}
-
-
-
-
-
-
-
{{ detail.name }}
-
{{ detail.description }}
-
-
参数
-
{{ JSON.stringify(detail.parameters, null, 2) }}
-
-
-
+
+
@@ -41,7 +52,6 @@ import { toolsAPI } from '../utils/api.js'
const list = ref([])
const loading = ref(true)
const error = ref('')
-const detail = ref(null)
const fetchData = async () => {
loading.value = true
@@ -55,7 +65,7 @@ const fetchData = async () => {
if (Array.isArray(data[cat])) {
all.push(...data[cat].map(t => {
const func = t.function ? t.function : t
- return { name: func.name, description: func.description, parameters: func.parameters, category: cat, enabled: true }
+ return { name: func.name, description: func.description, parameters: func.parameters, category: cat, enabled: t.enabled !== false }
}))
}
})
@@ -65,44 +75,34 @@ const fetchData = async () => {
finally { loading.value = false }
}
-const showDetail = (tool) => { detail.value = tool }
+const toggleEnabled = async (tool) => {
+ tool.enabled = !tool.enabled
+}
onMounted(fetchData)
diff --git a/luxx/routes/messages.py b/luxx/routes/messages.py
index 3048f17..0de755f 100644
--- a/luxx/routes/messages.py
+++ b/luxx/routes/messages.py
@@ -1,5 +1,6 @@
"""Message routes"""
import json
+from typing import List, Optional
from fastapi import APIRouter, Depends, Response
from fastapi.responses import StreamingResponse
from pydantic import BaseModel
@@ -114,6 +115,8 @@ def send_message(
async def stream_message(
data: MessageCreate,
tools_enabled: bool = True,
+ enabled_tools: Optional[List[str]] = None,
+ thinking_enabled: bool = False,
current_user: User = Depends(get_current_user),
db: Session = Depends(get_db)
):
@@ -141,7 +144,9 @@ async def stream_message(
async for sse_str in chat_service.stream_response(
conversation=conversation,
user_message=data.content,
- tools_enabled=tools_enabled
+ tools_enabled=tools_enabled,
+ enabled_tools=enabled_tools,
+ thinking_enabled=thinking_enabled
):
# Chat service returns raw SSE strings (including done event)
yield sse_str
diff --git a/luxx/services/chat.py b/luxx/services/chat.py
index 9eb1a2c..53d1bb3 100644
--- a/luxx/services/chat.py
+++ b/luxx/services/chat.py
@@ -1,7 +1,7 @@
"""Chat service module"""
import json
import uuid
-from typing import List, Dict, Any, AsyncGenerator
+from typing import List, Dict, Any, AsyncGenerator, Optional
from luxx.models import Conversation, Message
from luxx.tools.executor import ToolExecutor
@@ -97,7 +97,9 @@ class ChatService:
self,
conversation: Conversation,
user_message: str,
- tools_enabled: bool = True
+ tools_enabled: bool = True,
+ enabled_tools: Optional[List[str]] = None,
+ thinking_enabled: bool = False
) -> AsyncGenerator[Dict[str, str], None]:
"""
Streaming response generator
@@ -112,7 +114,12 @@
            "content": json.dumps({"text": user_message, "attachments": []})
        })
-        tools = registry.list_all() if tools_enabled else None
+        # Filter tools by enabled list if provided
+        if enabled_tools is not None and tools_enabled:
+            all_tools = registry.list_all()
+            tools = [t for t in all_tools if t.get("name") in enabled_tools]
+        else:
+            tools = registry.list_all() if tools_enabled else None
llm, provider_max_tokens = get_llm_client(conversation)
model = conversation.model or llm.default_model or "gpt-4"
@@ -150,7 +161,8 @@ class ChatService:
messages=messages,
tools=tools,
temperature=conversation.temperature,
- max_tokens=max_tokens or 8192
+ max_tokens=max_tokens or 8192,
+ thinking_enabled=thinking_enabled or conversation.thinking_enabled
):
# Parse SSE line
# Format: "event: xxx\ndata: {...}\n\n"
diff --git a/luxx/services/llm_client.py b/luxx/services/llm_client.py
index 257f0a9..5dcd6de 100644
--- a/luxx/services/llm_client.py
+++ b/luxx/services/llm_client.py
@@ -78,6 +78,9 @@ class LLMClient:
if "max_tokens" in kwargs:
body["max_tokens"] = kwargs["max_tokens"]
+ if "thinking_enabled" in kwargs and kwargs["thinking_enabled"]:
+ body["thinking_enabled"] = True
+
if tools:
body["tools"] = tools