diff --git a/dashboard/src/components/AppSidebar.vue b/dashboard/src/components/AppSidebar.vue index 2424ea5..d9ada9c 100644 --- a/dashboard/src/components/AppSidebar.vue +++ b/dashboard/src/components/AppSidebar.vue @@ -42,6 +42,7 @@ const navItems = [ { path: '/', icon: '🏠', label: '首页' }, { path: '/conversations', icon: '💬', label: '会话' }, { path: '/tools', icon: '🛠️', label: '工具' }, + { path: '/settings', icon: '⚙️', label: '设置' }, { path: '/about', icon: 'ℹ️', label: '关于' } ] diff --git a/dashboard/src/router/index.js b/dashboard/src/router/index.js index 93aacb4..b887e24 100644 --- a/dashboard/src/router/index.js +++ b/dashboard/src/router/index.js @@ -14,6 +14,12 @@ const routes = [ component: () => import('../views/AboutView.vue'), meta: { requiresAuth: true } }, + { + path: '/settings', + name: 'Settings', + component: () => import('../views/SettingsView.vue'), + meta: { requiresAuth: true } + }, { path: '/auth', name: 'Auth', @@ -26,6 +32,12 @@ const routes = [ component: () => import('../views/ConversationsView.vue'), meta: { requiresAuth: true } }, + { + path: '/conversations/:id', + name: 'ConversationDetail', + component: () => import('../views/ConversationDetailView.vue'), + meta: { requiresAuth: true } + }, { path: '/tools', name: 'Tools', diff --git a/dashboard/src/services/api.js b/dashboard/src/services/api.js index d5ab9ba..4b617d9 100644 --- a/dashboard/src/services/api.js +++ b/dashboard/src/services/api.js @@ -75,7 +75,7 @@ export const conversationsAPI = { export const messagesAPI = { // 获取消息列表 - list: (conversationId, params) => api.get(`/messages/${conversationId}`, { params }), + list: (conversationId, params) => api.get('/messages/', { params: { conversation_id: conversationId, ...params } }), // 发送消息(非流式) send: (data) => api.post('/messages/', data), @@ -110,5 +110,27 @@ export const toolsAPI = { execute: (name, data) => api.post(`/tools/${name}/execute`, data) } +// ============ LLM Provider 接口 ============ + +export const providersAPI 
class LLMProvider(Base):
    """LLM provider configuration owned by a single user.

    Holds the connection details (base URL, API key) together with
    per-provider defaults used when a conversation does not pin its
    own model.
    """
    __tablename__ = "llm_providers"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # Owning user; providers are strictly per-user resources.
    user_id: Mapped[int] = mapped_column(Integer, ForeignKey("users.id"), nullable=False)
    name: Mapped[str] = mapped_column(String(100), nullable=False)
    # e.g. "openai", "deepseek", "glm"
    provider_type: Mapped[str] = mapped_column(String(50), nullable=False, default="openai")
    base_url: Mapped[str] = mapped_column(String(500), nullable=False)
    # NOTE(review): stored in plaintext — consider encrypting at rest.
    api_key: Mapped[str] = mapped_column(String(500), nullable=False)
    default_model: Mapped[str] = mapped_column(String(100), nullable=False, default="gpt-4")
    is_default: Mapped[bool] = mapped_column(Boolean, default=False)
    enabled: Mapped[bool] = mapped_column(Boolean, default=True)
    created_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow)
    updated_at: Mapped[datetime] = mapped_column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow)

    # Relationships
    user: Mapped["User"] = relationship("User", backref="llm_providers")

    def to_dict(self, include_key: bool = False):
        """Serialize to a plain dict.

        The API key is omitted unless *include_key* is True, so list/detail
        responses do not leak credentials by accident.
        """
        created = self.created_at
        updated = self.updated_at
        payload = {
            "id": self.id,
            "user_id": self.user_id,
            "name": self.name,
            "provider_type": self.provider_type,
            "base_url": self.base_url,
            "default_model": self.default_model,
            "is_default": self.is_default,
            "enabled": self.enabled,
            "created_at": created.isoformat() if created else None,
            "updated_at": updated.isoformat() if updated else None,
        }
        if include_key:
            payload["api_key"] = self.api_key
        return payload
Mapped["User"] = relationship("User", back_populates="conversations") + provider: Mapped[Optional["LLMProvider"]] = relationship("LLMProvider") messages: Mapped[List["Message"]] = relationship( "Message", back_populates="conversation", cascade="all, delete-orphan" ) @@ -76,6 +116,7 @@ class Conversation(Base): return { "id": self.id, "user_id": self.user_id, + "provider_id": self.provider_id, "project_id": self.project_id, "title": self.title, "model": self.model, diff --git a/luxx/routes/__init__.py b/luxx/routes/__init__.py index c354528..c6519c8 100644 --- a/luxx/routes/__init__.py +++ b/luxx/routes/__init__.py @@ -1,7 +1,7 @@ """API routes module""" from fastapi import APIRouter -from luxx.routes import auth, conversations, messages, tools +from luxx.routes import auth, conversations, messages, tools, providers api_router = APIRouter() @@ -11,3 +11,4 @@ api_router.include_router(auth.router) api_router.include_router(conversations.router) api_router.include_router(messages.router) api_router.include_router(tools.router) +api_router.include_router(providers.router) diff --git a/luxx/routes/conversations.py b/luxx/routes/conversations.py index 11d63e5..c730909 100644 --- a/luxx/routes/conversations.py +++ b/luxx/routes/conversations.py @@ -16,8 +16,9 @@ router = APIRouter(prefix="/conversations", tags=["Conversations"]) class ConversationCreate(BaseModel): """Create conversation model""" project_id: Optional[str] = None + provider_id: Optional[int] = None title: Optional[str] = None - model: str = "deepseek-chat" + model: Optional[str] = None system_prompt: str = "You are a helpful assistant." 
"""LLM Provider routes.

CRUD plus a connectivity test for per-user LLM provider configurations.
All endpoints require authentication and only operate on providers owned
by the current user; foreign providers answer 404, never 403, so their
existence is not revealed.
"""
from typing import Optional

import httpx
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy.orm import Session

from luxx.database import get_db
from luxx.models import User, LLMProvider
from luxx.routes.auth import get_current_user
from luxx.utils.helpers import success_response

router = APIRouter(prefix="/providers", tags=["LLM Providers"])


class ProviderCreate(BaseModel):
    """Payload for registering a new provider."""
    name: str
    provider_type: str = "openai"  # openai, deepseek, glm, etc.
    base_url: str
    api_key: str
    default_model: str = "gpt-4"
    is_default: bool = False


class ProviderUpdate(BaseModel):
    """Partial update; only fields explicitly set by the client are applied."""
    name: Optional[str] = None
    provider_type: Optional[str] = None
    base_url: Optional[str] = None
    api_key: Optional[str] = None
    default_model: Optional[str] = None
    is_default: Optional[bool] = None
    enabled: Optional[bool] = None


def _owned_provider_or_404(db: Session, provider_id: int, user: User) -> LLMProvider:
    """Return the provider with *provider_id* owned by *user*, or raise 404."""
    provider = db.query(LLMProvider).filter(
        LLMProvider.id == provider_id,
        LLMProvider.user_id == user.id
    ).first()
    if not provider:
        raise HTTPException(status_code=404, detail="Provider not found")
    return provider


def _clear_default(db: Session, user_id: int, keep_id: Optional[int] = None) -> None:
    """Unset is_default on the user's other providers so at most one is default."""
    query = db.query(LLMProvider).filter(LLMProvider.user_id == user_id)
    if keep_id is not None:
        query = query.filter(LLMProvider.id != keep_id)
    query.update({"is_default": False})


@router.get("/", response_model=dict)
def list_providers(
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get user's LLM providers, default first, then newest first."""
    providers = db.query(LLMProvider).filter(
        LLMProvider.user_id == current_user.id
    ).order_by(LLMProvider.is_default.desc(), LLMProvider.created_at.desc()).all()

    return success_response(data={
        "providers": [p.to_dict() for p in providers],
        "total": len(providers)
    })


@router.post("/", response_model=dict)
def create_provider(
    provider: ProviderCreate,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Create a new LLM provider; raises 400 on a database error."""
    try:
        # If this is set as default, unset other defaults first.
        if provider.is_default:
            _clear_default(db, current_user.id)

        db_provider = LLMProvider(
            user_id=current_user.id,
            name=provider.name,
            provider_type=provider.provider_type,
            base_url=provider.base_url,
            api_key=provider.api_key,
            default_model=provider.default_model,
            is_default=provider.is_default
        )
        db.add(db_provider)
        db.commit()
        db.refresh(db_provider)
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=400, detail=str(e))

    # Echo the key back so the creator can confirm what was stored.
    return success_response(data=db_provider.to_dict(include_key=True))


@router.get("/{provider_id}", response_model=dict)
def get_provider(
    provider_id: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Get provider details (including the API key) for the owner."""
    provider = _owned_provider_or_404(db, provider_id, current_user)
    return success_response(data=provider.to_dict(include_key=True))


@router.put("/{provider_id}", response_model=dict)
def update_provider(
    provider_id: int,
    update: ProviderUpdate,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Apply a partial update; promoting to default demotes the others."""
    provider = _owned_provider_or_404(db, provider_id, current_user)

    try:
        if update.is_default:
            _clear_default(db, current_user.id, keep_id=provider_id)

        # Only touch the fields the client actually sent.
        for key, value in update.dict(exclude_unset=True).items():
            setattr(provider, key, value)

        db.commit()
        db.refresh(provider)
    except Exception as e:
        db.rollback()
        raise HTTPException(status_code=400, detail=str(e))

    return success_response(data=provider.to_dict(include_key=True))


@router.delete("/{provider_id}", response_model=dict)
def delete_provider(
    provider_id: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Delete one of the current user's providers."""
    provider = _owned_provider_or_404(db, provider_id, current_user)
    db.delete(provider)
    db.commit()
    return success_response(message="Provider deleted")


@router.post("/{provider_id}/test", response_model=dict)
async def test_provider(
    provider_id: int,
    current_user: User = Depends(get_current_user),
    db: Session = Depends(get_db)
):
    """Test provider connection with a tiny chat completion.

    Always returns HTTP 200; success/failure is reported in the payload so
    the frontend can render the result inline. Async endpoint — awaiting
    httpx directly replaces the previous asyncio.run() workaround.
    """
    provider = _owned_provider_or_404(db, provider_id, current_user)

    try:
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.post(
                f"{provider.base_url}/chat/completions",
                headers={
                    "Authorization": f"Bearer {provider.api_key}",
                    "Content-Type": "application/json"
                },
                json={
                    "model": provider.default_model,
                    "messages": [{"role": "user", "content": "Hi"}],
                    "max_tokens": 10
                }
            )
        success = response.status_code == 200
        return success_response(data={"success": success, "message": "连接成功" if success else "连接失败"})
    except Exception as e:
        return success_response(data={"success": False, "message": f"连接失败: {str(e)}"})
model=provider.default_model + ) + return client + finally: + db.close() + + # Fallback to global config + client = LLMClient() + return client + + class ChatService: """Chat service""" @@ -66,13 +90,16 @@ class ChatService: iteration = 0 + llm = get_llm_client(conversation) + model = conversation.model or llm.default_model or "gpt-4" + while iteration < MAX_ITERATIONS: iteration += 1 tool_calls_this_round = None - async for event in llm_client.stream_call( - model=conversation.model, + async for event in llm.stream_call( + model=model, messages=messages, tools=tools, temperature=conversation.temperature, @@ -144,11 +171,14 @@ class ChatService: iteration = 0 + llm_client = get_llm_client(conversation) + model = conversation.model or llm_client.default_model or "gpt-4" + while iteration < MAX_ITERATIONS: iteration += 1 response = llm_client.sync_call( - model=conversation.model, + model=model, messages=messages, tools=tools, temperature=conversation.temperature, diff --git a/luxx/services/llm_client.py b/luxx/services/llm_client.py index 52f312d..0bacb98 100644 --- a/luxx/services/llm_client.py +++ b/luxx/services/llm_client.py @@ -26,9 +26,10 @@ class LLMResponse: class LLMClient: """LLM API client with multi-provider support""" - def __init__(self): - self.api_key = config.llm_api_key - self.api_url = config.llm_api_url + def __init__(self, api_key: str = None, api_url: str = None, model: str = None): + self.api_key = api_key or config.llm_api_key + self.api_url = api_url or config.llm_api_url + self.default_model = model self.provider = self._detect_provider() self._client: Optional[httpx.AsyncClient] = None