diff --git a/src/apps/desktop/src/api/agentic_api.rs b/src/apps/desktop/src/api/agentic_api.rs index b4dc125d..ff936d18 100644 --- a/src/apps/desktop/src/api/agentic_api.rs +++ b/src/apps/desktop/src/api/agentic_api.rs @@ -46,6 +46,13 @@ pub struct CreateSessionResponse { pub agent_type: String, } +#[derive(Debug, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct UpdateSessionModelRequest { + pub session_id: String, + pub model_name: String, +} + #[derive(Debug, Deserialize)] #[serde(rename_all = "camelCase")] pub struct StartDialogTurnRequest { @@ -215,6 +222,17 @@ pub async fn create_session( }) } +#[tauri::command] +pub async fn update_session_model( + coordinator: State<'_, Arc>, + request: UpdateSessionModelRequest, +) -> Result<(), String> { + coordinator + .update_session_model(&request.session_id, &request.model_name) + .await + .map_err(|e| format!("Failed to update session model: {}", e)) +} + #[tauri::command] pub async fn start_dialog_turn( _app: AppHandle, diff --git a/src/apps/desktop/src/lib.rs b/src/apps/desktop/src/lib.rs index 3c7702fd..0fbc330f 100644 --- a/src/apps/desktop/src/lib.rs +++ b/src/apps/desktop/src/lib.rs @@ -295,6 +295,7 @@ pub async fn run() { .invoke_handler(tauri::generate_handler![ theme::show_main_window, api::agentic_api::create_session, + api::agentic_api::update_session_model, api::agentic_api::start_dialog_turn, api::agentic_api::ensure_assistant_bootstrap, api::agentic_api::cancel_dialog_turn, diff --git a/src/crates/core/src/agentic/coordination/coordinator.rs b/src/crates/core/src/agentic/coordination/coordinator.rs index 769d1323..a85a651c 100644 --- a/src/crates/core/src/agentic/coordination/coordinator.rs +++ b/src/crates/core/src/agentic/coordination/coordinator.rs @@ -244,6 +244,30 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet .await } + pub async fn update_session_model( + &self, + session_id: &str, + model_id: &str, + ) -> BitFunResult<()> { + let 
normalized_model_id = model_id.trim(); + let normalized_model_id = if normalized_model_id.is_empty() { + "auto" + } else { + normalized_model_id + }; + + self.session_manager + .update_session_model_id(session_id, normalized_model_id) + .await?; + + info!( + "Coordinator updated session model: session_id={}, model_id={}", + session_id, normalized_model_id + ); + + Ok(()) + } + /// Create a new session with explicit creator identity. pub async fn create_session_with_workspace_and_creator( &self, diff --git a/src/crates/core/src/agentic/execution/execution_engine.rs b/src/crates/core/src/agentic/execution/execution_engine.rs index e9bc8d38..e9ee9a61 100644 --- a/src/crates/core/src/agentic/execution/execution_engine.rs +++ b/src/crates/core/src/agentic/execution/execution_engine.rs @@ -145,19 +145,6 @@ impl ExecutionEngine { .unwrap_or_else(|| "auto".to_string()) } - fn resolve_locked_auto_model_id( - ai_config: &crate::service::config::types::AIConfig, - model_id: Option<&String>, - ) -> Option { - let model_id = model_id?; - let trimmed = model_id.trim(); - if trimmed.is_empty() || trimmed == "auto" || trimmed == "default" { - return None; - } - - ai_config.resolve_model_selection(trimmed) - } - fn should_use_fast_auto_model(turn_index: usize, original_user_input: &str) -> bool { turn_index == 0 && original_user_input.chars().count() <= 10 } @@ -192,55 +179,32 @@ impl ExecutionEngine { .map(|model_id| model_id.trim()) .filter(|model_id| !model_id.is_empty()) .map(str::to_string) - .unwrap_or(fallback_model_id); + .unwrap_or(fallback_model_id.clone()); let resolved_configured_model_id = Self::resolve_configured_model_id(&ai_config, &configured_model_id); let model_id = if configured_model_id == "auto" || resolved_configured_model_id == "auto" { - let locked_model_id = - Self::resolve_locked_auto_model_id(&ai_config, session.config.model_id.as_ref()); - let raw_locked_model_id = session.config.model_id.clone(); - - if let Some(locked_model_id) = locked_model_id { - 
locked_model_id - } else { - if let Some(raw_locked_model_id) = raw_locked_model_id.as_ref() { - let trimmed = raw_locked_model_id.trim(); - if !trimmed.is_empty() && trimmed != "auto" && trimmed != "default" { - warn!( - "Ignoring invalid locked auto model for session: session_id={}, model_id={}", - session.session_id, trimmed - ); - } - } - - let use_fast_model = - Self::should_use_fast_auto_model(turn_index, original_user_input); - let fallback_model = if use_fast_model { "fast" } else { "primary" }; - let resolved_model_id = ai_config.resolve_model_selection(fallback_model); - - if let Some(resolved_model_id) = resolved_model_id { - self.session_manager - .update_session_model_id(&session.session_id, &resolved_model_id) - .await?; - - info!( - "Auto model resolved: session_id={}, turn_index={}, user_input_chars={}, strategy={}, resolved_model_id={}", - session.session_id, - turn_index, - original_user_input.chars().count(), - fallback_model, - resolved_model_id - ); + let use_fast_model = Self::should_use_fast_auto_model(turn_index, original_user_input); + let fallback_model = if use_fast_model { "fast" } else { "primary" }; + let resolved_model_id = ai_config.resolve_model_selection(fallback_model); + if let Some(resolved_model_id) = resolved_model_id { + info!( + "Auto model resolved without locking session: session_id={}, turn_index={}, user_input_chars={}, strategy={}, resolved_model_id={}", + session.session_id, + turn_index, + original_user_input.chars().count(), + fallback_model, resolved_model_id - } else { - warn!( - "Auto model strategy unresolved, keeping symbolic selector: session_id={}, strategy={}", - session.session_id, fallback_model - ); - fallback_model.to_string() - } + ); + + resolved_model_id + } else { + warn!( + "Auto model strategy unresolved, keeping symbolic selector: session_id={}, strategy={}", + session.session_id, fallback_model + ); + fallback_model.to_string() } } else { resolved_configured_model_id diff --git 
a/src/crates/core/src/agentic/execution/stream_processor.rs b/src/crates/core/src/agentic/execution/stream_processor.rs index da782871..761fa888 100644 --- a/src/crates/core/src/agentic/execution/stream_processor.rs +++ b/src/crates/core/src/agentic/execution/stream_processor.rs @@ -248,6 +248,13 @@ impl StreamContext { } } + fn can_recover_as_partial_text_result(&self) -> bool { + self.has_effective_output + && !self.full_text.is_empty() + && self.tool_calls.is_empty() + && self.tool_call_buffer.tool_id.is_empty() + } + /// Force finish tool_call_buffer, used to handle cases where toolcall parameters are not fully closed /// E.g., when new toolcall arrives and before returning results fn force_finish_tool_call_buffer(&mut self) { @@ -707,6 +714,12 @@ impl StreamProcessor { Ok(Some(Err(e))) => { let error_msg = format!("Stream processing error: {}", e); error!("{}", error_msg); + if ctx.can_recover_as_partial_text_result() { + flush_sse_on_error(&sse_collector, &error_msg).await; + self.send_thinking_end_if_needed(&mut ctx).await; + self.log_stream_result(&ctx); + break; + } // log SSE for network errors flush_sse_on_error(&sse_collector, &error_msg).await; self.graceful_shutdown_from_ctx(&mut ctx, error_msg.clone()).await; diff --git a/src/crates/core/src/agentic/persistence/manager.rs b/src/crates/core/src/agentic/persistence/manager.rs index 5c7fff36..aebf2672 100644 --- a/src/crates/core/src/agentic/persistence/manager.rs +++ b/src/crates/core/src/agentic/persistence/manager.rs @@ -579,12 +579,6 @@ impl PersistenceManager { self.write_json_atomic(&index_path, &index).await } - async fn rebuild_index(&self, workspace_path: &Path) -> BitFunResult> { - let lock = self.get_session_index_lock(workspace_path).await; - let _guard = lock.lock().await; - self.rebuild_index_locked(workspace_path).await - } - async fn upsert_index_entry( &self, workspace_path: &Path, diff --git a/src/crates/core/src/infrastructure/ai/client.rs 
b/src/crates/core/src/infrastructure/ai/client.rs index 7d987874..79d591ed 100644 --- a/src/crates/core/src/infrastructure/ai/client.rs +++ b/src/crates/core/src/infrastructure/ai/client.rs @@ -60,6 +60,9 @@ impl AIClient { const TEST_IMAGE_EXPECTED_CODE: &'static str = "BYGR"; const TEST_IMAGE_PNG_BASE64: &'static str = "iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAACBklEQVR42u3ZsREAIAwDMYf9dw4txwJupI7Wua+YZEPBfO91h4ZjAgQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABIAAQAAgABAACAAEAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAAAAAAAEDRZI3QGf7jDvEPAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAACAABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAAAjABAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQALwuLkoG8OSfau4AAAAASUVORK5CYII="; + const STREAM_CONNECT_TIMEOUT_SECS: u64 = 10; + const HTTP_POOL_IDLE_TIMEOUT_SECS: u64 = 30; + const HTTP_TCP_KEEPALIVE_SECS: u64 = 60; fn image_test_response_matches_expected(response: &str) -> bool { let upper = response.to_ascii_uppercase(); @@ -280,12 +283,20 @@ impl AIClient { /// Create an HTTP client (supports proxy config and SSL verification control) fn create_http_client(proxy_config: Option, skip_ssl_verify: bool) -> Client { let mut builder = Client::builder() - .timeout(std::time::Duration::from_secs(600)) - .connect_timeout(std::time::Duration::from_secs(10)) + // SSE requests can legitimately stay open for a long time while the model + // thinks or executes tools. Keep only connect timeout here and let the + // stream handlers enforce idle timeouts between chunks. 
+ .connect_timeout(std::time::Duration::from_secs( + Self::STREAM_CONNECT_TIMEOUT_SECS, + )) .user_agent("BitFun/1.0") - .pool_idle_timeout(std::time::Duration::from_secs(30)) + .pool_idle_timeout(std::time::Duration::from_secs( + Self::HTTP_POOL_IDLE_TIMEOUT_SECS, + )) .pool_max_idle_per_host(4) - .tcp_keepalive(Some(std::time::Duration::from_secs(60))) + .tcp_keepalive(Some(std::time::Duration::from_secs( + Self::HTTP_TCP_KEEPALIVE_SECS, + ))) .danger_accept_invalid_certs(skip_ssl_verify); if skip_ssl_verify { @@ -2253,4 +2264,16 @@ mod tests { assert_eq!(request_body["tools"][0]["googleSearch"], json!({})); assert!(request_body.get("toolConfig").is_none()); } + + #[test] + fn streaming_http_client_does_not_apply_global_request_timeout() { + let client = make_test_client("openai", None); + let request = client + .client + .get("https://example.com/stream") + .build() + .expect("request should build"); + + assert_eq!(request.timeout(), None); + } } diff --git a/src/crates/core/src/service/system/command.rs b/src/crates/core/src/service/system/command.rs index c0d2f63d..c26320df 100644 --- a/src/crates/core/src/service/system/command.rs +++ b/src/crates/core/src/service/system/command.rs @@ -203,6 +203,8 @@ fn dedup_existing_dirs(paths: Vec) -> Vec { /// /// # Example /// ```rust +/// use bitfun_core::service::system::check_command; +/// /// let result = check_command("git"); /// if result.exists { /// if let Some(path) = result.path.as_deref() { diff --git a/src/web-ui/src/flow_chat/components/ModelSelector.tsx b/src/web-ui/src/flow_chat/components/ModelSelector.tsx index 8aa2e074..47841c6d 100644 --- a/src/web-ui/src/flow_chat/components/ModelSelector.tsx +++ b/src/web-ui/src/flow_chat/components/ModelSelector.tsx @@ -11,10 +11,12 @@ import React, { useState, useEffect, useRef, useCallback, useMemo } from 'react' import { Cpu, ChevronDown, Check, Sparkles } from 'lucide-react'; import { useTranslation } from 'react-i18next'; import { configManager } from 
'@/infrastructure/config/services/ConfigManager'; +import { agentAPI } from '@/infrastructure/api/service-api/AgentAPI'; import { getProviderDisplayName } from '@/infrastructure/config/services/modelConfigs'; import { globalEventBus } from '@/infrastructure/event-bus'; import type { AIModelConfig } from '@/infrastructure/config/types'; import { Tooltip } from '@/component-library'; +import { FlowChatStore } from '../store/FlowChatStore'; import { createLogger } from '@/shared/utils/logger'; import './ModelSelector.scss'; @@ -111,6 +113,7 @@ const buildAutoModelInfo = ( export const ModelSelector: React.FC = ({ currentMode, className = '', + sessionId, }) => { const { t } = useTranslation('flow-chat'); const [allModels, setAllModels] = useState([]); @@ -268,6 +271,15 @@ export const ModelSelector: React.FC = ({ await configManager.setConfig('ai.agent_models', updatedAgentModels); setAgentModels(updatedAgentModels); + + if (sessionId) { + FlowChatStore.getInstance().updateSessionModelName(sessionId, modelId); + await agentAPI.updateSessionModel({ + sessionId, + modelName: modelId, + }); + } + log.info('Mode model updated', { mode: currentMode, modelId }); globalEventBus.emit('mode:config:updated'); diff --git a/src/web-ui/src/flow_chat/services/flow-chat-manager/MessageModule.ts b/src/web-ui/src/flow_chat/services/flow-chat-manager/MessageModule.ts index 7f31160a..3336a483 100644 --- a/src/web-ui/src/flow_chat/services/flow-chat-manager/MessageModule.ts +++ b/src/web-ui/src/flow_chat/services/flow-chat-manager/MessageModule.ts @@ -4,7 +4,9 @@ */ import { agentAPI } from '@/infrastructure/api/service-api/AgentAPI'; +import { configManager } from '@/infrastructure/config/services/ConfigManager'; import { aiExperienceConfigService } from '@/infrastructure/config/services'; +import type { AIModelConfig, DefaultModelsConfig } from '@/infrastructure/config/types'; import { notificationService } from '../../../shared/notification-system'; import { stateMachineManager } from 
'../../state-machine'; import { SessionExecutionEvent, SessionExecutionState } from '../../state-machine/types'; @@ -17,6 +19,66 @@ import type { ImageContextData as ImageInputContextData } from '@/infrastructure const log = createLogger('MessageModule'); +function normalizeModelSelection( + modelId: string | undefined, + models: AIModelConfig[], + defaultModels: DefaultModelsConfig, +): string { + const value = modelId?.trim(); + if (!value || value === 'auto') return 'auto'; + + if (value === 'primary' || value === 'fast') { + const resolvedDefaultId = value === 'primary' ? defaultModels.primary : defaultModels.fast; + const matchedModel = models.find(model => model.id === resolvedDefaultId); + return matchedModel ? value : 'auto'; + } + + const matchedModel = models.find(model => + model.id === value || model.name === value || model.model_name === value, + ); + return matchedModel ? value : 'auto'; +} + +async function syncSessionModelSelection( + context: FlowChatContext, + sessionId: string, + agentType: string, +): Promise<void> { + const session = context.flowChatStore.getState().sessions.get(sessionId); + if (!session) { + throw new Error(`Session does not exist: ${sessionId}`); + } + + const [agentModels, allModels, defaultModels] = await Promise.all([ + configManager.getConfig<Record<string, string>>('ai.agent_models') || {}, + configManager.getConfig<AIModelConfig[]>('ai.models') || [], + configManager.getConfig<DefaultModelsConfig>('ai.default_models') || {}, + ]); + + const desiredModelId = normalizeModelSelection(agentModels[agentType], allModels, defaultModels); + const currentModelId = (session.config.modelName || 'auto').trim() || 'auto'; + const shouldForceAutoSync = desiredModelId === 'auto'; + if (!shouldForceAutoSync && desiredModelId === currentModelId) { + return; + } + + if (currentModelId !== desiredModelId) { + context.flowChatStore.updateSessionModelName(sessionId, desiredModelId); + } + await agentAPI.updateSessionModel({ + sessionId, + modelName: desiredModelId, + }); + + log.info('Session model 
synchronized before send', { + sessionId, + agentType, + previousModelId: currentModelId, + nextModelId: desiredModelId, + forcedAutoSync: shouldForceAutoSync, + }); +} + /** * Send message and handle response * @param message - Message sent to backend @@ -90,6 +152,9 @@ export async function sendMessage( metadata: { sessionId: sessionId, dialogTurnId } }); + const currentAgentType = agentType || session.mode || 'agentic'; + await syncSessionModelSelection(context, sessionId, currentAgentType); + const updatedSession = context.flowChatStore.getState().sessions.get(sessionId); if (!updatedSession) { throw new Error(`Session lost after adding dialog turn: ${sessionId}`); @@ -104,7 +169,6 @@ export async function sendMessage( context.contentBuffers.set(sessionId, new Map()); context.activeTextItems.set(sessionId, new Map()); - const currentAgentType = agentType || 'agentic'; const workspacePath = updatedSession.workspacePath; try { diff --git a/src/web-ui/src/flow_chat/store/FlowChatStore.ts b/src/web-ui/src/flow_chat/store/FlowChatStore.ts index c7ca99eb..20a66ce9 100644 --- a/src/web-ui/src/flow_chat/store/FlowChatStore.ts +++ b/src/web-ui/src/flow_chat/store/FlowChatStore.ts @@ -322,6 +322,35 @@ export class FlowChatStore { }); } + public updateSessionModelName(sessionId: string, modelName: string): void { + this.setState(prev => { + const session = prev.sessions.get(sessionId); + if (!session) return prev; + + const normalizedModelName = modelName.trim() || 'auto'; + if ((session.config.modelName || 'auto') === normalizedModelName) { + return prev; + } + + const updatedSession = { + ...session, + config: { + ...session.config, + modelName: normalizedModelName, + }, + lastActiveAt: Date.now(), + }; + + const newSessions = new Map(prev.sessions); + newSessions.set(sessionId, updatedSession); + + return { + ...prev, + sessions: newSessions, + }; + }); + } + /** * Update session relationship metadata (parent/child grouping, kind, etc.). 
* This is UI-only and does not affect backend behavior directly. diff --git a/src/web-ui/src/infrastructure/api/service-api/AgentAPI.ts b/src/web-ui/src/infrastructure/api/service-api/AgentAPI.ts index f04094c9..05fd0586 100644 --- a/src/web-ui/src/infrastructure/api/service-api/AgentAPI.ts +++ b/src/web-ui/src/infrastructure/api/service-api/AgentAPI.ts @@ -85,6 +85,11 @@ export interface EnsureAssistantBootstrapResponse { detail?: string; } +export interface UpdateSessionModelRequest { + sessionId: string; + modelName: string; +} + export interface Message { id: string; @@ -223,6 +228,14 @@ export class AgentAPI { } } + async updateSessionModel(request: UpdateSessionModelRequest): Promise<void> { + try { + await api.invoke('update_session_model', { request }); + } catch (error) { + throw createTauriCommandError('update_session_model', error, request); + } + } + async listSessions(workspacePath: string): Promise {