Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 18 additions & 0 deletions src/apps/desktop/src/api/agentic_api.rs
Original file line number Diff line number Diff line change
Expand Up @@ -46,6 +46,13 @@ pub struct CreateSessionResponse {
pub agent_type: String,
}

/// Payload for the `update_session_model` Tauri command.
///
/// Deserialized from the frontend with camelCase keys
/// (`sessionId`, `modelName`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateSessionModelRequest {
    // Identifier of the session whose model selection is being changed.
    pub session_id: String,
    // Model selector; may be a concrete model id or a symbolic value
    // such as "auto" — normalization happens in the coordinator.
    pub model_name: String,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct StartDialogTurnRequest {
Expand Down Expand Up @@ -215,6 +222,17 @@ pub async fn create_session(
})
}

/// Tauri command: forward a session model change to the conversation
/// coordinator, stringifying any error for the frontend.
#[tauri::command]
pub async fn update_session_model(
    coordinator: State<'_, Arc<ConversationCoordinator>>,
    request: UpdateSessionModelRequest,
) -> Result<(), String> {
    let outcome = coordinator
        .update_session_model(&request.session_id, &request.model_name)
        .await;
    outcome.map_err(|err| format!("Failed to update session model: {}", err))
}

#[tauri::command]
pub async fn start_dialog_turn(
_app: AppHandle,
Expand Down
1 change: 1 addition & 0 deletions src/apps/desktop/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -295,6 +295,7 @@ pub async fn run() {
.invoke_handler(tauri::generate_handler![
theme::show_main_window,
api::agentic_api::create_session,
api::agentic_api::update_session_model,
api::agentic_api::start_dialog_turn,
api::agentic_api::ensure_assistant_bootstrap,
api::agentic_api::cancel_dialog_turn,
Expand Down
24 changes: 24 additions & 0 deletions src/crates/core/src/agentic/coordination/coordinator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -244,6 +244,30 @@ Update the persona files and delete BOOTSTRAP.md as soon as bootstrap is complet
.await
}

/// Persist a new model selection for an existing session.
///
/// Blank input is treated as the symbolic "auto" selector; otherwise the
/// trimmed model id is stored verbatim via the session manager.
pub async fn update_session_model(
    &self,
    session_id: &str,
    model_id: &str,
) -> BitFunResult<()> {
    // Fall back to "auto" when the caller sends an empty/whitespace id.
    let trimmed = model_id.trim();
    let effective_model_id = if trimmed.is_empty() { "auto" } else { trimmed };

    self.session_manager
        .update_session_model_id(session_id, effective_model_id)
        .await?;

    info!(
        "Coordinator updated session model: session_id={}, model_id={}",
        session_id, effective_model_id
    );

    Ok(())
}

/// Create a new session with explicit creator identity.
pub async fn create_session_with_workspace_and_creator(
&self,
Expand Down
76 changes: 20 additions & 56 deletions src/crates/core/src/agentic/execution/execution_engine.rs
Original file line number Diff line number Diff line change
Expand Up @@ -145,19 +145,6 @@ impl ExecutionEngine {
.unwrap_or_else(|| "auto".to_string())
}

/// Resolve a session-locked model id for the "auto" strategy.
///
/// Returns `None` when no usable lock exists — the id is absent, blank, or
/// one of the symbolic selectors ("auto"/"default") — otherwise defers to
/// the AI config to map the id to a concrete model selection.
fn resolve_locked_auto_model_id(
    ai_config: &crate::service::config::types::AIConfig,
    model_id: Option<&String>,
) -> Option<String> {
    // No locked model recorded on the session at all.
    let model_id = model_id?;
    let trimmed = model_id.trim();
    // Symbolic selectors do not count as a concrete lock.
    if trimmed.is_empty() || trimmed == "auto" || trimmed == "default" {
        return None;
    }

    // May still return None if the config cannot resolve the id.
    ai_config.resolve_model_selection(trimmed)
}

/// Heuristic for the "auto" model strategy: pick the fast model only on the
/// opening turn of a conversation when the prompt is very short (<= 10 chars,
/// counted as Unicode scalar values, not bytes).
fn should_use_fast_auto_model(turn_index: usize, original_user_input: &str) -> bool {
    let is_first_turn = turn_index == 0;
    let is_short_prompt = original_user_input.chars().count() <= 10;
    is_first_turn && is_short_prompt
}
Expand Down Expand Up @@ -192,55 +179,32 @@ impl ExecutionEngine {
.map(|model_id| model_id.trim())
.filter(|model_id| !model_id.is_empty())
.map(str::to_string)
.unwrap_or(fallback_model_id);
.unwrap_or(fallback_model_id.clone());
let resolved_configured_model_id =
Self::resolve_configured_model_id(&ai_config, &configured_model_id);

let model_id = if configured_model_id == "auto" || resolved_configured_model_id == "auto" {
let locked_model_id =
Self::resolve_locked_auto_model_id(&ai_config, session.config.model_id.as_ref());
let raw_locked_model_id = session.config.model_id.clone();

if let Some(locked_model_id) = locked_model_id {
locked_model_id
} else {
if let Some(raw_locked_model_id) = raw_locked_model_id.as_ref() {
let trimmed = raw_locked_model_id.trim();
if !trimmed.is_empty() && trimmed != "auto" && trimmed != "default" {
warn!(
"Ignoring invalid locked auto model for session: session_id={}, model_id={}",
session.session_id, trimmed
);
}
}

let use_fast_model =
Self::should_use_fast_auto_model(turn_index, original_user_input);
let fallback_model = if use_fast_model { "fast" } else { "primary" };
let resolved_model_id = ai_config.resolve_model_selection(fallback_model);

if let Some(resolved_model_id) = resolved_model_id {
self.session_manager
.update_session_model_id(&session.session_id, &resolved_model_id)
.await?;

info!(
"Auto model resolved: session_id={}, turn_index={}, user_input_chars={}, strategy={}, resolved_model_id={}",
session.session_id,
turn_index,
original_user_input.chars().count(),
fallback_model,
resolved_model_id
);
let use_fast_model = Self::should_use_fast_auto_model(turn_index, original_user_input);
let fallback_model = if use_fast_model { "fast" } else { "primary" };
let resolved_model_id = ai_config.resolve_model_selection(fallback_model);

if let Some(resolved_model_id) = resolved_model_id {
info!(
"Auto model resolved without locking session: session_id={}, turn_index={}, user_input_chars={}, strategy={}, resolved_model_id={}",
session.session_id,
turn_index,
original_user_input.chars().count(),
fallback_model,
resolved_model_id
} else {
warn!(
"Auto model strategy unresolved, keeping symbolic selector: session_id={}, strategy={}",
session.session_id, fallback_model
);
fallback_model.to_string()
}
);

resolved_model_id
} else {
warn!(
"Auto model strategy unresolved, keeping symbolic selector: session_id={}, strategy={}",
session.session_id, fallback_model
);
fallback_model.to_string()
}
} else {
resolved_configured_model_id
Expand Down
13 changes: 13 additions & 0 deletions src/crates/core/src/agentic/execution/stream_processor.rs
Original file line number Diff line number Diff line change
Expand Up @@ -248,6 +248,13 @@ impl StreamContext {
}
}

/// Whether a stream error can be downgraded to a partial text result:
/// some text was actually produced and no tool call is completed or
/// still buffering (a half-built tool call cannot be safely surfaced).
fn can_recover_as_partial_text_result(&self) -> bool {
    let produced_text = self.has_effective_output && !self.full_text.is_empty();
    let no_tool_activity =
        self.tool_calls.is_empty() && self.tool_call_buffer.tool_id.is_empty();
    produced_text && no_tool_activity
}

/// Force finish tool_call_buffer, used to handle cases where toolcall parameters are not fully closed
/// E.g., when new toolcall arrives and before returning results
fn force_finish_tool_call_buffer(&mut self) {
Expand Down Expand Up @@ -707,6 +714,12 @@ impl StreamProcessor {
Ok(Some(Err(e))) => {
let error_msg = format!("Stream processing error: {}", e);
error!("{}", error_msg);
if ctx.can_recover_as_partial_text_result() {
flush_sse_on_error(&sse_collector, &error_msg).await;
self.send_thinking_end_if_needed(&mut ctx).await;
self.log_stream_result(&ctx);
break;
}
// log SSE for network errors
flush_sse_on_error(&sse_collector, &error_msg).await;
self.graceful_shutdown_from_ctx(&mut ctx, error_msg.clone()).await;
Expand Down
6 changes: 0 additions & 6 deletions src/crates/core/src/agentic/persistence/manager.rs
Original file line number Diff line number Diff line change
Expand Up @@ -579,12 +579,6 @@ impl PersistenceManager {
self.write_json_atomic(&index_path, &index).await
}

/// Rebuild the session index for a workspace, serialized against other
/// index mutations via the per-workspace index lock.
async fn rebuild_index(&self, workspace_path: &Path) -> BitFunResult<Vec<SessionMetadata>> {
    let lock = self.get_session_index_lock(workspace_path).await;
    // Hold the guard for the whole rebuild so concurrent writers cannot
    // interleave; the actual work happens in the *_locked variant.
    let _guard = lock.lock().await;
    self.rebuild_index_locked(workspace_path).await
}

async fn upsert_index_entry(
&self,
workspace_path: &Path,
Expand Down
31 changes: 27 additions & 4 deletions src/crates/core/src/infrastructure/ai/client.rs
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,9 @@ impl AIClient {
const TEST_IMAGE_EXPECTED_CODE: &'static str = "BYGR";
const TEST_IMAGE_PNG_BASE64: &'static str =
"iVBORw0KGgoAAAANSUhEUgAAAQAAAAEACAIAAADTED8xAAACBklEQVR42u3ZsREAIAwDMYf9dw4txwJupI7Wua+YZEPBfO91h4ZjAgQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABIAAQAAgABAACAAEAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAAAAAAAEDRZI3QGf7jDvEPAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAACAAEAAIAAYAAQAAgABAACAABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAACAAEAAIAAQAAgABgABAAAjABAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQAAgABAACAAGAAEAAIAAQALwuLkoG8OSfau4AAAAASUVORK5CYII=";
const STREAM_CONNECT_TIMEOUT_SECS: u64 = 10;
const HTTP_POOL_IDLE_TIMEOUT_SECS: u64 = 30;
const HTTP_TCP_KEEPALIVE_SECS: u64 = 60;

fn image_test_response_matches_expected(response: &str) -> bool {
let upper = response.to_ascii_uppercase();
Expand Down Expand Up @@ -280,12 +283,20 @@ impl AIClient {
/// Create an HTTP client (supports proxy config and SSL verification control)
fn create_http_client(proxy_config: Option<ProxyConfig>, skip_ssl_verify: bool) -> Client {
let mut builder = Client::builder()
.timeout(std::time::Duration::from_secs(600))
.connect_timeout(std::time::Duration::from_secs(10))
// SSE requests can legitimately stay open for a long time while the model
// thinks or executes tools. Keep only connect timeout here and let the
// stream handlers enforce idle timeouts between chunks.
.connect_timeout(std::time::Duration::from_secs(
Self::STREAM_CONNECT_TIMEOUT_SECS,
))
.user_agent("BitFun/1.0")
.pool_idle_timeout(std::time::Duration::from_secs(30))
.pool_idle_timeout(std::time::Duration::from_secs(
Self::HTTP_POOL_IDLE_TIMEOUT_SECS,
))
.pool_max_idle_per_host(4)
.tcp_keepalive(Some(std::time::Duration::from_secs(60)))
.tcp_keepalive(Some(std::time::Duration::from_secs(
Self::HTTP_TCP_KEEPALIVE_SECS,
)))
.danger_accept_invalid_certs(skip_ssl_verify);

if skip_ssl_verify {
Expand Down Expand Up @@ -2253,4 +2264,16 @@ mod tests {
assert_eq!(request_body["tools"][0]["googleSearch"], json!({}));
assert!(request_body.get("toolConfig").is_none());
}

/// Regression test: the streaming HTTP client must not carry a global
/// request timeout, since SSE responses legitimately stay open far longer
/// than any fixed deadline.
///
/// NOTE(review): `reqwest::Request::timeout()` reports the *per-request*
/// timeout override; confirm it also surfaces a `Client::builder().timeout`
/// setting, otherwise this assertion would pass even with a global timeout
/// configured — TODO verify against the reqwest version in use.
#[test]
fn streaming_http_client_does_not_apply_global_request_timeout() {
    let client = make_test_client("openai", None);
    let request = client
        .client
        .get("https://example.com/stream")
        .build()
        .expect("request should build");

    assert_eq!(request.timeout(), None);
}
}
2 changes: 2 additions & 0 deletions src/crates/core/src/service/system/command.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,8 @@ fn dedup_existing_dirs(paths: Vec<PathBuf>) -> Vec<PathBuf> {
///
/// # Example
/// ```rust
/// use bitfun_core::service::system::check_command;
///
/// let result = check_command("git");
/// if result.exists {
/// if let Some(path) = result.path.as_deref() {
Expand Down
12 changes: 12 additions & 0 deletions src/web-ui/src/flow_chat/components/ModelSelector.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -11,10 +11,12 @@ import React, { useState, useEffect, useRef, useCallback, useMemo } from 'react'
import { Cpu, ChevronDown, Check, Sparkles } from 'lucide-react';
import { useTranslation } from 'react-i18next';
import { configManager } from '@/infrastructure/config/services/ConfigManager';
import { agentAPI } from '@/infrastructure/api/service-api/AgentAPI';
import { getProviderDisplayName } from '@/infrastructure/config/services/modelConfigs';
import { globalEventBus } from '@/infrastructure/event-bus';
import type { AIModelConfig } from '@/infrastructure/config/types';
import { Tooltip } from '@/component-library';
import { FlowChatStore } from '../store/FlowChatStore';
import { createLogger } from '@/shared/utils/logger';
import './ModelSelector.scss';

Expand Down Expand Up @@ -111,6 +113,7 @@ const buildAutoModelInfo = (
export const ModelSelector: React.FC<ModelSelectorProps> = ({
currentMode,
className = '',
sessionId,
}) => {
const { t } = useTranslation('flow-chat');
const [allModels, setAllModels] = useState<AIModelConfig[]>([]);
Expand Down Expand Up @@ -268,6 +271,15 @@ export const ModelSelector: React.FC<ModelSelectorProps> = ({

await configManager.setConfig('ai.agent_models', updatedAgentModels);
setAgentModels(updatedAgentModels);

if (sessionId) {
FlowChatStore.getInstance().updateSessionModelName(sessionId, modelId);
await agentAPI.updateSessionModel({
sessionId,
modelName: modelId,
});
}

log.info('Mode model updated', { mode: currentMode, modelId });

globalEventBus.emit('mode:config:updated');
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@
*/

import { agentAPI } from '@/infrastructure/api/service-api/AgentAPI';
import { configManager } from '@/infrastructure/config/services/ConfigManager';
import { aiExperienceConfigService } from '@/infrastructure/config/services';
import type { AIModelConfig, DefaultModelsConfig } from '@/infrastructure/config/types';
import { notificationService } from '../../../shared/notification-system';
import { stateMachineManager } from '../../state-machine';
import { SessionExecutionEvent, SessionExecutionState } from '../../state-machine/types';
Expand All @@ -17,6 +19,66 @@ import type { ImageContextData as ImageInputContextData } from '@/infrastructure

const log = createLogger('MessageModule');

/**
 * Normalize a raw per-agent model preference into a value the backend
 * understands. Returns 'auto' for missing/blank/unknown selections,
 * keeps 'primary'/'fast' only when their configured default model exists,
 * and keeps a concrete id only when it matches a configured model.
 */
function normalizeModelSelection(
  modelId: string | undefined,
  models: AIModelConfig[],
  defaultModels: DefaultModelsConfig,
): string {
  const trimmed = modelId?.trim();
  if (!trimmed || trimmed === 'auto') {
    return 'auto';
  }

  // Symbolic selectors are valid only if their default resolves to a real model.
  if (trimmed === 'primary' || trimmed === 'fast') {
    const defaultId = trimmed === 'primary' ? defaultModels.primary : defaultModels.fast;
    return models.some(model => model.id === defaultId) ? trimmed : 'auto';
  }

  // Concrete ids must match a configured model by id, display name, or model_name.
  const isKnownModel = models.some(
    model => model.id === trimmed || model.name === trimmed || model.model_name === trimmed,
  );
  return isKnownModel ? trimmed : 'auto';
}

/**
 * Align the backend session's locked model with the user's current per-agent
 * model preference before a message is sent.
 *
 * Throws if the session is missing from the store. 'auto' selections are
 * always pushed to the backend (forced sync) so a stale concrete lock is
 * cleared; concrete selections are pushed only when they differ.
 */
async function syncSessionModelSelection(
  context: FlowChatContext,
  sessionId: string,
  agentType: string,
): Promise<void> {
  const session = context.flowChatStore.getState().sessions.get(sessionId);
  if (!session) {
    throw new Error(`Session does not exist: ${sessionId}`);
  }

  // BUG FIX: `getConfig(...) || fallback` tested the *promise*, which is
  // always truthy, so the fallback never applied to the resolved value and a
  // null/undefined config would flow through (crashing on agentModels[...]).
  // Apply the fallback after awaiting instead.
  const [rawAgentModels, rawAllModels, rawDefaultModels] = await Promise.all([
    configManager.getConfig<Record<string, string>>('ai.agent_models'),
    configManager.getConfig<AIModelConfig[]>('ai.models'),
    configManager.getConfig<DefaultModelsConfig>('ai.default_models'),
  ]);
  const agentModels = rawAgentModels ?? {};
  const allModels = rawAllModels ?? [];
  const defaultModels = rawDefaultModels ?? ({} as DefaultModelsConfig);

  const desiredModelId = normalizeModelSelection(agentModels[agentType], allModels, defaultModels);
  const currentModelId = (session.config.modelName || 'auto').trim() || 'auto';
  // 'auto' is always re-synced so the backend drops any stale concrete lock.
  const shouldForceAutoSync = desiredModelId === 'auto';
  if (!shouldForceAutoSync && desiredModelId === currentModelId) {
    return;
  }

  // Update the local store only on an actual change to avoid redundant renders.
  if (currentModelId !== desiredModelId) {
    context.flowChatStore.updateSessionModelName(sessionId, desiredModelId);
  }
  await agentAPI.updateSessionModel({
    sessionId,
    modelName: desiredModelId,
  });

  log.info('Session model synchronized before send', {
    sessionId,
    agentType,
    previousModelId: currentModelId,
    nextModelId: desiredModelId,
    forcedAutoSync: shouldForceAutoSync,
  });
}

/**
* Send message and handle response
* @param message - Message sent to backend
Expand Down Expand Up @@ -90,6 +152,9 @@ export async function sendMessage(
metadata: { sessionId: sessionId, dialogTurnId }
});

const currentAgentType = agentType || session.mode || 'agentic';
await syncSessionModelSelection(context, sessionId, currentAgentType);

const updatedSession = context.flowChatStore.getState().sessions.get(sessionId);
if (!updatedSession) {
throw new Error(`Session lost after adding dialog turn: ${sessionId}`);
Expand All @@ -104,7 +169,6 @@ export async function sendMessage(
context.contentBuffers.set(sessionId, new Map());
context.activeTextItems.set(sessionId, new Map());

const currentAgentType = agentType || 'agentic';
const workspacePath = updatedSession.workspacePath;

try {
Expand Down
Loading
Loading