This commit is contained in:
@@ -1,30 +1,32 @@
|
||||
import 'dart:convert';
|
||||
import 'package:dio/dio.dart';
|
||||
import 'package:riverpod_annotation/riverpod_annotation.dart';
|
||||
import 'package:trainhub_flutter/core/constants/ai_constants.dart';
|
||||
import 'package:trainhub_flutter/domain/repositories/chat_repository.dart';
|
||||
import 'package:trainhub_flutter/domain/repositories/note_repository.dart';
|
||||
import 'package:trainhub_flutter/data/services/ai_process_manager.dart';
|
||||
import 'package:trainhub_flutter/injection.dart';
|
||||
import 'package:trainhub_flutter/presentation/chat/chat_state.dart';
|
||||
import 'package:uuid/uuid.dart';
|
||||
|
||||
part 'chat_controller.g.dart';
|
||||
|
||||
/// Endpoint of the local OpenAI-compatible chat completion server.
///
/// NOTE(review): this commit appears to migrate call sites to
/// [AiConstants.chatApiUrl]; this constant is kept until every caller is
/// switched over — confirm and then delete.
const _chatApiUrl = 'http://localhost:8080/v1/chat/completions';

/// Base system prompt that is always included.
///
/// NOTE(review): likewise superseded by [AiConstants.baseSystemPrompt]
/// in this commit — confirm before removing.
const _baseSystemPrompt =
    'You are a helpful AI fitness assistant for personal trainers. '
    'Help users design training plans, analyse exercise technique, '
    'and answer questions about sports science and nutrition.';
|
||||
/// Exposes the app-wide [AiProcessManager] (resolved from get_it) as a
/// Riverpod provider.
///
/// Listener callbacks from the manager are forwarded into the Riverpod
/// graph so widgets watching this provider rebuild on status changes.
@riverpod
AiProcessManager aiProcessManager(AiProcessManagerRef ref) {
  final manager = getIt<AiProcessManager>();
  // NOTE(review): this listener is never removed; if the provider can be
  // disposed, consider pairing with ref.onDispose(...) to avoid a leak.
  manager.addListener(() => ref.notifyListeners());
  return manager;
}
|
||||
|
||||
@riverpod
|
||||
class ChatController extends _$ChatController {
|
||||
late ChatRepository _repo;
|
||||
late NoteRepository _noteRepo;
|
||||
|
||||
// Shared Dio client — generous timeout for 7B models running on CPU.
|
||||
final _dio = Dio(
|
||||
BaseOptions(
|
||||
connectTimeout: const Duration(seconds: 30),
|
||||
receiveTimeout: const Duration(minutes: 5),
|
||||
connectTimeout: AiConstants.serverConnectTimeout,
|
||||
receiveTimeout: AiConstants.serverReceiveTimeout,
|
||||
),
|
||||
);
|
||||
|
||||
@@ -32,14 +34,14 @@ class ChatController extends _$ChatController {
|
||||
Future<ChatState> build() async {
|
||||
_repo = getIt<ChatRepository>();
|
||||
_noteRepo = getIt<NoteRepository>();
|
||||
final aiManager = ref.read(aiProcessManagerProvider);
|
||||
if (aiManager.status == AiServerStatus.offline) {
|
||||
aiManager.startServers();
|
||||
}
|
||||
final sessions = await _repo.getAllSessions();
|
||||
return ChatState(sessions: sessions);
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Session management (unchanged)
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
Future<void> createSession() async {
|
||||
final session = await _repo.createSession();
|
||||
final sessions = await _repo.getAllSessions();
|
||||
@@ -72,28 +74,29 @@ class ChatController extends _$ChatController {
|
||||
);
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Send message (RAG + Step D)
|
||||
// -------------------------------------------------------------------------
|
||||
|
||||
Future<void> sendMessage(String content) async {
|
||||
final current = state.valueOrNull;
|
||||
if (current == null) return;
|
||||
final sessionId = await _resolveSession(current, content);
|
||||
await _persistUserMessage(sessionId, content);
|
||||
final contextChunks = await _searchKnowledgeBase(content);
|
||||
final systemPrompt = _buildSystemPrompt(contextChunks);
|
||||
final history = _buildHistory();
|
||||
final fullAiResponse = await _streamResponse(systemPrompt, history);
|
||||
await _persistAssistantResponse(sessionId, content, fullAiResponse);
|
||||
}
|
||||
|
||||
// ── 1. Resolve / create a session ─────────────────────────────────────
|
||||
String sessionId;
|
||||
if (current.activeSession == null) {
|
||||
final session = await _repo.createSession();
|
||||
sessionId = session.id;
|
||||
final sessions = await _repo.getAllSessions();
|
||||
state = AsyncValue.data(
|
||||
current.copyWith(sessions: sessions, activeSession: session),
|
||||
);
|
||||
} else {
|
||||
sessionId = current.activeSession!.id;
|
||||
}
|
||||
Future<String> _resolveSession(ChatState current, String content) async {
|
||||
if (current.activeSession != null) return current.activeSession!.id;
|
||||
final session = await _repo.createSession();
|
||||
final sessions = await _repo.getAllSessions();
|
||||
state = AsyncValue.data(
|
||||
current.copyWith(sessions: sessions, activeSession: session),
|
||||
);
|
||||
return session.id;
|
||||
}
|
||||
|
||||
// ── 2. Persist user message & show typing indicator ───────────────────
|
||||
Future<void> _persistUserMessage(String sessionId, String content) async {
|
||||
await _repo.addMessage(
|
||||
sessionId: sessionId,
|
||||
role: 'user',
|
||||
@@ -104,95 +107,196 @@ class ChatController extends _$ChatController {
|
||||
state.valueOrNull!.copyWith(
|
||||
messages: messagesAfterUser,
|
||||
isTyping: true,
|
||||
thinkingSteps: [],
|
||||
streamingContent: '',
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// ── 3. RAG: retrieve relevant chunks from the knowledge base ──────────
|
||||
// Gracefully degrades — if Nomic server is unavailable or no chunks
|
||||
// exist, the chat still works with the base system prompt alone.
|
||||
Future<List<String>> _searchKnowledgeBase(String query) async {
|
||||
final searchStep = _createStep('Searching knowledge base...');
|
||||
List<String> contextChunks = [];
|
||||
try {
|
||||
contextChunks = await _noteRepo.searchSimilar(content, topK: 3);
|
||||
} catch (_) {
|
||||
// Nomic server not running or no chunks stored — continue without RAG.
|
||||
contextChunks = await _noteRepo.searchSimilar(query, topK: 3);
|
||||
if (contextChunks.isNotEmpty) {
|
||||
_updateStep(
|
||||
searchStep.id,
|
||||
status: ThinkingStepStatus.completed,
|
||||
title: 'Found ${contextChunks.length} documents',
|
||||
details: 'Context added for assistant.',
|
||||
);
|
||||
} else {
|
||||
_updateStep(
|
||||
searchStep.id,
|
||||
status: ThinkingStepStatus.completed,
|
||||
title: 'No matching documents in knowledge base',
|
||||
details: 'Responding based on general knowledge.',
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
_updateStep(
|
||||
searchStep.id,
|
||||
status: ThinkingStepStatus.error,
|
||||
title: 'Knowledge base search error',
|
||||
details: e.toString(),
|
||||
);
|
||||
}
|
||||
return contextChunks;
|
||||
}
|
||||
|
||||
// ── 4. Build enriched system prompt (Step D) ──────────────────────────
|
||||
final systemPrompt = _buildSystemPrompt(contextChunks);
|
||||
|
||||
// Build the full conversation history so the model maintains context.
|
||||
final history = messagesAfterUser
|
||||
.map(
|
||||
(m) => <String, String>{
|
||||
'role': m.isUser ? 'user' : 'assistant',
|
||||
'content': m.content,
|
||||
},
|
||||
)
|
||||
List<Map<String, String>> _buildHistory() {
|
||||
final messages = state.valueOrNull?.messages ?? [];
|
||||
return messages
|
||||
.map((m) => <String, String>{
|
||||
'role': m.isUser ? 'user' : 'assistant',
|
||||
'content': m.content,
|
||||
})
|
||||
.toList();
|
||||
}
|
||||
|
||||
// ── 5. POST to Qwen (http://localhost:8080/v1/chat/completions) ────────
|
||||
String aiResponse;
|
||||
Future<String> _streamResponse(
|
||||
String systemPrompt,
|
||||
List<Map<String, String>> history,
|
||||
) async {
|
||||
final generateStep = _createStep('Generating response...');
|
||||
String fullAiResponse = '';
|
||||
try {
|
||||
final response = await _dio.post<Map<String, dynamic>>(
|
||||
_chatApiUrl,
|
||||
final response = await _dio.post<ResponseBody>(
|
||||
AiConstants.chatApiUrl,
|
||||
options: Options(responseType: ResponseType.stream),
|
||||
data: {
|
||||
'messages': [
|
||||
{'role': 'system', 'content': systemPrompt},
|
||||
...history,
|
||||
],
|
||||
'temperature': 0.7,
|
||||
'temperature': AiConstants.chatTemperature,
|
||||
'stream': true,
|
||||
},
|
||||
);
|
||||
aiResponse =
|
||||
response.data!['choices'][0]['message']['content'] as String;
|
||||
_updateStep(
|
||||
generateStep.id,
|
||||
status: ThinkingStepStatus.running,
|
||||
title: 'Writing...',
|
||||
);
|
||||
final stream = response.data!.stream;
|
||||
await for (final chunk in stream) {
|
||||
final textChunk = utf8.decode(chunk);
|
||||
for (final line in textChunk.split('\n')) {
|
||||
if (!line.startsWith('data: ')) continue;
|
||||
final dataStr = line.substring(6).trim();
|
||||
if (dataStr == '[DONE]') break;
|
||||
if (dataStr.isEmpty) continue;
|
||||
try {
|
||||
final data = jsonDecode(dataStr);
|
||||
final delta = data['choices']?[0]?['delta']?['content'] ?? '';
|
||||
if (delta.isNotEmpty) {
|
||||
fullAiResponse += delta;
|
||||
final updatedState = state.valueOrNull;
|
||||
if (updatedState != null) {
|
||||
state = AsyncValue.data(
|
||||
updatedState.copyWith(streamingContent: fullAiResponse),
|
||||
);
|
||||
}
|
||||
}
|
||||
} catch (_) {}
|
||||
}
|
||||
}
|
||||
_updateStep(
|
||||
generateStep.id,
|
||||
status: ThinkingStepStatus.completed,
|
||||
title: 'Response generated',
|
||||
);
|
||||
} on DioException catch (e) {
|
||||
aiResponse =
|
||||
'Could not reach the AI server (${e.message}). '
|
||||
'Make sure AI models are downloaded and the inference servers have '
|
||||
'had time to start.';
|
||||
fullAiResponse += '\n\n[AI model communication error]';
|
||||
_updateStep(
|
||||
generateStep.id,
|
||||
status: ThinkingStepStatus.error,
|
||||
title: 'Generation failed',
|
||||
details: '${e.message}',
|
||||
);
|
||||
} catch (e) {
|
||||
aiResponse = 'An unexpected error occurred: $e';
|
||||
fullAiResponse += '\n\n[Unexpected error]';
|
||||
_updateStep(
|
||||
generateStep.id,
|
||||
status: ThinkingStepStatus.error,
|
||||
title: 'Generation failed',
|
||||
details: e.toString(),
|
||||
);
|
||||
}
|
||||
return fullAiResponse;
|
||||
}
|
||||
|
||||
// ── 6. Persist response & update session title on first exchange ───────
|
||||
Future<void> _persistAssistantResponse(
|
||||
String sessionId,
|
||||
String userContent,
|
||||
String aiResponse,
|
||||
) async {
|
||||
await _repo.addMessage(
|
||||
sessionId: sessionId,
|
||||
role: 'assistant',
|
||||
content: aiResponse,
|
||||
);
|
||||
|
||||
final messagesAfterAi = await _repo.getMessages(sessionId);
|
||||
if (messagesAfterAi.length <= 2) {
|
||||
final title =
|
||||
content.length > 30 ? '${content.substring(0, 30)}…' : content;
|
||||
final title = userContent.length > 30
|
||||
? '${userContent.substring(0, 30)}…'
|
||||
: userContent;
|
||||
await _repo.updateSessionTitle(sessionId, title);
|
||||
}
|
||||
|
||||
final sessions = await _repo.getAllSessions();
|
||||
state = AsyncValue.data(
|
||||
state.valueOrNull!.copyWith(
|
||||
messages: messagesAfterAi,
|
||||
isTyping: false,
|
||||
streamingContent: null,
|
||||
thinkingSteps: [],
|
||||
sessions: sessions,
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
// -------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// -------------------------------------------------------------------------
|
||||
ThinkingStep _createStep(String title) {
|
||||
final step = ThinkingStep(
|
||||
id: const Uuid().v4(),
|
||||
title: title,
|
||||
status: ThinkingStepStatus.pending,
|
||||
);
|
||||
final current = state.valueOrNull;
|
||||
if (current != null) {
|
||||
state = AsyncValue.data(
|
||||
current.copyWith(thinkingSteps: [...current.thinkingSteps, step]),
|
||||
);
|
||||
}
|
||||
return step;
|
||||
}
|
||||
|
||||
void _updateStep(
|
||||
String id, {
|
||||
ThinkingStepStatus? status,
|
||||
String? title,
|
||||
String? details,
|
||||
}) {
|
||||
final current = state.valueOrNull;
|
||||
if (current == null) return;
|
||||
final updatedSteps = current.thinkingSteps.map((s) {
|
||||
if (s.id != id) return s;
|
||||
return s.copyWith(
|
||||
status: status ?? s.status,
|
||||
title: title ?? s.title,
|
||||
details: details ?? s.details,
|
||||
);
|
||||
}).toList();
|
||||
state = AsyncValue.data(current.copyWith(thinkingSteps: updatedSteps));
|
||||
}
|
||||
|
||||
/// Builds the system prompt, injecting RAG context when available.
|
||||
static String _buildSystemPrompt(List<String> chunks) {
|
||||
if (chunks.isEmpty) return _baseSystemPrompt;
|
||||
|
||||
if (chunks.isEmpty) return AiConstants.baseSystemPrompt;
|
||||
final contextBlock = chunks
|
||||
.asMap()
|
||||
.entries
|
||||
.map((e) => '[${e.key + 1}] ${e.value}')
|
||||
.join('\n\n');
|
||||
|
||||
return '$_baseSystemPrompt\n\n'
|
||||
return '${AiConstants.baseSystemPrompt}\n\n'
|
||||
'### Relevant notes from the trainer\'s knowledge base:\n'
|
||||
'$contextBlock\n\n'
|
||||
'Use the above context to inform your response when relevant. '
|
||||
|
||||
Reference in New Issue
Block a user