Next refactors
Some checks failed
Build Linux App / build (push) Failing after 1m18s

This commit is contained in:
Kazimierz Ciołek
2026-02-24 02:19:28 +01:00
parent 0c9eb8878d
commit 9dcc4b87de
40 changed files with 3515 additions and 2575 deletions

View File

@@ -5,56 +5,28 @@ import 'package:dio/dio.dart';
import 'package:path/path.dart' as p;
import 'package:path_provider/path_provider.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
import 'package:trainhub_flutter/core/constants/ai_constants.dart';
import 'package:trainhub_flutter/data/services/ai_process_manager.dart';
import 'package:trainhub_flutter/injection.dart' as di;
import 'package:trainhub_flutter/presentation/settings/ai_model_settings_state.dart';
part 'ai_model_settings_controller.g.dart';
// ---------------------------------------------------------------------------
// Constants
// ---------------------------------------------------------------------------
// NOTE(review): other code in this file reads equivalent values from
// AiConstants (llamaBuild, nomicModelFile, qwenModelFile, nomicModelUrl,
// qwenModelUrl) — these file-local copies look superseded; confirm and remove
// once nothing references them.
/// Pinned llama.cpp release tag used to build binary download URLs.
const _llamaBuild = 'b8130';
/// On-disk filename of the Nomic embedding model (GGUF, Q4_K_M quantization).
const _nomicModelFile = 'nomic-embed-text-v1.5.Q4_K_M.gguf';
/// On-disk filename of the Qwen 2.5 7B instruct chat model (GGUF, q4_k_m).
const _qwenModelFile = 'qwen2.5-7b-instruct-q4_k_m.gguf';
/// Hugging Face download URL for the Nomic embedding model.
const _nomicModelUrl =
'https://huggingface.co/nomic-ai/nomic-embed-text-v1.5-GGUF/resolve/main/nomic-embed-text-v1.5.Q4_K_M.gguf';
/// Hugging Face download URL for the Qwen chat model.
const _qwenModelUrl =
'https://huggingface.co/Qwen/Qwen2.5-7B-Instruct-GGUF/resolve/main/qwen2.5-7b-instruct-q4_k_m.gguf';
// ---------------------------------------------------------------------------
// Platform helpers
// ---------------------------------------------------------------------------
/// Returns the llama.cpp archive download URL for the current platform.
/// Throws [UnsupportedError] if the platform is not supported.
/// Returns the llama.cpp archive download URL for the current platform.
///
/// Resolves the release tag from [AiConstants.llamaBuild] and picks the
/// artifact matching the host OS (and, on macOS, the CPU architecture).
/// Throws [UnsupportedError] if the platform is not supported.
Future<String> _llamaArchiveUrl() async {
  final build = AiConstants.llamaBuild;
  if (Platform.isMacOS) {
    // Detect CPU architecture via `uname -m`; Apple Silicon reports `arm64`.
    final result = await Process.run('uname', ['-m']);
    final arch = (result.stdout as String).trim();
    final suffix = arch == 'arm64' ? 'macos-arm64' : 'macos-x64';
    return 'https://github.com/ggml-org/llama.cpp/releases/download/$build/llama-$build-bin-$suffix.tar.gz';
  } else if (Platform.isWindows) {
    return 'https://github.com/ggml-org/llama.cpp/releases/download/$build/llama-$build-bin-win-vulkan-x64.zip';
  } else if (Platform.isLinux) {
    return 'https://github.com/ggml-org/llama.cpp/releases/download/$build/llama-$build-bin-ubuntu-vulkan-x64.tar.gz';
  }
  throw UnsupportedError('Unsupported platform: ${Platform.operatingSystem}');
}
/// The expected llama-server binary name for the current platform.
/// The expected llama-server binary name for the current platform.
///
/// Windows executables carry an `.exe` extension; POSIX systems do not.
String get _serverBinaryName {
  if (Platform.isWindows) return 'llama-server.exe';
  return 'llama-server';
}
// ---------------------------------------------------------------------------
// Controller
// ---------------------------------------------------------------------------
@riverpod
class AiModelSettingsController extends _$AiModelSettingsController {
final _dio = Dio();
@@ -62,30 +34,21 @@ class AiModelSettingsController extends _$AiModelSettingsController {
/// Produces the initial (default) settings state for this controller.
@override
AiModelSettingsState build() {
  return const AiModelSettingsState();
}
// -------------------------------------------------------------------------
// Validation
// -------------------------------------------------------------------------
/// Checks whether all required files exist on disk and updates
/// [AiModelSettingsState.areModelsValidated].
Future<void> validateModels() async {
state = state.copyWith(
currentTask: 'Checking installed files…',
errorMessage: null,
);
try {
final dir = await getApplicationDocumentsDirectory();
final base = dir.path;
final serverBin = File(p.join(base, _serverBinaryName));
final nomicModel = File(p.join(base, _nomicModelFile));
final qwenModel = File(p.join(base, _qwenModelFile));
final validated = serverBin.existsSync() &&
final serverBin = File(p.join(base, AiConstants.serverBinaryName));
final nomicModel = File(p.join(base, AiConstants.nomicModelFile));
final qwenModel = File(p.join(base, AiConstants.qwenModelFile));
final validated =
serverBin.existsSync() &&
nomicModel.existsSync() &&
qwenModel.existsSync();
state = state.copyWith(
areModelsValidated: validated,
currentTask: validated ? 'All files present.' : 'Files missing.',
@@ -99,29 +62,22 @@ class AiModelSettingsController extends _$AiModelSettingsController {
}
}
// -------------------------------------------------------------------------
// Download & Install
// -------------------------------------------------------------------------
/// Downloads and installs the llama.cpp binary and both model files.
Future<void> downloadAll() async {
if (state.isDownloading) return;
try {
await di.getIt<AiProcessManager>().stopServers();
} catch (_) {}
state = state.copyWith(
isDownloading: true,
progress: 0.0,
areModelsValidated: false,
errorMessage: null,
);
try {
final dir = await getApplicationDocumentsDirectory();
// -- 1. llama.cpp binary -----------------------------------------------
final archiveUrl = await _llamaArchiveUrl();
final archiveExt = archiveUrl.endsWith('.zip') ? '.zip' : '.tar.gz';
final archivePath = p.join(dir.path, 'llama_binary$archiveExt');
await _downloadFile(
url: archiveUrl,
savePath: archivePath,
@@ -129,41 +85,32 @@ class AiModelSettingsController extends _$AiModelSettingsController {
overallStart: 0.0,
overallEnd: 0.2,
);
state = state.copyWith(
currentTask: 'Extracting llama.cpp binary…',
progress: 0.2,
);
await _extractBinary(archivePath, dir.path);
// Clean up the archive once extracted
final archiveFile = File(archivePath);
if (archiveFile.existsSync()) archiveFile.deleteSync();
// -- 2. Nomic embedding model ------------------------------------------
await _downloadFile(
url: _nomicModelUrl,
savePath: p.join(dir.path, _nomicModelFile),
url: AiConstants.nomicModelUrl,
savePath: p.join(dir.path, AiConstants.nomicModelFile),
taskLabel: 'Downloading Nomic embedding model…',
overallStart: 0.2,
overallEnd: 0.55,
);
// -- 3. Qwen chat model ------------------------------------------------
await _downloadFile(
url: _qwenModelUrl,
savePath: p.join(dir.path, _qwenModelFile),
url: AiConstants.qwenModelUrl,
savePath: p.join(dir.path, AiConstants.qwenModelFile),
taskLabel: 'Downloading Qwen 2.5 7B model…',
overallStart: 0.55,
overallEnd: 1.0,
);
state = state.copyWith(
isDownloading: false,
progress: 1.0,
currentTask: 'Download complete.',
);
await validateModels();
} on DioException catch (e) {
state = state.copyWith(
@@ -180,11 +127,6 @@ class AiModelSettingsController extends _$AiModelSettingsController {
}
}
// -------------------------------------------------------------------------
// Private helpers
// -------------------------------------------------------------------------
/// Downloads a single file with progress mapped into [overallStart]..[overallEnd].
Future<void> _downloadFile({
required String url,
required String savePath,
@@ -193,7 +135,6 @@ class AiModelSettingsController extends _$AiModelSettingsController {
required double overallEnd,
}) async {
state = state.copyWith(currentTask: taskLabel, progress: overallStart);
await _dio.download(
url,
savePath,
@@ -212,52 +153,62 @@ class AiModelSettingsController extends _$AiModelSettingsController {
);
}
/// Extracts the downloaded archive and moves `llama-server[.exe]` to [destDir].
/// Extracts the downloaded archive and installs `llama-server[.exe]` plus any
/// bundled shared libraries (`.dll`/`.so`/`.dylib`) into [destDir].
///
/// The archive is unpacked into a temporary directory which is always removed
/// afterwards. Throws [FileSystemException] if no server binary is found in
/// the archive, and [Exception] if an existing destination file cannot be
/// overwritten after several retries (e.g. it is locked by a running process).
Future<void> _extractBinary(String archivePath, String destDir) async {
  final extractDir = p.join(destDir, '_llama_extract_tmp');
  final extractDirObj = Directory(extractDir);
  // Start from a clean temp directory in case a previous run was interrupted.
  if (extractDirObj.existsSync()) extractDirObj.deleteSync(recursive: true);
  extractDirObj.createSync(recursive: true);
  try {
    // extractFileToDisk (package:archive) handles both .zip and .tar.gz.
    await extractFileToDisk(archivePath, extractDir);
    var foundServer = false;
    final binaryName = AiConstants.serverBinaryName;
    for (final entity in extractDirObj.listSync(recursive: true)) {
      if (entity is! File) continue;
      final ext = p.extension(entity.path).toLowerCase();
      final name = p.basename(entity.path);
      final isServer = name == binaryName;
      final isSharedLib = ext == '.dll' || ext == '.so' || ext == '.dylib';
      if (!isServer && !isSharedLib) continue;
      final destFile = p.join(destDir, name);
      // Overwriting can fail while a previously started server still holds
      // the file (common on Windows); retry a few times before giving up.
      var retryCount = 0;
      var success = false;
      while (!success && retryCount < 5) {
        try {
          if (File(destFile).existsSync()) {
            File(destFile).deleteSync();
          }
          entity.copySync(destFile);
          success = true;
        } on FileSystemException catch (_) {
          if (retryCount >= 4) {
            throw Exception(
              'Failed to overwrite $name. Ensure no other applications are using it.',
            );
          }
          await Future.delayed(const Duration(milliseconds: 500));
          retryCount++;
        }
      }
      if (isServer) {
        foundServer = true;
        // Extraction does not preserve the executable bit on POSIX systems.
        if (Platform.isMacOS || Platform.isLinux) {
          await Process.run('chmod', ['+x', destFile]);
        }
      }
    }
    if (!foundServer) {
      throw FileSystemException(
        'llama-server binary not found in archive.',
        archivePath,
      );
    }
  } finally {
    // Always clean up the temp extraction directory.
    if (extractDirObj.existsSync()) {
      extractDirObj.deleteSync(recursive: true);
    }
  }
}
/// Recursively searches [dir] for a file named [name].
File? _findFile(Directory dir, String name) {
for (final entity in dir.listSync(recursive: true)) {
if (entity is File && p.basename(entity.path) == name) {
return entity;
}
}
return null;
}
}