From 4499f8fe2d5077e94f01b56be98f167a536702c8 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 08:18:31 +0000
Subject: [PATCH 1/9] feat(pl): add full Polish localization structure and
basics content
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/e13c9d82-551e-42de-8217-3a0bec8a834a
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
android/src/main/res/values-pl/strings.xml | 18 +++++
assets/basics-pl/filesfolders.md | 66 +++++++++++++++++++
assets/basics-pl/index.txt | 28 ++++++++
assets/basics-pl/network.md | 64 ++++++++++++++++++
assets/basics-pl/systeminformation.md | 60 +++++++++++++++++
.../composeResources/values-pl/strings.xml | 36 ++++++++++
.../composeResources/values/strings.xml | 36 ++++++++++
.../linuxcommandlibrary/app/LinuxNavGraph.kt | 8 ++-
.../app/LinuxNavigationSuite.kt | 3 +-
.../app/nav/TopLevelDestination.kt | 13 ++--
.../app/ui/composables/PaneTopBar.kt | 5 +-
.../app/ui/screens/Dialogs.kt | 40 +++++++----
.../app/ui/screens/basics/BasicsPaneScreen.kt | 8 ++-
.../commanddetail/CommandDetailPane.kt | 10 ++-
.../screens/commandlist/CommandListScreen.kt | 5 +-
.../ui/screens/commands/CommandsPaneScreen.kt | 8 ++-
.../app/ui/screens/search/SearchScreen.kt | 5 +-
17 files changed, 384 insertions(+), 29 deletions(-)
create mode 100644 android/src/main/res/values-pl/strings.xml
create mode 100644 assets/basics-pl/filesfolders.md
create mode 100644 assets/basics-pl/index.txt
create mode 100644 assets/basics-pl/network.md
create mode 100644 assets/basics-pl/systeminformation.md
create mode 100644 composeApp/src/commonMain/composeResources/values-pl/strings.xml
create mode 100644 composeApp/src/commonMain/composeResources/values/strings.xml
diff --git a/android/src/main/res/values-pl/strings.xml b/android/src/main/res/values-pl/strings.xml
new file mode 100644
index 000000000..df20dcf90
--- /dev/null
+++ b/android/src/main/res/values-pl/strings.xml
@@ -0,0 +1,18 @@
+
+
+
+ Biblioteka Poleceń Linux
+ Polecenia
+ Podstawy
+ Wskazówki
+ Informacje
+ Dodaj zakładkę
+ Usuń zakładkę
+ Zwiń wszystko
+ Rozwiń wszystko
+ Szukaj
+ Wstecz
+ Resetuj
+ Udostępnij
+
+
diff --git a/assets/basics-pl/filesfolders.md b/assets/basics-pl/filesfolders.md
new file mode 100644
index 000000000..3a5a7cdf1
--- /dev/null
+++ b/assets/basics-pl/filesfolders.md
@@ -0,0 +1,66 @@
+# Pliki i foldery
+
+## Utwórz plik
+```[touch](/man/touch) [nazwaPliku]```
+```[echo](/man/echo) "" >> [nazwaPliku]```
+```> [nazwaPliku]```
+
+## Usuń plik
+```[rm](/man/rm) [nazwaPliku]```
+
+## Wyświetl zawartość pliku
+```[cat](/man/cat) [nazwaPliku]```
+```[bat](/man/bat) [nazwaPliku]```
+
+## Edytuj plik tekstowy
+```[vi](/man/vi) [nazwaPliku]```
+```[nano](/man/nano) [nazwaPliku]```
+```[emacs](/man/emacs) [nazwaPliku]```
+```[pico](/man/pico) [nazwaPliku]```
+```[ed](/man/ed) [nazwaPliku]```
+
+## Kopiuj plik lub folder
+```[cp](/man/cp) [nazwaPliku] [nowaNazwa]```
+```[cat](/man/cat) [nazwaPliku] > [nowaNazwa]```
+```[dd](/man/dd) if=[nazwaPliku] of=[nowaNazwa]```
+
+## Przenieś/Zmień nazwę pliku lub folderu
+```[mv](/man/mv) [nazwaPliku] [nowaNazwa]```
+
+## Utwórz folder
+```[mkdir](/man/mkdir) [nazwaFolderu]```
+
+## Usuń folder
+```[rm](/man/rm) -r [nazwaFolderu]```
+
+## Wyświetl zawartość folderu
+```[ls](/man/ls)```
+```[dir](/man/dir)```
+```[exa](/man/exa)```
+```[lsd](/man/lsd)```
+
+## Wyświetl aktualny katalog roboczy
+```[pwd](/man/pwd)```
+
+## Zmień katalog
+```[cd](/man/cd) [ścieżka]```
+
+## Wyświetl rozmiar pliku/folderu
+```[du](/man/du) -sh [ścieżka]```
+
+## Wyszukaj plik
+```[find](/man/find) [ścieżka] -name [wzorzec]```
+```[locate](/man/locate) [nazwaPliku]```
+
+## Utwórz dowiązanie symboliczne
+```[ln](/man/ln) -s [cel] [dowiązanie]```
+
+## Zmień uprawnienia pliku
+```[chmod](/man/chmod) [uprawnienia] [nazwaPliku]```
+
+## Zmień właściciela pliku
+```[chown](/man/chown) [użytkownik]:[grupa] [nazwaPliku]```
+
+## Wyświetl właściwości pliku
+```[stat](/man/stat) [nazwaPliku]```
+```[file](/man/file) [nazwaPliku]```
diff --git a/assets/basics-pl/index.txt b/assets/basics-pl/index.txt
new file mode 100644
index 000000000..9b2c85a8e
--- /dev/null
+++ b/assets/basics-pl/index.txt
@@ -0,0 +1,28 @@
+aitools.md
+compressionarchiving.md
+cryptocurrencies.md
+emacstexteditor.md
+filesfolders.md
+git.md
+hackingtools.md
+input.md
+json.md
+microtexteditor.md
+nanotexteditor.md
+network.md
+oneliners.md
+packagemanager.md
+picotexteditor.md
+printing.md
+regularexpressions.md
+searchfind.md
+shellscripting.md
+ssh.md
+systemcontrol.md
+systeminformation.md
+terminalgames.md
+textprocessing.md
+tmux.md
+usersgroups.md
+videoaudio.md
+vimtexteditor.md
diff --git a/assets/basics-pl/network.md b/assets/basics-pl/network.md
new file mode 100644
index 000000000..63bda443b
--- /dev/null
+++ b/assets/basics-pl/network.md
@@ -0,0 +1,64 @@
+# Sieć
+
+## Zewnętrzny adres IP
+```[curl](/man/curl) ifconfig.me```
+```[dig](/man/dig) +short myip.opendns.com @resolver1.opendns.com```
+
+## Wyświetl interfejsy sieciowe
+```[ifconfig](/man/ifconfig) -a```
+```[ip](/man/ip) a```
+```[ip](/man/ip) link show```
+```[nmcli](/man/nmcli) device status```
+
+## Pobierz plik
+```[wget](/man/wget) [url]```
+```[curl](/man/curl) -O [url]```
+```[aria2c](/man/aria2c) [url]```
+
+## Informacje o domenie
+```[whois](/man/whois) [ip]```
+
+## Ping do urządzenia
+```[ping](/man/ping) [ip]```
+```[mtr](/man/mtr) [ip]```
+
+## Nazwa hosta
+```[hostname](/man/hostname)```
+```[hostnamectl](/man/hostnamectl)```
+
+## Trasowanie pakietów
+```[traceroute](/man/traceroute) [ip]```
+```[tracepath](/man/tracepath) [ip]```
+
+## Połączenie Telnet
+```[telnet](/man/telnet) [ip] [port]```
+
+## Sprawdzenie DNS
+```[dig](/man/dig) [domena]```
+```[nslookup](/man/nslookup) [domena]```
+```[host](/man/host) [domena]```
+
+## Wyświetl otwarte gniazda
+```[netstat](/man/netstat) -l```
+```[lsof](/man/lsof) -i```
+```[ss](/man/ss) -l```
+
+## Wyświetl tablicę routingu
+```[ip](/man/ip) route```
+```[route](/man/route)```
+
+## Skanuj otwarte porty
+```[nmap](/man/nmap) [ip]```
+
+## Wyślij żądanie HTTP
+```[curl](/man/curl) [url]```
+```[httpie](/man/httpie) [url]```
+
+## Transfer plików SSH
+```[scp](/man/scp) [plik] [użytkownik]@[host]:[ścieżka]```
+```[sftp](/man/sftp) [użytkownik]@[host]```
+
+## Monitor ruchu sieciowego
+```[iftop](/man/iftop)```
+```[nethogs](/man/nethogs)```
+```[bmon](/man/bmon)```
diff --git a/assets/basics-pl/systeminformation.md b/assets/basics-pl/systeminformation.md
new file mode 100644
index 000000000..60b3cd708
--- /dev/null
+++ b/assets/basics-pl/systeminformation.md
@@ -0,0 +1,60 @@
+# Informacje o systemie
+
+## Użycie CPU w procentach
+```HZ=`[awk](/man/awk) '/\#define HZ/ { print $3}' /usr/include/asm-generic/param.h`; NUMCPUS=`[grep](/man/grep) ^proc /proc/cpuinfo | [wc](/man/wc) -l`; FIRST=`[cat](/man/cat) /proc/stat | [awk](/man/awk) '/^cpu / { print $5}'`; [sleep](/man/sleep) 1; SECOND=`[cat](/man/cat) /proc/stat | [awk](/man/awk) '/^cpu / { print $5}'`; USED=`[echo](/man/echo) 4 k 100 $SECOND $FIRST - $NUMCPUS $HZ \* / 100 \* - p | [dc](/man/dc)`; [echo](/man/echo) ${USED}` ```
+
+## Naładowanie baterii laptopa w procentach
+```[upower](/man/upower) -i $([upower](/man/upower) -e | [grep](/man/grep) BAT) | [grep](/man/grep) --color=never -E percentage | [xargs](/man/xargs) | [cut](/man/cut) -d' ' -f2 | [sed](/man/sed) 's/%//'```
+
+## Interfejsy sieciowe
+```[ifconfig](/man/ifconfig) -a```
+```[ip](/man/ip) a```
+
+## Informacje o pamięci
+```[cat](/man/cat) /proc/meminfo```
+
+## Informacje o systemie Linux
+```[uname](/man/uname) -a```
+```[neofetch](/man/neofetch)```
+```[hostnamectl](/man/hostnamectl)```
+```[lsb_release](/man/lsb_release) -a```
+
+## Urządzenia PCI
+```[lspci](/man/lspci)```
+
+## Urządzenia USB
+```[lsusb](/man/lsusb)```
+
+## Urządzenia blokowe (poza RAM) w postaci drzewa
+```[lsblk](/man/lsblk)```
+
+## Czas działania systemu
+```[uptime](/man/uptime)```
+
+## Informacje o użyciu dysku
+```[df](/man/df) -h```
+```[duf](/man/duf)```
+
+## Informacje o CPU
+```[lscpu](/man/lscpu)```
+```[cat](/man/cat) /proc/cpuinfo```
+
+## Uruchomione procesy
+```[ps](/man/ps) aux```
+```[top](/man/top)```
+```[htop](/man/htop)```
+```[btop](/man/btop)```
+
+## Użycie pamięci
+```[free](/man/free) -h```
+```[vmstat](/man/vmstat)```
+
+## Komunikaty jądra
+```[dmesg](/man/dmesg)```
+
+## Załadowane moduły jądra
+```[lsmod](/man/lsmod)```
+
+## Wyświetl partycje dysku
+```[fdisk](/man/fdisk) -l```
+```[parted](/man/parted) -l```
diff --git a/composeApp/src/commonMain/composeResources/values-pl/strings.xml b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
new file mode 100644
index 000000000..d8dfc2096
--- /dev/null
+++ b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
@@ -0,0 +1,36 @@
+
+
+
+ Biblioteka Poleceń Linux
+ Polecenia
+ Podstawy
+ Wskazówki
+ Informacje
+ Dodaj zakładkę
+ Usuń zakładkę
+ Zwiń wszystko
+ Rozwiń wszystko
+ Szukaj
+ Wstecz
+ Resetuj
+ Udostępnij
+ Kopiuj do schowka
+ Dodano do zakładek
+ Oceń aplikację
 Wesprzyj na GitHubie
+ Wesprzyj ten projekt
+ Korzystając z moich linków partnerskich do tych niesamowitych produktów.
+ Podziękowania
+ Strony man
+ Informacje licencyjne dotyczące strony man są zwykle określone na stronie szczegółów w kategorii Autor, Copyright lub Licencja.
+ Strony TLDR
+ Licencja MIT (MIT) Copyright (c) 2014 zespół TLDR i współtwórcy
+ Dziękujemy icons8.com za ikony
+ 404 nie znaleziono polecenia
+ Wybierz kategorię
+ Wybierz polecenie
+ Wersja %1$s
+ Proton Free
+ Linode Cloud
+
+
diff --git a/composeApp/src/commonMain/composeResources/values/strings.xml b/composeApp/src/commonMain/composeResources/values/strings.xml
new file mode 100644
index 000000000..ba181a703
--- /dev/null
+++ b/composeApp/src/commonMain/composeResources/values/strings.xml
@@ -0,0 +1,36 @@
+
+
+
+ Linux Command Library
+ Commands
+ Basics
+ Tips
+ Info
+ Add bookmark
+ Remove bookmark
+ Collapse all
+ Expand all
+ Search
+ Back
+ Reset
+ Share
+ Copy to clipboard
+ Bookmarked
+ Rate the app
+ Sponsor on GitHub
+ Support this project
+ By using my referral links for these amazing products.
+ Acknowledgements
+ Man pages
+ Licence information about the man page is usually specified in the man detail page under the category Author, Copyright or Licence.
+ TLDR pages
+ The MIT License (MIT) Copyright (c) 2014 the TLDR team and contributors
+ Thanks to icons8.com for the icons
+ 404 command not found
+ Select a category
+ Select a command
+ Version %1$s
+ Proton Free
+ Linode Cloud
+
+
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavGraph.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavGraph.kt
index 6df4fd103..8e3860a67 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavGraph.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavGraph.kt
@@ -21,6 +21,9 @@ import com.linuxcommandlibrary.app.nav.LinuxNavState
import com.linuxcommandlibrary.app.nav.RouteKey
import com.linuxcommandlibrary.app.nav.TabStackTop
import com.linuxcommandlibrary.app.platform.AppNavHost
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.info
+import com.linuxcommandlibrary.app.resources.tips
import com.linuxcommandlibrary.app.ui.AppIcons
import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
import com.linuxcommandlibrary.app.ui.screens.AppInfoDialog
@@ -28,6 +31,7 @@ import com.linuxcommandlibrary.app.ui.screens.basics.BasicsPaneScreen
import com.linuxcommandlibrary.app.ui.screens.commands.CommandsPaneScreen
import com.linuxcommandlibrary.app.ui.screens.tips.TipsScreen
import com.linuxcommandlibrary.app.ui.screens.tips.TipsViewModel
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.koinInject
@Composable
@@ -102,7 +106,7 @@ private fun TipsTab(state: LinuxNavState) {
var showInfo by rememberSaveable { mutableStateOf(false) }
Column(modifier = Modifier.fillMaxSize()) {
PaneTopBar(
- title = "Tips",
+ title = stringResource(Res.string.tips),
actions = {
IconButton(
modifier = Modifier.pointerHoverIcon(PointerIcon.Hand),
@@ -110,7 +114,7 @@ private fun TipsTab(state: LinuxNavState) {
) {
Icon(
imageVector = AppIcons.Info,
- contentDescription = "Info",
+ contentDescription = stringResource(Res.string.info),
)
}
},
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavigationSuite.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavigationSuite.kt
index 7b1af94a9..bc639d0f2 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavigationSuite.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/LinuxNavigationSuite.kt
@@ -25,6 +25,7 @@ import com.linuxcommandlibrary.app.nav.TopLevelDestinations
import com.linuxcommandlibrary.app.nav.route
import com.linuxcommandlibrary.app.ui.composables.rememberIconPainter
import com.linuxcommandlibrary.app.ui.theme.LocalCustomColors
+import org.jetbrains.compose.resources.stringResource
@OptIn(ExperimentalMaterial3AdaptiveApi::class)
@Composable
@@ -67,7 +68,7 @@ internal fun LinuxNavigationSuite(
modifier = Modifier.size(24.dp),
)
},
- label = { Text(dest.label) },
+ label = { Text(stringResource(dest.label)) },
colors = itemColors,
)
}
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/nav/TopLevelDestination.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/nav/TopLevelDestination.kt
index 37771d233..85983bb74 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/nav/TopLevelDestination.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/nav/TopLevelDestination.kt
@@ -1,15 +1,20 @@
package com.linuxcommandlibrary.app.nav
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.basics
+import com.linuxcommandlibrary.app.resources.commands
+import com.linuxcommandlibrary.app.resources.tips
import com.linuxcommandlibrary.app.ui.composables.AppIcon
+import org.jetbrains.compose.resources.StringResource
internal data class TopLevelDestination(
val key: RouteKey,
- val label: String,
+ val label: StringResource,
val icon: AppIcon,
)
internal val TopLevelDestinations: List = listOf(
- TopLevelDestination(RouteKey.Basics, "Basics", AppIcon.PUZZLE),
- TopLevelDestination(RouteKey.Tips, "Tips", AppIcon.IDEA),
- TopLevelDestination(RouteKey.Commands, "Commands", AppIcon.SEARCH),
+ TopLevelDestination(RouteKey.Basics, Res.string.basics, AppIcon.PUZZLE),
+ TopLevelDestination(RouteKey.Tips, Res.string.tips, AppIcon.IDEA),
+ TopLevelDestination(RouteKey.Commands, Res.string.commands, AppIcon.SEARCH),
)
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/composables/PaneTopBar.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/composables/PaneTopBar.kt
index 5a638e9ca..0437eb7ed 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/composables/PaneTopBar.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/composables/PaneTopBar.kt
@@ -16,6 +16,9 @@ import androidx.compose.ui.input.pointer.pointerHoverIcon
import androidx.compose.ui.text.style.TextOverflow
import androidx.compose.ui.unit.dp
import com.linuxcommandlibrary.app.platform.backIcon
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.back
+import org.jetbrains.compose.resources.stringResource
@OptIn(ExperimentalMaterial3Api::class)
@Composable
@@ -39,7 +42,7 @@ fun PaneTopBar(
modifier = Modifier.pointerHoverIcon(PointerIcon.Hand),
onClick = onBack,
) {
- Icon(imageVector = backIcon, contentDescription = "Back")
+ Icon(imageVector = backIcon, contentDescription = stringResource(Res.string.back))
}
}
},
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/Dialogs.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/Dialogs.kt
index 81025feb5..f1df3c5be 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/Dialogs.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/Dialogs.kt
@@ -37,13 +37,27 @@ import androidx.compose.ui.unit.dp
import androidx.compose.ui.window.Dialog
import com.linuxcommandlibrary.app.platform.showRateAppButton
import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.acknowledgements
import com.linuxcommandlibrary.app.resources.af_linode
import com.linuxcommandlibrary.app.resources.af_proton
import com.linuxcommandlibrary.app.resources.app_logo
+import com.linuxcommandlibrary.app.resources.icons_thanks
+import com.linuxcommandlibrary.app.resources.linode_content_description
+import com.linuxcommandlibrary.app.resources.man_pages
+import com.linuxcommandlibrary.app.resources.man_pages_licence
+import com.linuxcommandlibrary.app.resources.proton_content_description
+import com.linuxcommandlibrary.app.resources.rate_the_app
+import com.linuxcommandlibrary.app.resources.referral_links_description
+import com.linuxcommandlibrary.app.resources.sponsor_on_github
+import com.linuxcommandlibrary.app.resources.support_this_project
+import com.linuxcommandlibrary.app.resources.tldr_licence
+import com.linuxcommandlibrary.app.resources.tldr_pages
+import com.linuxcommandlibrary.app.resources.version_format
import com.linuxcommandlibrary.app.ui.composables.AppIcon
import com.linuxcommandlibrary.app.ui.composables.rememberIconPainter
import com.linuxcommandlibrary.shared.Version
import org.jetbrains.compose.resources.painterResource
+import org.jetbrains.compose.resources.stringResource
@Composable
fun AppInfoDialog(
@@ -76,7 +90,7 @@ fun AppInfoDialog(
fontWeight = FontWeight.Bold,
)
Text(
- "Version ${Version.APP_VERSION}",
+ stringResource(Res.string.version_format, Version.APP_VERSION),
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
)
@@ -97,7 +111,7 @@ fun AppInfoDialog(
elevation = ButtonDefaults.buttonElevation(defaultElevation = 0.dp),
colors = ButtonDefaults.buttonColors(contentColor = Color.White),
) {
- Text("Rate the app")
+ Text(stringResource(Res.string.rate_the_app))
}
}
OutlinedButton(
@@ -121,13 +135,13 @@ fun AppInfoDialog(
HorizontalDivider(color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.12f))
Spacer(Modifier.height(20.dp))
Text(
- "Support this project",
+ stringResource(Res.string.support_this_project),
style = MaterialTheme.typography.titleMedium,
fontWeight = FontWeight.SemiBold,
)
Spacer(Modifier.height(4.dp))
Text(
- "By using my referral links for these amazing products.",
+ stringResource(Res.string.referral_links_description),
style = MaterialTheme.typography.bodyMedium,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.7f),
)
@@ -145,13 +159,13 @@ fun AppInfoDialog(
modifier = Modifier.size(18.dp),
)
Spacer(Modifier.width(8.dp))
- Text("Sponsor on GitHub")
+ Text(stringResource(Res.string.sponsor_on_github))
}
Spacer(Modifier.height(12.dp))
Row(horizontalArrangement = Arrangement.spacedBy(12.dp)) {
Image(
painter = painterResource(Res.drawable.af_proton),
- contentDescription = "Proton Free",
+ contentDescription = stringResource(Res.string.proton_content_description),
modifier = Modifier
.weight(1f)
.clip(RoundedCornerShape(12.dp))
@@ -162,7 +176,7 @@ fun AppInfoDialog(
)
Image(
painter = painterResource(Res.drawable.af_linode),
- contentDescription = "Linode Cloud",
+ contentDescription = stringResource(Res.string.linode_content_description),
modifier = Modifier
.weight(1f)
.clip(RoundedCornerShape(12.dp))
@@ -176,35 +190,35 @@ fun AppInfoDialog(
Spacer(Modifier.height(24.dp))
Text(
- "Acknowledgements",
+ stringResource(Res.string.acknowledgements),
style = MaterialTheme.typography.titleMedium,
fontWeight = FontWeight.SemiBold,
)
Spacer(Modifier.height(8.dp))
Text(
- "Man pages",
+ stringResource(Res.string.man_pages),
style = MaterialTheme.typography.bodyMedium,
fontWeight = FontWeight.Medium,
)
Text(
- "Licence information about the man page is usually specified in the man detail page under the category Author, Copyright or Licence.",
+ stringResource(Res.string.man_pages_licence),
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
)
Spacer(Modifier.height(12.dp))
Text(
- "TLDR pages",
+ stringResource(Res.string.tldr_pages),
style = MaterialTheme.typography.bodyMedium,
fontWeight = FontWeight.Medium,
)
Text(
- "The MIT License (MIT) Copyright (c) 2014 the TLDR team and contributors",
+ stringResource(Res.string.tldr_licence),
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
)
Spacer(Modifier.height(12.dp))
Text(
- "Thanks to icons8.com for the icons",
+ stringResource(Res.string.icons_thanks),
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
)
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/basics/BasicsPaneScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/basics/BasicsPaneScreen.kt
index 637748800..290ddc437 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/basics/BasicsPaneScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/basics/BasicsPaneScreen.kt
@@ -27,6 +27,9 @@ import com.linuxcommandlibrary.app.NavEvent
import com.linuxcommandlibrary.app.data.BasicsRepository
import com.linuxcommandlibrary.app.nav.TabStackEntry
import com.linuxcommandlibrary.app.nav.TabStackEntryContent
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.search
+import com.linuxcommandlibrary.app.resources.select_a_category
import com.linuxcommandlibrary.app.ui.composables.InlineSearchField
import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
import com.linuxcommandlibrary.app.ui.composables.SearchOverlayBox
@@ -40,6 +43,7 @@ import com.linuxcommandlibrary.app.ui.screens.basicgroups.BasicGroupsViewModel
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.launch
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.currentKoinScope
import org.koin.compose.koinInject
import org.koin.core.parameter.parametersOf
@@ -112,7 +116,7 @@ internal fun BasicsPaneScreen(
.fillMaxSize()
.background(MaterialTheme.colorScheme.surface),
) {
- InlineSearchField(searchState = searchState, placeholder = "Search")
+ InlineSearchField(searchState = searchState, placeholder = stringResource(Res.string.search))
SearchOverlayBox(
searchState = searchState,
onNavigate = onNavigate,
@@ -146,7 +150,7 @@ internal fun BasicsPaneScreen(
contentAlignment = Alignment.Center,
) {
Text(
- text = "Select a category",
+ text = stringResource(Res.string.select_a_category),
style = MaterialTheme.typography.bodyLarge,
color = MaterialTheme.colorScheme.onSurfaceVariant,
)
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commanddetail/CommandDetailPane.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commanddetail/CommandDetailPane.kt
index 630c9efea..b4023b489 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commanddetail/CommandDetailPane.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commanddetail/CommandDetailPane.kt
@@ -14,9 +14,15 @@ import androidx.compose.ui.Modifier
import androidx.compose.ui.input.pointer.PointerIcon
import androidx.compose.ui.input.pointer.pointerHoverIcon
import com.linuxcommandlibrary.app.NavEvent
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.add_bookmark
+import com.linuxcommandlibrary.app.resources.collapse_all
+import com.linuxcommandlibrary.app.resources.expand_all
+import com.linuxcommandlibrary.app.resources.remove_bookmark
import com.linuxcommandlibrary.app.ui.composables.AppIcon
import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
import com.linuxcommandlibrary.app.ui.composables.rememberIconPainter
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.currentKoinScope
import org.koin.core.parameter.parametersOf
@@ -54,7 +60,7 @@ fun CommandDetailPane(
) {
Icon(
painter = expandPainter,
- contentDescription = if (isAllExpanded) "Collapse all" else "Expand all",
+ contentDescription = if (isAllExpanded) stringResource(Res.string.collapse_all) else stringResource(Res.string.expand_all),
)
}
IconButton(
@@ -65,7 +71,7 @@ fun CommandDetailPane(
) {
Icon(
painter = bookmarkPainter,
- contentDescription = if (uiState.isBookmarked) "Remove bookmark" else "Add bookmark",
+ contentDescription = if (uiState.isBookmarked) stringResource(Res.string.remove_bookmark) else stringResource(Res.string.add_bookmark),
)
}
},
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commandlist/CommandListScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commandlist/CommandListScreen.kt
index e4847a632..713aa2f3d 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commandlist/CommandListScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commandlist/CommandListScreen.kt
@@ -18,6 +18,8 @@ import androidx.compose.ui.input.pointer.pointerHoverIcon
import com.linuxcommandlibrary.app.NavEvent
import com.linuxcommandlibrary.app.data.CommandInfo
import com.linuxcommandlibrary.app.platform.showFastScrollBar
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.bookmarked
import com.linuxcommandlibrary.app.ui.composables.AppIcon
import com.linuxcommandlibrary.app.ui.composables.FastScrollBar
import com.linuxcommandlibrary.app.ui.composables.HighlightedText
@@ -25,6 +27,7 @@ import com.linuxcommandlibrary.app.ui.composables.WithScrollbar
import com.linuxcommandlibrary.app.ui.composables.debouncedClickable
import com.linuxcommandlibrary.app.ui.composables.rememberIconPainter
import com.linuxcommandlibrary.app.ui.composables.selectableListItemColors
+import org.jetbrains.compose.resources.stringResource
@Composable
fun CommandListScreen(
@@ -109,7 +112,7 @@ fun CommandListItem(
{
Icon(
painter = bookmarkPainter,
- contentDescription = "Bookmarked",
+ contentDescription = stringResource(Res.string.bookmarked),
)
}
} else {
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commands/CommandsPaneScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commands/CommandsPaneScreen.kt
index 8aa9e4c0e..228d2c78e 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commands/CommandsPaneScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/commands/CommandsPaneScreen.kt
@@ -24,6 +24,9 @@ import androidx.compose.ui.Modifier
import com.linuxcommandlibrary.app.NavEvent
import com.linuxcommandlibrary.app.nav.TabStackEntry
import com.linuxcommandlibrary.app.nav.TabStackEntryContent
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.search
+import com.linuxcommandlibrary.app.resources.select_a_command
import com.linuxcommandlibrary.app.ui.composables.InlineSearchField
import com.linuxcommandlibrary.app.ui.composables.SearchOverlayBox
import com.linuxcommandlibrary.app.ui.composables.SearchState
@@ -33,6 +36,7 @@ import com.linuxcommandlibrary.app.ui.screens.commandlist.CommandListViewModel
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.launch
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.koinInject
@OptIn(ExperimentalMaterial3AdaptiveApi::class)
@@ -113,7 +117,7 @@ internal fun CommandsPaneScreen(
} else {
val selected = navigator.currentDestination?.contentKey
if (selected == null) {
- EmptyDetailPlaceholder("Select a command")
+ EmptyDetailPlaceholder(stringResource(Res.string.select_a_command))
} else {
CommandDetailPane(
commandName = selected,
@@ -145,7 +149,7 @@ private fun CommandsListPane(
.fillMaxSize()
.background(MaterialTheme.colorScheme.surface),
) {
- InlineSearchField(searchState = searchState, placeholder = "Search")
+ InlineSearchField(searchState = searchState, placeholder = stringResource(Res.string.search))
SearchOverlayBox(
searchState = searchState,
onNavigate = onNavigate,
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/search/SearchScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/search/SearchScreen.kt
index 3ed848c9b..e91a8c843 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/search/SearchScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/search/SearchScreen.kt
@@ -28,6 +28,8 @@ import androidx.compose.ui.unit.dp
import com.linuxcommandlibrary.app.NavEvent
import com.linuxcommandlibrary.app.data.BasicGroup
import com.linuxcommandlibrary.app.data.BasicGroupMatch
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.command_not_found
import com.linuxcommandlibrary.app.ui.composables.HighlightedText
import com.linuxcommandlibrary.app.ui.composables.WithScrollbar
import com.linuxcommandlibrary.app.ui.composables.debouncedClickable
@@ -35,6 +37,7 @@ import com.linuxcommandlibrary.app.ui.composables.getIconId
import com.linuxcommandlibrary.app.ui.composables.rememberIconPainter
import com.linuxcommandlibrary.app.ui.composables.selectableListItemColors
import com.linuxcommandlibrary.app.ui.screens.commandlist.CommandListItem
+import org.jetbrains.compose.resources.stringResource
@OptIn(ExperimentalComposeUiApi::class)
@Composable
@@ -86,7 +89,7 @@ fun SearchContent(
.clickable(enabled = false, onClick = {})
.background(MaterialTheme.colorScheme.background),
) {
- Text("404 command not found", modifier = Modifier.align(Alignment.Center))
+ Text(stringResource(Res.string.command_not_found), modifier = Modifier.align(Alignment.Center))
}
} else {
WithScrollbar(
From 020b9f25aa0f59e57d3a1d351c734f646d59257a Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 08:27:11 +0000
Subject: [PATCH 2/9] feat(ai): add MCP + JSON-RPC 2.0 AI layer module with LLM
provider abstraction, tool registry, chat domain and UI
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/e13c9d82-551e-42de-8217-3a0bec8a834a
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
ai/build.gradle.kts | 88 ++++++
ai/src/androidMain/AndroidManifest.xml | 2 +
.../linuxcommandlibrary/ai/chat/ChatModels.kt | 20 ++
.../ai/chat/ChatRepository.kt | 160 ++++++++++
.../ai/chat/ChatViewModel.kt | 138 +++++++++
.../com/linuxcommandlibrary/ai/di/AiModule.kt | 54 ++++
.../ai/jsonrpc/JsonRpcId.kt | 19 ++
.../ai/jsonrpc/JsonRpcModels.kt | 66 +++++
.../ai/jsonrpc/JsonRpcSerializer.kt | 24 ++
.../linuxcommandlibrary/ai/llm/LlmModels.kt | 81 ++++++
.../linuxcommandlibrary/ai/llm/LlmProvider.kt | 33 +++
.../ai/llm/OllamaProvider.kt | 119 ++++++++
.../ai/llm/OpenAiProvider.kt | 209 +++++++++++++
.../linuxcommandlibrary/ai/mcp/McpClient.kt | 139 +++++++++
.../linuxcommandlibrary/ai/mcp/McpModels.kt | 173 +++++++++++
.../linuxcommandlibrary/ai/mcp/McpServer.kt | 115 ++++++++
.../ai/tools/LinuxLibraryToolRegistry.kt | 155 ++++++++++
.../LinuxApplication.kt | 3 +-
composeApp/build.gradle.kts | 1 +
.../linuxcommandlibrary/app/ui/AppIcons.kt | 20 ++
.../app/ui/screens/aichat/AiChatScreen.kt | 275 ++++++++++++++++++
.../app/ui/screens/aichat/AiSettingsScreen.kt | 175 +++++++++++
.../kotlin/com/linuxcommandlibrary/Main.kt | 3 +-
gradle/libs.versions.toml | 8 +
settings.gradle.kts | 2 +-
25 files changed, 2079 insertions(+), 3 deletions(-)
create mode 100644 ai/build.gradle.kts
create mode 100644 ai/src/androidMain/AndroidManifest.xml
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatModels.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/di/AiModule.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcId.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcModels.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcSerializer.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OllamaProvider.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpModels.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpServer.kt
create mode 100644 ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
create mode 100644 composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
create mode 100644 composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
diff --git a/ai/build.gradle.kts b/ai/build.gradle.kts
new file mode 100644
index 000000000..9c51cde60
--- /dev/null
+++ b/ai/build.gradle.kts
@@ -0,0 +1,88 @@
+import org.jetbrains.kotlin.gradle.dsl.JvmTarget
+
+plugins {
+ alias(libs.plugins.kotlin.multiplatform)
+ alias(libs.plugins.android.kotlin.multiplatform.library)
+ alias(libs.plugins.kotlin.serialization)
+}
+
+group = "com.linuxcommandlibrary"
+
+kotlin {
+ android {
+ namespace = "com.linuxcommandlibrary.ai"
+ compileSdk =
+ libs.versions.android.compileSdk
+ .get()
+ .toInt()
+ minSdk =
+ libs.versions.android.minSdk
+ .get()
+ .toInt()
+ compilerOptions {
+ jvmTarget.set(JvmTarget.JVM_17)
+ }
+ withHostTest {}
+ }
+
+ jvm("desktop") {
+ compilerOptions {
+ jvmTarget.set(JvmTarget.JVM_17)
+ }
+ }
+
+ listOf(
+ iosX64(),
+ iosArm64(),
+ iosSimulatorArm64(),
+ ).forEach {
+ it.binaries.framework {
+ baseName = "AiModule"
+ isStatic = true
+ }
+ }
+
+ sourceSets {
+ commonMain {
+ dependencies {
+ api(project(":viewmodels"))
+ implementation(libs.kotlinx.coroutines.core)
+ implementation(libs.kotlinx.serialization.json)
+ implementation(libs.ktor.client.core)
+ implementation(libs.ktor.client.content.negotiation)
+ implementation(libs.ktor.serialization.kotlinx.json)
+ implementation(libs.koin.core)
+ }
+ }
+
+ commonTest {
+ dependencies {
+ implementation(kotlin("test"))
+ }
+ }
+
+ androidMain {
+ dependencies {
+ implementation(libs.ktor.client.okhttp)
+ implementation(libs.koin.android)
+ }
+ }
+
+ val desktopMain by getting {
+ dependencies {
+ implementation(libs.ktor.client.java)
+ }
+ }
+
+ val iosMain by creating {
+ dependsOn(commonMain.get())
+ dependencies {
+ implementation(libs.ktor.client.darwin)
+ }
+ }
+
+ listOf(iosX64Main, iosArm64Main, iosSimulatorArm64Main).forEach {
+ it.dependsOn(iosMain)
+ }
+ }
+}
diff --git a/ai/src/androidMain/AndroidManifest.xml b/ai/src/androidMain/AndroidManifest.xml
new file mode 100644
index 000000000..8072ee00d
--- /dev/null
+++ b/ai/src/androidMain/AndroidManifest.xml
@@ -0,0 +1,2 @@
+<manifest xmlns:android="http://schemas.android.com/apk/res/android" />
+
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatModels.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatModels.kt
new file mode 100644
index 000000000..47f71c626
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatModels.kt
@@ -0,0 +1,20 @@
+package com.linuxcommandlibrary.ai.chat
+
+import com.linuxcommandlibrary.ai.llm.LlmRole
+
+/** A single message in the AI chat conversation. */
+data class ChatMessage(
+ val id: String,
+ val role: LlmRole,
+ val content: String,
+ val isStreaming: Boolean = false,
+ val isError: Boolean = false,
+)
+
+/** Current state of the AI chat conversation. */
+data class ChatUiState(
+ val messages: List<ChatMessage> = emptyList(),
+ val isWaitingForResponse: Boolean = false,
+ val errorMessage: String? = null,
+ val isConfigured: Boolean = false,
+)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
new file mode 100644
index 000000000..e9b77d21c
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -0,0 +1,160 @@
+package com.linuxcommandlibrary.ai.chat
+
+import com.linuxcommandlibrary.ai.llm.LlmConfig
+import com.linuxcommandlibrary.ai.llm.LlmMessage
+import com.linuxcommandlibrary.ai.llm.LlmProvider
+import com.linuxcommandlibrary.ai.llm.LlmProviderType
+import com.linuxcommandlibrary.ai.llm.LlmRole
+import com.linuxcommandlibrary.ai.llm.LlmToolCall
+import com.linuxcommandlibrary.ai.llm.OllamaProvider
+import com.linuxcommandlibrary.ai.llm.OpenAiProvider
+import com.linuxcommandlibrary.ai.tools.LinuxLibraryToolRegistry
+import com.linuxcommandlibrary.shared.platform.PreferencesStorage
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.emptyFlow
+
+/**
+ * Orchestrates conversation between the user and the configured LLM provider.
+ *
+ * Implements an **agentic loop**: after each assistant turn, if the model requested
+ * tool calls, this repository executes them via [LinuxLibraryToolRegistry], appends the
+ * results as [LlmRole.TOOL] messages, and re-invokes the model — repeating until no
+ * further tool calls are requested.
+ */
+class ChatRepository(
+ private val toolRegistry: LinuxLibraryToolRegistry,
+ private val preferences: PreferencesStorage,
+) {
+ companion object {
+ private const val KEY_PROVIDER_TYPE = "ai_provider_type"
+ private const val KEY_API_KEY = "ai_api_key"
+ private const val KEY_BASE_URL = "ai_base_url"
+ private const val KEY_MODEL_ID = "ai_model_id"
+
+ /** Default system prompt injected into every conversation. */
+ const val SYSTEM_PROMPT = """You are a helpful Linux assistant built into the Linux Command Library app.
+You have access to tools that let you search the command database and Linux basics guide.
+Always prefer using the provided tools to give accurate, up-to-date answers.
+Keep responses concise and practical. Use code blocks for commands."""
+ }
+
+ // ──────────────────────────────────────────────
+ // Configuration
+ // ──────────────────────────────────────────────
+
+ fun saveConfig(config: LlmConfig) {
+ preferences.putString(KEY_PROVIDER_TYPE, config.providerType.name)
+ preferences.putString(KEY_API_KEY, config.apiKey)
+ preferences.putString(KEY_BASE_URL, config.baseUrl)
+ preferences.putString(KEY_MODEL_ID, config.modelId)
+ }
+
+ fun loadConfig(): LlmConfig {
+ val typeName = preferences.getString(KEY_PROVIDER_TYPE, LlmProviderType.OPENAI.name)
+ val type = LlmProviderType.entries.firstOrNull { it.name == typeName } ?: LlmProviderType.OPENAI
+ return LlmConfig(
+ providerType = type,
+ apiKey = preferences.getString(KEY_API_KEY, ""),
+ baseUrl = preferences.getString(KEY_BASE_URL, ""),
+ modelId = preferences.getString(KEY_MODEL_ID, ""),
+ )
+ }
+
+ fun isConfigured(): Boolean {
+ val config = loadConfig()
+ return when (config.providerType) {
+ LlmProviderType.OLLAMA -> config.baseUrl.isNotBlank() || config.providerType.defaultBaseUrl.isNotBlank()
+ else -> config.apiKey.isNotBlank()
+ }
+ }
+
+ // ──────────────────────────────────────────────
+ // Sending messages
+ // ──────────────────────────────────────────────
+
+ /**
+ * Send a user message and return the assistant's complete response.
+ * Executes the agentic tool-call loop internally.
+ *
+ * @param history Full conversation history (system prompt excluded – it is prepended here).
+ * @param userMessage Latest user text.
+ */
+ suspend fun sendMessage(
+ history: List<LlmMessage>,
+ userMessage: String,
+ ): Result<String> = runCatching {
+ val provider = buildProvider()
+ val tools = toolRegistry.allTools()
+
+ val messages = buildList {
+ add(LlmMessage(role = LlmRole.SYSTEM, content = SYSTEM_PROMPT))
+ addAll(history)
+ add(LlmMessage(role = LlmRole.USER, content = userMessage))
+ }.toMutableList()
+
+ // Agentic loop – execute tool calls until the model produces a final text response
+ repeat(MAX_TOOL_CALL_ROUNDS) {
+ val response = provider.chat(messages, tools)
+ if (response.toolCalls.isEmpty()) {
+ return@runCatching response.content
+ }
+ // Append the assistant tool-call turn
+ messages.add(LlmMessage(role = LlmRole.ASSISTANT, content = response.content))
+ // Execute each tool and append results
+ response.toolCalls.forEach { call ->
+ val result = executeToolCall(call)
+ messages.add(
+ LlmMessage(
+ role = LlmRole.TOOL,
+ content = result,
+ toolCallId = call.id,
+ toolName = call.name,
+ ),
+ )
+ }
+ }
+ error("Max tool call rounds ($MAX_TOOL_CALL_ROUNDS) exceeded without a final response.")
+ }
+
+ /**
+ * Stream the assistant response token by token.
+ * Note: streaming does not support tool calls in this implementation.
+ */
+ fun streamMessage(
+ history: List<LlmMessage>,
+ userMessage: String,
+ ): Flow<String> {
+ val provider = runCatching { buildProvider() }.getOrNull() ?: return emptyFlow()
+ val messages = buildList {
+ add(LlmMessage(role = LlmRole.SYSTEM, content = SYSTEM_PROMPT))
+ addAll(history)
+ add(LlmMessage(role = LlmRole.USER, content = userMessage))
+ }
+ return provider.stream(messages, emptyList())
+ }
+
+ // ──────────────────────────────────────────────
+ // Internal helpers
+ // ──────────────────────────────────────────────
+
+ private fun buildProvider(): LlmProvider {
+ val config = loadConfig()
+ return when (config.providerType) {
+ LlmProviderType.OLLAMA -> OllamaProvider(config)
+ else -> OpenAiProvider(config)
+ }
+ }
+
+ private suspend fun executeToolCall(call: LlmToolCall): String {
+ val result = runCatching {
+ toolRegistry.call(call.name, call.arguments)
+ }.getOrElse { e ->
+ return "Tool '${call.name}' failed: ${e.message}"
+ }
+ return result.content.joinToString("\n") { it.text ?: "" }
+ }
+
+ // Kotlin allows only one companion object per class (one is declared above),
+ // so the agentic-loop round limit is kept as a private property instead.
+ private val MAX_TOOL_CALL_ROUNDS = 5
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
new file mode 100644
index 000000000..542aa9153
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
@@ -0,0 +1,138 @@
+package com.linuxcommandlibrary.ai.chat
+
+import com.linuxcommandlibrary.ai.llm.LlmConfig
+import com.linuxcommandlibrary.ai.llm.LlmMessage
+import com.linuxcommandlibrary.ai.llm.LlmRole
+import kotlinx.coroutines.CoroutineScope
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.Job
+import kotlinx.coroutines.flow.MutableStateFlow
+import kotlinx.coroutines.flow.StateFlow
+import kotlinx.coroutines.flow.asStateFlow
+import kotlinx.coroutines.flow.update
+import kotlinx.coroutines.launch
+import kotlin.uuid.ExperimentalUuidApi
+import kotlin.uuid.Uuid
+
+/**
+ * ViewModel for the AI chat screen.
+ *
+ * Manages conversation history, sends user messages through [ChatRepository]
+ * (which handles the agentic tool-call loop), and exposes [uiState] to the UI.
+ */
+@OptIn(ExperimentalUuidApi::class)
+class ChatViewModel(
+ private val chatRepository: ChatRepository,
+ private val scope: CoroutineScope,
+) {
+
+ private val _uiState = MutableStateFlow(
+ ChatUiState(isConfigured = chatRepository.isConfigured()),
+ )
+ val uiState: StateFlow<ChatUiState> = _uiState.asStateFlow()
+
+ private val _history = mutableListOf<LlmMessage>()
+ private var streamingJob: Job? = null
+
+ // ──────────────────────────────────────────────
+ // Public API
+ // ──────────────────────────────────────────────
+
+ /** Send a new user message. */
+ fun sendMessage(text: String) {
+ if (text.isBlank()) return
+ if (!chatRepository.isConfigured()) {
+ _uiState.update { it.copy(errorMessage = "AI provider not configured. Please open Settings and add an API key.") }
+ return
+ }
+
+ val userMessageId = Uuid.random().toString()
+ val assistantMessageId = Uuid.random().toString()
+
+ _uiState.update { state ->
+ state.copy(
+ messages = state.messages + ChatMessage(
+ id = userMessageId,
+ role = LlmRole.USER,
+ content = text,
+ ),
+ isWaitingForResponse = true,
+ errorMessage = null,
+ )
+ }
+
+ streamingJob = scope.launch(Dispatchers.Default) {
+ // Add streaming placeholder
+ _uiState.update { state ->
+ state.copy(
+ messages = state.messages + ChatMessage(
+ id = assistantMessageId,
+ role = LlmRole.ASSISTANT,
+ content = "",
+ isStreaming = true,
+ ),
+ )
+ }
+
+ chatRepository.sendMessage(_history.toList(), text)
+ .onSuccess { responseText ->
+ _history.add(LlmMessage(role = LlmRole.USER, content = text))
+ _history.add(LlmMessage(role = LlmRole.ASSISTANT, content = responseText))
+
+ _uiState.update { state ->
+ state.copy(
+ messages = state.messages.map { msg ->
+ if (msg.id == assistantMessageId) {
+ msg.copy(content = responseText, isStreaming = false)
+ } else {
+ msg
+ }
+ },
+ isWaitingForResponse = false,
+ )
+ }
+ }
+ .onFailure { e ->
+ _uiState.update { state ->
+ state.copy(
+ messages = state.messages.map { msg ->
+ if (msg.id == assistantMessageId) {
+ msg.copy(
+ content = "Error: ${e.message}",
+ isStreaming = false,
+ isError = true,
+ )
+ } else {
+ msg
+ }
+ },
+ isWaitingForResponse = false,
+ errorMessage = e.message,
+ )
+ }
+ }
+ }
+ }
+
+ /** Clear conversation history. */
+ fun clearConversation() {
+ streamingJob?.cancel()
+ _history.clear()
+ _uiState.update {
+ ChatUiState(isConfigured = chatRepository.isConfigured())
+ }
+ }
+
+ /** Update provider configuration and refresh the configured state. */
+ fun saveConfig(config: LlmConfig) {
+ chatRepository.saveConfig(config)
+ _uiState.update { it.copy(isConfigured = chatRepository.isConfigured()) }
+ }
+
+ /** Load current configuration. */
+ fun loadConfig(): LlmConfig = chatRepository.loadConfig()
+
+ fun dismissError() {
+ _uiState.update { it.copy(errorMessage = null) }
+ }
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/di/AiModule.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/di/AiModule.kt
new file mode 100644
index 000000000..6355e4413
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/di/AiModule.kt
@@ -0,0 +1,54 @@
+package com.linuxcommandlibrary.ai.di
+
+import com.linuxcommandlibrary.ai.chat.ChatRepository
+import com.linuxcommandlibrary.ai.chat.ChatViewModel
+import com.linuxcommandlibrary.ai.mcp.McpClient
+import com.linuxcommandlibrary.ai.mcp.McpServer
+import com.linuxcommandlibrary.ai.mcp.McpServerInfo
+import com.linuxcommandlibrary.ai.tools.LinuxLibraryToolRegistry
+import org.koin.dsl.module
+
+/**
+ * Koin module for the AI layer.
+ *
+ * Provides:
+ * - [LinuxLibraryToolRegistry] — MCP tools backed by the command/basics repositories
+ * - [McpServer] — in-process MCP server for tool dispatch
+ * - [ChatRepository] — high-level chat orchestrator (LLM + agentic tool loop)
+ * - [ChatViewModel] — UI state manager for the AI chat screen
+ *
+ * [McpClient] is *not* registered as a singleton because each remote endpoint
+ * requires a distinct URL; create instances on demand where needed.
+ */
+val aiModule = module {
+ single {
+ LinuxLibraryToolRegistry(
+ commandsRepository = get(),
+ basicsRepository = get(),
+ )
+ }
+
+ single {
+ McpServer(
+ toolRegistry = get(),
+ serverInfo = McpServerInfo(
+ name = "linux-command-library-mcp",
+ version = "1.0.0",
+ ),
+ )
+ }
+
+ single {
+ ChatRepository(
+ toolRegistry = get(),
+ preferences = get(),
+ )
+ }
+
+ factory {
+ ChatViewModel(
+ chatRepository = get(),
+ scope = get(),
+ )
+ }
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcId.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcId.kt
new file mode 100644
index 000000000..486982fbb
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcId.kt
@@ -0,0 +1,19 @@
+package com.linuxcommandlibrary.ai.jsonrpc
+
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.JsonPrimitive
+import kotlinx.serialization.json.buildJsonObject
+import kotlinx.serialization.json.put
+
+/**
+ * Polymorphic JSON-RPC request ID – can be a string, integer, or null (notification).
+ * Represented as a raw [JsonElement] so string and numeric IDs round-trip unchanged.
+ */
+typealias JsonRpcId = JsonElement
+
+/** Convenience builder for a string-keyed request ID. */
+fun jsonRpcId(value: String): JsonRpcId = JsonPrimitive(value)
+
+/** Convenience builder for an integer-keyed request ID. */
+fun jsonRpcId(value: Long): JsonRpcId = JsonPrimitive(value)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcModels.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcModels.kt
new file mode 100644
index 000000000..cbd8d373e
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcModels.kt
@@ -0,0 +1,66 @@
+package com.linuxcommandlibrary.ai.jsonrpc
+
+import kotlinx.serialization.DeserializationStrategy
+import kotlinx.serialization.KSerializer
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonContentPolymorphicSerializer
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.jsonObject
+
+/**
+ * JSON-RPC 2.0 request message.
+ * https://www.jsonrpc.org/specification
+ */
+@Serializable
+data class JsonRpcRequest(
+ val jsonrpc: String = JSON_RPC_VERSION,
+ val method: String,
+ val params: JsonElement? = null,
+ val id: JsonRpcId? = null,
+)
+
+/**
+ * JSON-RPC 2.0 response message (result or error, never both).
+ */
+@Serializable
+data class JsonRpcResponse(
+ val jsonrpc: String = JSON_RPC_VERSION,
+ val result: JsonElement? = null,
+ val error: JsonRpcError? = null,
+ val id: JsonRpcId? = null,
+)
+
+/**
+ * JSON-RPC 2.0 notification (request without id – no response expected).
+ */
+@Serializable
+data class JsonRpcNotification(
+ val jsonrpc: String = JSON_RPC_VERSION,
+ val method: String,
+ val params: JsonElement? = null,
+)
+
+/**
+ * JSON-RPC 2.0 error object.
+ */
+@Serializable
+data class JsonRpcError(
+ val code: Int,
+ val message: String,
+ val data: JsonElement? = null,
+)
+
+/**
+ * Standard JSON-RPC 2.0 error codes.
+ */
+object JsonRpcErrorCode {
+ const val PARSE_ERROR = -32700
+ const val INVALID_REQUEST = -32600
+ const val METHOD_NOT_FOUND = -32601
+ const val INVALID_PARAMS = -32602
+ const val INTERNAL_ERROR = -32603
+}
+
+const val JSON_RPC_VERSION = "2.0"
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcSerializer.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcSerializer.kt
new file mode 100644
index 000000000..11b5762cb
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/jsonrpc/JsonRpcSerializer.kt
@@ -0,0 +1,24 @@
+package com.linuxcommandlibrary.ai.jsonrpc
+
+import kotlinx.serialization.json.Json
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.decodeFromJsonElement
+import kotlinx.serialization.json.encodeToJsonElement
+
+/**
+ * Shared [Json] instance configured for JSON-RPC / MCP use.
+ * - Unknown keys are ignored for forward-compatibility.
+ * - Null-valued fields are omitted from the output (`explicitNulls = false`).
+ */
+val JsonRpcJson = Json {
+ ignoreUnknownKeys = true
+ encodeDefaults = false
+ isLenient = true
+ explicitNulls = false
+}
+
+/** Encode any serializable object to [JsonElement] using [JsonRpcJson]. */
+inline fun <reified T> T.toJsonElement(): JsonElement = JsonRpcJson.encodeToJsonElement(this)
+
+/** Decode a [JsonElement] to a concrete type using [JsonRpcJson]. */
+inline fun <reified T> JsonElement.fromJsonElement(): T = JsonRpcJson.decodeFromJsonElement(this)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
new file mode 100644
index 000000000..55433dd4c
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
@@ -0,0 +1,81 @@
+package com.linuxcommandlibrary.ai.llm
+
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonObject
+
+// ──────────────────────────────────────────────
+// Message roles
+// ──────────────────────────────────────────────
+
+enum class LlmRole { SYSTEM, USER, ASSISTANT, TOOL }
+
+// ──────────────────────────────────────────────
+// Conversation messages
+// ──────────────────────────────────────────────
+
+/**
+ * A single turn in a conversation, with an optional tool-call result.
+ */
+data class LlmMessage(
+ val role: LlmRole,
+ val content: String,
+ /** Present when this message carries a tool-call result (role == TOOL). */
+ val toolCallId: String? = null,
+ val toolName: String? = null,
+)
+
+// ──────────────────────────────────────────────
+// Response
+// ──────────────────────────────────────────────
+
+/**
+ * Complete response from [LlmProvider.chat].
+ *
+ * If the model decided to call one or more tools, [toolCalls] will be non-empty and
+ * [content] may be blank. The caller should execute the tool calls, append the results
+ * as [LlmRole.TOOL] messages, and re-invoke [LlmProvider.chat] in a loop until
+ * [toolCalls] is empty (agentic loop).
+ */
+data class LlmResponse(
+ val content: String,
+ val toolCalls: List<LlmToolCall> = emptyList(),
+ val finishReason: LlmFinishReason = LlmFinishReason.STOP,
+)
+
+/**
+ * A single tool-call instruction from the model.
+ */
+data class LlmToolCall(
+ val id: String,
+ val name: String,
+ val arguments: JsonObject,
+)
+
+enum class LlmFinishReason {
+ STOP,
+ TOOL_CALLS,
+ LENGTH,
+ ERROR,
+}
+
+// ──────────────────────────────────────────────
+// Provider configuration
+// ──────────────────────────────────────────────
+
+/**
+ * Runtime configuration for an [LlmProvider].
+ * Persisted in [PreferencesStorage] by [ChatRepository].
+ */
+data class LlmConfig(
+ val providerType: LlmProviderType,
+ val apiKey: String = "",
+ val baseUrl: String = "",
+ val modelId: String = "",
+)
+
+enum class LlmProviderType(val displayName: String, val defaultBaseUrl: String, val defaultModel: String) {
+ OPENAI("OpenAI", "https://api.openai.com/v1", "gpt-4o-mini"),
+ OLLAMA("Ollama (local)", "http://localhost:11434", "llama3.2"),
+ OPENAI_COMPATIBLE("OpenAI-compatible", "", ""),
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
new file mode 100644
index 000000000..66750f3c3
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
@@ -0,0 +1,33 @@
+package com.linuxcommandlibrary.ai.llm
+
+import com.linuxcommandlibrary.ai.mcp.McpTool
+import kotlinx.coroutines.flow.Flow
+
+/**
+ * Platform-agnostic abstraction for a Large Language Model provider.
+ *
+ * Implementations:
+ * - [OpenAiProvider] — OpenAI-compatible REST API (OpenAI, Groq, LocalAI, LM Studio …)
+ * - [OllamaProvider] — Ollama local inference server
+ */
+interface LlmProvider {
+
+ /** Human-readable display name (shown in settings UI). */
+ val name: String
+
+ /**
+ * Send a list of messages and receive a complete response.
+ *
+ * @param messages Conversation history including the latest user turn.
+ * @param tools Optional MCP tools the model may call.
+ * @return The assistant's response (may include tool-call requests).
+ */
+ suspend fun chat(messages: List<LlmMessage>, tools: List<McpTool> = emptyList()): LlmResponse
+
+ /**
+ * Stream the assistant response token by token.
+ *
+ * @return [Flow] of incremental text deltas; collect until the flow completes.
+ */
+ fun stream(messages: List<LlmMessage>, tools: List<McpTool> = emptyList()): Flow<String>
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OllamaProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OllamaProvider.kt
new file mode 100644
index 000000000..6a944e8bd
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OllamaProvider.kt
@@ -0,0 +1,119 @@
+package com.linuxcommandlibrary.ai.llm
+
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
+import com.linuxcommandlibrary.ai.mcp.McpTool
+import io.ktor.client.HttpClient
+import io.ktor.client.call.body
+import io.ktor.client.plugins.contentnegotiation.ContentNegotiation
+import io.ktor.client.request.post
+import io.ktor.client.request.preparePost
+import io.ktor.client.request.setBody
+import io.ktor.client.statement.bodyAsChannel
+import io.ktor.http.ContentType
+import io.ktor.http.contentType
+import io.ktor.serialization.kotlinx.json.json
+import io.ktor.utils.io.readUTF8Line
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.flow
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
+
+/**
+ * LLM provider implementation for Ollama local inference server.
+ *
+ * Default endpoint: http://localhost:11434
+ * Docs: https://github.com/ollama/ollama/blob/main/docs/api.md
+ */
+class OllamaProvider(
+ private val config: LlmConfig,
+ httpClient: HttpClient? = null,
+) : LlmProvider {
+
+ override val name: String get() = config.providerType.displayName
+
+ private val baseUrl: String = config.baseUrl.ifBlank { LlmProviderType.OLLAMA.defaultBaseUrl }
+ private val modelId: String = config.modelId.ifBlank { LlmProviderType.OLLAMA.defaultModel }
+
+ private val client: HttpClient = httpClient ?: HttpClient {
+ install(ContentNegotiation) { json(JsonRpcJson) }
+ }
+
+ override suspend fun chat(messages: List<LlmMessage>, tools: List<McpTool>): LlmResponse {
+ val request = OllamaChatRequest(
+ model = modelId,
+ messages = messages.toOllama(),
+ stream = false,
+ )
+ val response: OllamaChatResponse = client.post("$baseUrl/api/chat") {
+ contentType(ContentType.Application.Json)
+ setBody(request)
+ }.body()
+
+ return LlmResponse(
+ content = response.message.content,
+ finishReason = if (response.done) LlmFinishReason.STOP else LlmFinishReason.LENGTH,
+ )
+ }
+
+ override fun stream(messages: List<LlmMessage>, tools: List<McpTool>): Flow<String> = flow {
+ val request = OllamaChatRequest(
+ model = modelId,
+ messages = messages.toOllama(),
+ stream = true,
+ )
+ client.preparePost("$baseUrl/api/chat") {
+ contentType(ContentType.Application.Json)
+ setBody(request)
+ }.execute { response ->
+ val channel = response.bodyAsChannel()
+ while (!channel.isClosedForRead) {
+ val line = channel.readUTF8Line() ?: break
+ val chunk = runCatching {
+ JsonRpcJson.decodeFromString<OllamaChatResponse>(line)
+ }.getOrNull() ?: continue
+ if (chunk.message.content.isNotEmpty()) {
+ emit(chunk.message.content)
+ }
+ if (chunk.done) break
+ }
+ }
+ }
+
+ private fun List<LlmMessage>.toOllama() = map { msg ->
+ OllamaMessage(
+ role = when (msg.role) {
+ LlmRole.SYSTEM -> "system"
+ LlmRole.USER -> "user"
+ LlmRole.ASSISTANT -> "assistant"
+ LlmRole.TOOL -> "tool"
+ },
+ content = msg.content,
+ )
+ }
+
+ fun close() = client.close()
+}
+
+// ──────────────────────────────────────────────
+// Ollama REST API DTOs
+// ──────────────────────────────────────────────
+
+@Serializable
+private data class OllamaChatRequest(
+ val model: String,
+ val messages: List<OllamaMessage>,
+ val stream: Boolean = false,
+)
+
+@Serializable
+private data class OllamaMessage(
+ val role: String,
+ val content: String,
+)
+
+@Serializable
+private data class OllamaChatResponse(
+ val model: String = "",
+ val message: OllamaMessage,
+ val done: Boolean = false,
+)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
new file mode 100644
index 000000000..a1ab24cd4
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
@@ -0,0 +1,209 @@
+package com.linuxcommandlibrary.ai.llm
+
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
+import com.linuxcommandlibrary.ai.mcp.McpTool
+import io.ktor.client.HttpClient
+import io.ktor.client.call.body
+import io.ktor.client.plugins.contentnegotiation.ContentNegotiation
+import io.ktor.client.plugins.defaultRequest
+import io.ktor.client.request.header
+import io.ktor.client.request.post
+import io.ktor.client.request.preparePost
+import io.ktor.client.request.setBody
+import io.ktor.client.statement.bodyAsChannel
+import io.ktor.http.ContentType
+import io.ktor.http.HttpHeaders
+import io.ktor.http.contentType
+import io.ktor.serialization.kotlinx.json.json
+import io.ktor.utils.io.readUTF8Line
+import kotlinx.coroutines.flow.Flow
+import kotlinx.coroutines.flow.flow
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.JsonNull
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.jsonObject
+import kotlinx.serialization.json.jsonPrimitive
+
+/**
+ * LLM provider implementation for the OpenAI Chat Completions API.
+ *
+ * Compatible with:
+ * - OpenAI (https://api.openai.com/v1)
+ * - Groq (https://api.groq.com/openai/v1)
+ * - LM Studio / LocalAI (http://localhost:1234/v1)
+ * - Any other OpenAI-compatible endpoint
+ */
+class OpenAiProvider(
+ private val config: LlmConfig,
+ httpClient: HttpClient? = null,
+) : LlmProvider {
+
+ override val name: String get() = config.providerType.displayName
+
+ private val baseUrl: String = config.baseUrl.ifBlank { config.providerType.defaultBaseUrl }
+ private val modelId: String = config.modelId.ifBlank { config.providerType.defaultModel }
+
+ private val client: HttpClient = httpClient ?: HttpClient {
+ install(ContentNegotiation) { json(JsonRpcJson) }
+ defaultRequest {
+ header(HttpHeaders.Authorization, "Bearer ${config.apiKey}")
+ }
+ }
+
+ override suspend fun chat(messages: List<LlmMessage>, tools: List<McpTool>): LlmResponse {
+ val request = buildChatRequest(messages, tools, stream = false)
+ val response: OpenAiChatResponse = client.post("$baseUrl/chat/completions") {
+ contentType(ContentType.Application.Json)
+ setBody(request)
+ }.body()
+
+ val choice = response.choices.firstOrNull()
+ ?: return LlmResponse(content = "", finishReason = LlmFinishReason.ERROR)
+
+ val toolCalls = choice.message.toolCalls?.map { tc ->
+ LlmToolCall(
+ id = tc.id,
+ name = tc.function.name,
+ arguments = JsonRpcJson.decodeFromString<JsonObject>(tc.function.arguments),
+ )
+ } ?: emptyList()
+
+ val finishReason = when (choice.finishReason) {
+ "tool_calls" -> LlmFinishReason.TOOL_CALLS
+ "length" -> LlmFinishReason.LENGTH
+ else -> LlmFinishReason.STOP
+ }
+
+ return LlmResponse(
+ content = choice.message.content ?: "",
+ toolCalls = toolCalls,
+ finishReason = finishReason,
+ )
+ }
+
+ override fun stream(messages: List<LlmMessage>, tools: List<McpTool>): Flow<String> = flow {
+ val request = buildChatRequest(messages, tools, stream = true)
+ client.preparePost("$baseUrl/chat/completions") {
+ contentType(ContentType.Application.Json)
+ setBody(request)
+ }.execute { response ->
+ val channel = response.bodyAsChannel()
+ while (!channel.isClosedForRead) {
+ val line = channel.readUTF8Line() ?: break
+ if (!line.startsWith("data: ")) continue
+ val data = line.removePrefix("data: ").trim()
+ if (data == "[DONE]") break
+ val chunk = runCatching {
+ JsonRpcJson.decodeFromString<OpenAiStreamChunk>(data)
+ }.getOrNull() ?: continue
+ val delta = chunk.choices.firstOrNull()?.delta?.content ?: continue
+ emit(delta)
+ }
+ }
+ }
+
+ private fun buildChatRequest(
+ messages: List<LlmMessage>,
+ tools: List<McpTool>,
+ stream: Boolean,
+ ) = OpenAiChatRequest(
+ model = modelId,
+ messages = messages.map { msg ->
+ OpenAiMessage(
+ role = msg.role.name.lowercase(),
+ content = msg.content,
+ toolCallId = msg.toolCallId,
+ name = msg.toolName,
+ )
+ },
+ tools = tools.takeIf { it.isNotEmpty() }?.map { tool ->
+ OpenAiTool(
+ function = OpenAiFunction(
+ name = tool.name,
+ description = tool.description,
+ parameters = tool.inputSchema,
+ ),
+ )
+ },
+ stream = stream,
+ )
+
+ fun close() = client.close()
+}
+
+// ──────────────────────────────────────────────
+// OpenAI REST API DTOs
+// ──────────────────────────────────────────────
+
+@Serializable
+private data class OpenAiChatRequest(
+ val model: String,
+ val messages: List<OpenAiMessage>,
+ val tools: List<OpenAiTool>? = null,
+ val stream: Boolean = false,
+)
+
+@Serializable
+private data class OpenAiMessage(
+ val role: String,
+ val content: String? = null,
+ @SerialName("tool_call_id") val toolCallId: String? = null,
+ val name: String? = null,
+ @SerialName("tool_calls") val toolCalls: List<OpenAiToolCall>? = null,
+)
+
+@Serializable
+private data class OpenAiTool(
+ val type: String = "function",
+ val function: OpenAiFunction,
+)
+
+@Serializable
+private data class OpenAiFunction(
+ val name: String,
+ val description: String? = null,
+ val parameters: JsonObject,
+)
+
+@Serializable
+private data class OpenAiToolCall(
+ val id: String,
+ val type: String = "function",
+ val function: OpenAiFunctionCall,
+)
+
+@Serializable
+private data class OpenAiFunctionCall(
+ val name: String,
+ val arguments: String,
+)
+
+@Serializable
+private data class OpenAiChatResponse(
+ val choices: List<OpenAiChoice>,
+)
+
+@Serializable
+private data class OpenAiChoice(
+ val message: OpenAiMessage,
+ @SerialName("finish_reason") val finishReason: String? = null,
+)
+
+@Serializable
+private data class OpenAiStreamChunk(
+ val choices: List<OpenAiStreamChoice>,
+)
+
+@Serializable
+private data class OpenAiStreamChoice(
+ val delta: OpenAiDelta,
+ @SerialName("finish_reason") val finishReason: String? = null,
+)
+
+@Serializable
+private data class OpenAiDelta(
+ val content: String? = null,
+ @SerialName("tool_calls") val toolCalls: List<OpenAiToolCall>? = null,
+)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
new file mode 100644
index 000000000..4964137a9
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
@@ -0,0 +1,139 @@
+package com.linuxcommandlibrary.ai.mcp
+
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcError
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcErrorCode
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcRequest
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcResponse
+import com.linuxcommandlibrary.ai.jsonrpc.fromJsonElement
+import com.linuxcommandlibrary.ai.jsonrpc.jsonRpcId
+import com.linuxcommandlibrary.ai.jsonrpc.toJsonElement
+import io.ktor.client.HttpClient
+import io.ktor.client.call.body
+import io.ktor.client.plugins.contentnegotiation.ContentNegotiation
+import io.ktor.client.request.post
+import io.ktor.client.request.setBody
+import io.ktor.http.ContentType
+import io.ktor.http.contentType
+import io.ktor.serialization.kotlinx.json.json
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.JsonNull
+import kotlinx.serialization.json.JsonObject
+import java.util.concurrent.atomic.AtomicLong
+
+/**
+ * Lightweight MCP client that communicates over HTTP using JSON-RPC 2.0.
+ *
+ * Typical flow:
+ * 1. [initialize] — negotiate protocol version and capabilities
+ * 2. [listTools] — discover available tools
+ * 3. [callTool] — invoke a tool with arguments
+ *
+ * @param endpointUrl Full URL of the MCP server endpoint (e.g. "http://localhost:3000/mcp").
+ * @param httpClient Optional pre-configured Ktor [HttpClient]; a default is created if omitted.
+ */
+class McpClient(
+ private val endpointUrl: String,
+ httpClient: HttpClient? = null,
+) {
+ private val client: HttpClient = httpClient ?: HttpClient {
+ install(ContentNegotiation) { json(JsonRpcJson) }
+ }
+
+ private val idCounter = AtomicLong(1L)
+ private fun nextId() = jsonRpcId(idCounter.getAndIncrement())
+
+ // ──────────────────────────────────────────────
+ // Lifecycle
+ // ──────────────────────────────────────────────
+
+ /**
+ * Send the MCP `initialize` request and return the server's capabilities.
+ */
+ suspend fun initialize(clientInfo: McpClientInfo): McpInitializeResult {
+ val params = McpInitializeParams(
+ capabilities = McpCapabilities(tools = McpToolsCapability()),
+ clientInfo = clientInfo,
+ )
+ val result = call(McpMethod.INITIALIZE, params.toJsonElement())
+ return result.fromJsonElement()
+ }
+
+ /**
+ * Send the `notifications/initialized` notification (no response expected).
+ */
+ suspend fun notifyInitialized() {
+ postRaw(
+ JsonRpcRequest(method = McpMethod.INITIALIZED, params = JsonNull, id = null),
+ )
+ }
+
+ // ──────────────────────────────────────────────
+ // Tools
+ // ──────────────────────────────────────────────
+
+ /** List all tools exposed by the MCP server. */
+ suspend fun listTools(): List<McpTool> {
+ val result = call(McpMethod.TOOLS_LIST, null)
+ return result.fromJsonElement<McpToolsListResult>().tools
+ }
+
+ /**
+ * Call a named tool with an optional argument map.
+ *
+ * @param name Tool name as returned by [listTools].
+ * @param arguments Key-value map of input arguments (must match the tool's `inputSchema`).
+ */
+ suspend fun callTool(name: String, arguments: JsonObject? = null): McpToolCallResult {
+ val params = McpToolCallParams(name = name, arguments = arguments)
+ val result = call(McpMethod.TOOLS_CALL, params.toJsonElement())
+ return result.fromJsonElement()
+ }
+
+ // ──────────────────────────────────────────────
+ // Resources
+ // ──────────────────────────────────────────────
+
+ /** List all resources exposed by the MCP server. */
+ suspend fun listResources(): List<McpResource> {
+ val result = call(McpMethod.RESOURCES_LIST, null)
+ return result.fromJsonElement<McpResourcesListResult>().resources
+ }
+
+ /** Read the content of a specific resource by URI. */
+ suspend fun readResource(uri: String): List<McpContent> {
+ val params = McpResourceReadParams(uri = uri)
+ val result = call(McpMethod.RESOURCES_READ, params.toJsonElement())
+ return result.fromJsonElement<McpResourceReadResult>().contents
+ }
+
+ // ──────────────────────────────────────────────
+ // Internal helpers
+ // ──────────────────────────────────────────────
+
+ private suspend fun call(method: String, params: JsonElement?): JsonElement {
+ val request = JsonRpcRequest(method = method, params = params, id = nextId())
+ val response = postRaw(request)
+ response.error?.let { err ->
+ throw McpException(code = err.code, message = err.message)
+ }
+ return response.result
+ ?: throw McpException(
+ code = JsonRpcErrorCode.INTERNAL_ERROR,
+ message = "MCP response missing both 'result' and 'error' for method '$method'",
+ )
+ }
+
+ private suspend fun postRaw(request: JsonRpcRequest): JsonRpcResponse =
+ client.post(endpointUrl) {
+ contentType(ContentType.Application.Json)
+ setBody(request)
+ }.body()
+
+ fun close() = client.close()
+}
+
+/**
+ * Thrown when the MCP server returns a JSON-RPC error object.
+ */
+class McpException(val code: Int, override val message: String) : Exception(message)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpModels.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpModels.kt
new file mode 100644
index 000000000..7fcbc6092
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpModels.kt
@@ -0,0 +1,173 @@
+package com.linuxcommandlibrary.ai.mcp
+
+import kotlinx.serialization.SerialName
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.json.JsonElement
+import kotlinx.serialization.json.JsonObject
+
+// ──────────────────────────────────────────────
+// MCP Lifecycle
+// ──────────────────────────────────────────────
+
+@Serializable
+data class McpClientInfo(
+ val name: String,
+ val version: String,
+)
+
+@Serializable
+data class McpServerInfo(
+ val name: String,
+ val version: String,
+)
+
+@Serializable
+data class McpCapabilities(
+ val tools: McpToolsCapability? = null,
+ val resources: McpResourcesCapability? = null,
+ val prompts: McpPromptsCapability? = null,
+)
+
+@Serializable
+class McpToolsCapability
+
+@Serializable
+class McpResourcesCapability
+
+@Serializable
+class McpPromptsCapability
+
+@Serializable
+data class McpInitializeParams(
+ val protocolVersion: String = MCP_PROTOCOL_VERSION,
+ val capabilities: McpCapabilities,
+ val clientInfo: McpClientInfo,
+)
+
+@Serializable
+data class McpInitializeResult(
+ val protocolVersion: String,
+ val capabilities: McpCapabilities,
+ val serverInfo: McpServerInfo,
+ val instructions: String? = null,
+)
+
+// ──────────────────────────────────────────────
+// Tools
+// ──────────────────────────────────────────────
+
+/**
+ * Describes a callable tool exposed by an MCP server.
+ */
+@Serializable
+data class McpTool(
+ val name: String,
+ val description: String? = null,
+ val inputSchema: JsonObject,
+)
+
+@Serializable
+data class McpToolsListResult(
+ val tools: List<McpTool>,
+ val nextCursor: String? = null,
+)
+
+@Serializable
+data class McpToolCallParams(
+ val name: String,
+ val arguments: JsonObject? = null,
+)
+
+@Serializable
+data class McpToolCallResult(
+ val content: List<McpContent>,
+ val isError: Boolean = false,
+)
+
+// ──────────────────────────────────────────────
+// Resources
+// ──────────────────────────────────────────────
+
+@Serializable
+data class McpResource(
+ val uri: String,
+ val name: String,
+ val description: String? = null,
+ val mimeType: String? = null,
+)
+
+@Serializable
+data class McpResourcesListResult(
+ val resources: List<McpResource>,
+ val nextCursor: String? = null,
+)
+
+@Serializable
+data class McpResourceReadParams(val uri: String)
+
+@Serializable
+data class McpResourceReadResult(val contents: List<McpContent>)
+
+// ──────────────────────────────────────────────
+// Prompts
+// ──────────────────────────────────────────────
+
+@Serializable
+data class McpPrompt(
+ val name: String,
+ val description: String? = null,
+ val arguments: List<McpPromptArgument>? = null,
+)
+
+@Serializable
+data class McpPromptArgument(
+ val name: String,
+ val description: String? = null,
+ val required: Boolean = false,
+)
+
+@Serializable
+data class McpPromptsListResult(
+ val prompts: List<McpPrompt>,
+ val nextCursor: String? = null,
+)
+
+// ──────────────────────────────────────────────
+// Content blocks
+// ──────────────────────────────────────────────
+
+/**
+ * Polymorphic content block returned by tools and resources.
+ * Discriminated by the `type` field.
+ */
+@Serializable
+data class McpContent(
+ val type: String,
+ val text: String? = null,
+ val data: String? = null,
+ val mimeType: String? = null,
+ val uri: String? = null,
+)
+
+object McpContentType {
+ const val TEXT = "text"
+ const val IMAGE = "image"
+ const val RESOURCE = "resource"
+}
+
+// ──────────────────────────────────────────────
+// MCP method names
+// ──────────────────────────────────────────────
+
+object McpMethod {
+ const val INITIALIZE = "initialize"
+ const val INITIALIZED = "notifications/initialized"
+ const val TOOLS_LIST = "tools/list"
+ const val TOOLS_CALL = "tools/call"
+ const val RESOURCES_LIST = "resources/list"
+ const val RESOURCES_READ = "resources/read"
+ const val PROMPTS_LIST = "prompts/list"
+ const val PROMPTS_GET = "prompts/get"
+}
+
+const val MCP_PROTOCOL_VERSION = "2024-11-05"
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpServer.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpServer.kt
new file mode 100644
index 000000000..bcf48f1d2
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpServer.kt
@@ -0,0 +1,115 @@
+package com.linuxcommandlibrary.ai.mcp
+
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcError
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcErrorCode
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcNotification
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcRequest
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcResponse
+import com.linuxcommandlibrary.ai.jsonrpc.fromJsonElement
+import com.linuxcommandlibrary.ai.jsonrpc.toJsonElement
+import com.linuxcommandlibrary.ai.tools.LinuxLibraryToolRegistry
+import kotlinx.serialization.json.JsonNull
+
+/**
+ * In-process MCP server that handles JSON-RPC requests and delegates to [LinuxLibraryToolRegistry].
+ *
+ * The server is transport-agnostic — it receives a serialised JSON-RPC request string and
+ * returns a serialised response string. The caller is responsible for transport (HTTP, stdio,
+ * WebSocket, etc.).
+ */
+class McpServer(
+ private val toolRegistry: LinuxLibraryToolRegistry,
+ private val serverInfo: McpServerInfo = McpServerInfo(
+ name = "linux-command-library-mcp",
+ version = "1.0.0",
+ ),
+) {
+ private var initialized = false
+
+ /**
+ * Process a raw JSON-RPC request string and return a JSON-RPC response string.
+ * Returns `null` for notifications (no id → no response expected).
+ */
+ suspend fun handle(requestJson: String): String? {
+ val request = runCatching {
+ JsonRpcJson.decodeFromString<JsonRpcRequest>(requestJson)
+ }.getOrElse { e ->
+ val errorResponse = JsonRpcResponse(
+ error = JsonRpcError(JsonRpcErrorCode.PARSE_ERROR, "Parse error: ${e.message}"),
+ )
+ return JsonRpcJson.encodeToString(JsonRpcResponse.serializer(), errorResponse)
+ }
+
+ // Notifications (no id) require no response
+ if (request.id == null) {
+ handleNotification(request)
+ return null
+ }
+
+ val response = handleRequest(request)
+ return JsonRpcJson.encodeToString(JsonRpcResponse.serializer(), response)
+ }
+
+ private suspend fun handleRequest(request: JsonRpcRequest): JsonRpcResponse {
+ return try {
+ val result = when (request.method) {
+ McpMethod.INITIALIZE -> handleInitialize(request)
+ McpMethod.TOOLS_LIST -> handleToolsList()
+ McpMethod.TOOLS_CALL -> handleToolsCall(request)
+ McpMethod.RESOURCES_LIST -> handleResourcesList()
+ else -> throw McpException(
+ JsonRpcErrorCode.METHOD_NOT_FOUND,
+ "Method not found: ${request.method}",
+ )
+ }
+ JsonRpcResponse(result = result, id = request.id)
+ } catch (e: McpException) {
+ JsonRpcResponse(
+ error = JsonRpcError(code = e.code, message = e.message),
+ id = request.id,
+ )
+ } catch (e: Exception) {
+ JsonRpcResponse(
+ error = JsonRpcError(JsonRpcErrorCode.INTERNAL_ERROR, e.message ?: "Internal error"),
+ id = request.id,
+ )
+ }
+ }
+
+ private fun handleNotification(request: JsonRpcRequest) {
+ if (request.method == McpMethod.INITIALIZED) {
+ initialized = true
+ }
+ }
+
+ private fun handleInitialize(request: JsonRpcRequest): kotlinx.serialization.json.JsonElement {
+ val result = McpInitializeResult(
+ protocolVersion = MCP_PROTOCOL_VERSION,
+ capabilities = McpCapabilities(
+ tools = McpToolsCapability(),
+ resources = McpResourcesCapability(),
+ ),
+ serverInfo = serverInfo,
+ instructions = "Linux Command Library MCP server. Use tools to search commands and basics.",
+ )
+ return result.toJsonElement()
+ }
+
+ private fun handleToolsList(): kotlinx.serialization.json.JsonElement {
+ val result = McpToolsListResult(tools = toolRegistry.allTools())
+ return result.toJsonElement()
+ }
+
+ private suspend fun handleToolsCall(request: JsonRpcRequest): kotlinx.serialization.json.JsonElement {
+ val params = request.params?.fromJsonElement<McpToolCallParams>()
+ ?: throw McpException(JsonRpcErrorCode.INVALID_PARAMS, "Missing params")
+ val result = toolRegistry.call(params.name, params.arguments)
+ return result.toJsonElement()
+ }
+
+ private fun handleResourcesList(): kotlinx.serialization.json.JsonElement {
+ val result = McpResourcesListResult(resources = emptyList())
+ return result.toJsonElement()
+ }
+}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
new file mode 100644
index 000000000..f52b85716
--- /dev/null
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
@@ -0,0 +1,155 @@
+package com.linuxcommandlibrary.ai.tools
+
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
+import com.linuxcommandlibrary.ai.mcp.McpContent
+import com.linuxcommandlibrary.ai.mcp.McpContentType
+import com.linuxcommandlibrary.ai.mcp.McpException
+import com.linuxcommandlibrary.ai.mcp.McpTool
+import com.linuxcommandlibrary.ai.mcp.McpToolCallResult
+import com.linuxcommandlibrary.app.data.CommandsRepository
+import com.linuxcommandlibrary.app.data.BasicsRepository
+import kotlinx.serialization.json.JsonObject
+import kotlinx.serialization.json.buildJsonObject
+import kotlinx.serialization.json.jsonPrimitive
+import kotlinx.serialization.json.put
+import kotlinx.serialization.json.putJsonObject
+
+/**
+ * Registry of all MCP tools exposed by the Linux Command Library.
+ *
+ * Each tool is defined with a name, description, and JSON Schema for its input.
+ * The [call] method dispatches to the appropriate handler.
+ */
+class LinuxLibraryToolRegistry(
+ private val commandsRepository: CommandsRepository,
+ private val basicsRepository: BasicsRepository,
+) {
+
+ fun allTools(): List<McpTool> = listOf(
+ searchCommandsTool,
+ getCommandDetailTool,
+ searchBasicsTool,
+ )
+
+ suspend fun call(name: String, arguments: JsonObject?): McpToolCallResult =
+ when (name) {
+ SEARCH_COMMANDS -> searchCommands(arguments)
+ GET_COMMAND_DETAIL -> getCommandDetail(arguments)
+ SEARCH_BASICS -> searchBasics(arguments)
+ else -> McpToolCallResult(
+ content = listOf(McpContent(type = McpContentType.TEXT, text = "Unknown tool: $name")),
+ isError = true,
+ )
+ }
+
+ // ──────────────────────────────────────────────
+ // Tool: search_commands
+ // ──────────────────────────────────────────────
+
+ private val searchCommandsTool = McpTool(
+ name = SEARCH_COMMANDS,
+ description = "Search for Linux commands by name or keyword. Returns a list of matching command names.",
+ inputSchema = buildJsonObject {
+ put("type", "object")
+ putJsonObject("properties") {
+ putJsonObject("query") {
+ put("type", "string")
+ put("description", "Search query (command name or keyword)")
+ }
+ }
+ put("required", JsonRpcJson.parseToJsonElement("""["query"]"""))
+ },
+ )
+
+ private fun searchCommands(arguments: JsonObject?): McpToolCallResult {
+ val query = arguments?.get("query")?.jsonPrimitive?.content ?: ""
+ val results = commandsRepository.getCommandsByQuery(query).take(20)
+ val text = if (results.isEmpty()) {
+ "No commands found for query: '$query'"
+ } else {
+ results.joinToString("\n") { "- ${it.name}" }
+ }
+ return textResult(text)
+ }
+
+ // ──────────────────────────────────────────────
+ // Tool: get_command_detail
+ // ──────────────────────────────────────────────
+
+ private val getCommandDetailTool = McpTool(
+ name = GET_COMMAND_DETAIL,
+ description = "Get detailed information (man page sections) for a specific Linux command.",
+ inputSchema = buildJsonObject {
+ put("type", "object")
+ putJsonObject("properties") {
+ putJsonObject("command") {
+ put("type", "string")
+ put("description", "Exact Linux command name (e.g. 'ls', 'grep', 'curl')")
+ }
+ }
+ put("required", JsonRpcJson.parseToJsonElement("""["command"]"""))
+ },
+ )
+
+ private fun getCommandDetail(arguments: JsonObject?): McpToolCallResult {
+ val commandName = arguments?.get("command")?.jsonPrimitive?.content ?: ""
+ val sections = commandsRepository.getSections(commandName)
+ if (sections.isEmpty()) {
+ return textResult("No details found for command: '$commandName'")
+ }
+ val text = buildString {
+ appendLine("# $commandName")
+ sections.forEach { section ->
+ appendLine("\n## ${section.title}")
+ appendLine(section.content)
+ }
+ }
+ return textResult(text)
+ }
+
+ // ──────────────────────────────────────────────
+ // Tool: search_basics
+ // ──────────────────────────────────────────────
+
+ private val searchBasicsTool = McpTool(
+ name = SEARCH_BASICS,
+ description = "Search the Linux basics guide sections. Returns matching sections with command examples.",
+ inputSchema = buildJsonObject {
+ put("type", "object")
+ putJsonObject("properties") {
+ putJsonObject("query") {
+ put("type", "string")
+ put("description", "Topic to search (e.g. 'files', 'network', 'permissions')")
+ }
+ }
+ put("required", JsonRpcJson.parseToJsonElement("""["query"]"""))
+ },
+ )
+
+ private fun searchBasics(arguments: JsonObject?): McpToolCallResult {
+ val query = arguments?.get("query")?.jsonPrimitive?.content ?: ""
+ val matches = basicsRepository.getBasicGroupsByQuery(query).take(10)
+ val text = if (matches.isEmpty()) {
+ "No basics sections found for query: '$query'"
+ } else {
+ matches.joinToString("\n\n") { match ->
+ "**${match.categoryTitle} › ${match.description}**"
+ }
+ }
+ return textResult(text)
+ }
+
+ // ──────────────────────────────────────────────
+ // Helpers
+ // ──────────────────────────────────────────────
+
+ private fun textResult(text: String) = McpToolCallResult(
+ content = listOf(McpContent(type = McpContentType.TEXT, text = text)),
+ )
+
+ companion object {
+ const val SEARCH_COMMANDS = "search_commands"
+ const val GET_COMMAND_DETAIL = "get_command_detail"
+ const val SEARCH_BASICS = "search_basics"
+ }
+}
diff --git a/android/src/main/java/com/inspiredandroid/linuxcommandbibliotheca/LinuxApplication.kt b/android/src/main/java/com/inspiredandroid/linuxcommandbibliotheca/LinuxApplication.kt
index 82b2e972b..f98d802f4 100644
--- a/android/src/main/java/com/inspiredandroid/linuxcommandbibliotheca/LinuxApplication.kt
+++ b/android/src/main/java/com/inspiredandroid/linuxcommandbibliotheca/LinuxApplication.kt
@@ -3,6 +3,7 @@ package com.inspiredandroid.linuxcommandbibliotheca
import android.app.Application
import com.linuxcommandlibrary.app.di.commonModule
import com.linuxcommandlibrary.app.di.platformModule
+import com.linuxcommandlibrary.ai.di.aiModule
import org.koin.android.ext.koin.androidContext
import org.koin.android.ext.koin.androidLogger
import org.koin.core.context.GlobalContext.startKoin
@@ -15,7 +16,7 @@ class LinuxApplication : Application() {
startKoin {
androidLogger()
androidContext(this@LinuxApplication)
- modules(commonModule, platformModule())
+ modules(commonModule, platformModule(), aiModule)
}
}
}
diff --git a/composeApp/build.gradle.kts b/composeApp/build.gradle.kts
index b3bd9fb55..e5f7a8683 100644
--- a/composeApp/build.gradle.kts
+++ b/composeApp/build.gradle.kts
@@ -49,6 +49,7 @@ kotlin {
commonMain {
dependencies {
api(project(":viewmodels"))
+ api(project(":ai"))
implementation(libs.compose.runtime)
implementation(libs.compose.foundation)
implementation(libs.compose.material3)
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/AppIcons.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/AppIcons.kt
index 4366318f9..bdfb0432e 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/AppIcons.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/AppIcons.kt
@@ -235,4 +235,24 @@ object AppIcons {
}
}.build()
}
+
+ val Send: ImageVector by lazy {
+ ImageVector.Builder(
+ name = "Send",
+ defaultWidth = 24.dp,
+ defaultHeight = 24.dp,
+ viewportWidth = 24f,
+ viewportHeight = 24f,
+ ).apply {
+ path(fill = SolidColor(Color.Black)) {
+ moveTo(2.01f, 21.0f)
+ lineTo(23.0f, 12.0f)
+ lineTo(2.01f, 3.0f)
+ lineTo(2.0f, 10.0f)
+ lineToRelative(15.0f, 2.0f)
+ lineToRelative(-15.0f, 2.0f)
+ close()
+ }
+ }.build()
+ }
}
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
new file mode 100644
index 000000000..0d1dbd278
--- /dev/null
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
@@ -0,0 +1,275 @@
+package com.linuxcommandlibrary.app.ui.screens.aichat
+
+import androidx.compose.foundation.background
+import androidx.compose.foundation.layout.Arrangement
+import androidx.compose.foundation.layout.Box
+import androidx.compose.foundation.layout.Column
+import androidx.compose.foundation.layout.Row
+import androidx.compose.foundation.layout.Spacer
+import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.foundation.layout.fillMaxWidth
+import androidx.compose.foundation.layout.height
+import androidx.compose.foundation.layout.imePadding
+import androidx.compose.foundation.layout.padding
+import androidx.compose.foundation.layout.size
+import androidx.compose.foundation.layout.width
+import androidx.compose.foundation.layout.widthIn
+import androidx.compose.foundation.lazy.LazyColumn
+import androidx.compose.foundation.lazy.items
+import androidx.compose.foundation.lazy.rememberLazyListState
+import androidx.compose.foundation.shape.CircleShape
+import androidx.compose.foundation.shape.RoundedCornerShape
+import androidx.compose.foundation.text.KeyboardActions
+import androidx.compose.foundation.text.KeyboardOptions
+import androidx.compose.material3.Card
+import androidx.compose.material3.CardDefaults
+import androidx.compose.material3.CircularProgressIndicator
+import androidx.compose.material3.Icon
+import androidx.compose.material3.IconButton
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.OutlinedTextField
+import androidx.compose.material3.SnackbarHost
+import androidx.compose.material3.SnackbarHostState
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.LaunchedEffect
+import androidx.compose.runtime.collectAsState
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.rememberCoroutineScope
+import androidx.compose.runtime.setValue
+import androidx.compose.ui.Alignment
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.draw.clip
+import androidx.compose.ui.input.pointer.PointerIcon
+import androidx.compose.ui.input.pointer.pointerHoverIcon
+import androidx.compose.ui.text.input.ImeAction
+import androidx.compose.ui.unit.dp
+import com.linuxcommandlibrary.ai.chat.ChatMessage
+import com.linuxcommandlibrary.ai.chat.ChatViewModel
+import com.linuxcommandlibrary.ai.llm.LlmRole
+import com.linuxcommandlibrary.app.ui.AppIcons
+import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
+import kotlinx.coroutines.launch
+import org.koin.compose.koinInject
+
+/**
+ * Full-screen AI chat interface.
+ *
+ * Features:
+ * - Conversation history in a scrollable list
+ * - Streaming-aware message bubbles (shows spinner while waiting)
+ * - Automatic scroll-to-bottom on new messages
+ * - Input field with send button
+ * - Settings shortcut in the top bar
+ */
+@Composable
+fun AiChatScreen(
+ onBack: (() -> Unit)? = null,
+ onOpenSettings: () -> Unit = {},
+) {
+ val viewModel: ChatViewModel = koinInject()
+ val uiState by viewModel.uiState.collectAsState()
+ val snackbarHostState = remember { SnackbarHostState() }
+ val scope = rememberCoroutineScope()
+
+ LaunchedEffect(uiState.errorMessage) {
+ val msg = uiState.errorMessage ?: return@LaunchedEffect
+ scope.launch {
+ snackbarHostState.showSnackbar(msg)
+ viewModel.dismissError()
+ }
+ }
+
+ Box(modifier = Modifier.fillMaxSize()) {
+ Column(modifier = Modifier.fillMaxSize()) {
+ PaneTopBar(
+ title = "AI Assistant",
+ onBack = onBack,
+ actions = {
+ IconButton(
+ modifier = Modifier.pointerHoverIcon(PointerIcon.Hand),
+ onClick = onOpenSettings,
+ ) {
+ Icon(
+ imageVector = AppIcons.Info,
+ contentDescription = "AI Settings",
+ )
+ }
+ },
+ )
+
+ MessageList(
+ messages = uiState.messages,
+ isWaiting = uiState.isWaitingForResponse,
+ modifier = Modifier.weight(1f),
+ )
+
+ ChatInputBar(
+ enabled = !uiState.isWaitingForResponse,
+ onSend = viewModel::sendMessage,
+ )
+ }
+
+ SnackbarHost(
+ hostState = snackbarHostState,
+ modifier = Modifier.align(Alignment.BottomCenter),
+ )
+ }
+}
+
+// ──────────────────────────────────────────────
+// Message list
+// ──────────────────────────────────────────────
+
+@Composable
+private fun MessageList(
+ messages: List<ChatMessage>,
+ isWaiting: Boolean,
+ modifier: Modifier = Modifier,
+) {
+ val listState = rememberLazyListState()
+
+ LaunchedEffect(messages.size, isWaiting) {
+ if (messages.isNotEmpty()) {
+ listState.animateScrollToItem(messages.lastIndex)
+ }
+ }
+
+ LazyColumn(
+ state = listState,
+ modifier = modifier
+ .fillMaxWidth()
+ .padding(horizontal = 16.dp),
+ verticalArrangement = Arrangement.spacedBy(8.dp),
+ ) {
+ item { Spacer(Modifier.height(8.dp)) }
+ items(messages) { message ->
+ MessageBubble(message = message)
+ }
+ item { Spacer(Modifier.height(8.dp)) }
+ }
+}
+
+// ──────────────────────────────────────────────
+// Message bubble
+// ──────────────────────────────────────────────
+
+@Composable
+private fun MessageBubble(message: ChatMessage) {
+ val isUser = message.role == LlmRole.USER
+ Row(
+ modifier = Modifier.fillMaxWidth(),
+ horizontalArrangement = if (isUser) Arrangement.End else Arrangement.Start,
+ ) {
+ Card(
+ shape = RoundedCornerShape(
+ topStart = 16.dp,
+ topEnd = 16.dp,
+ bottomStart = if (isUser) 16.dp else 4.dp,
+ bottomEnd = if (isUser) 4.dp else 16.dp,
+ ),
+ colors = CardDefaults.cardColors(
+ containerColor = if (isUser) {
+ MaterialTheme.colorScheme.primaryContainer
+ } else {
+ MaterialTheme.colorScheme.surfaceContainerHigh
+ },
+ ),
+ modifier = Modifier.widthIn(max = 320.dp),
+ ) {
+ Box(modifier = Modifier.padding(horizontal = 14.dp, vertical = 10.dp)) {
+ if (message.isStreaming && message.content.isEmpty()) {
+ CircularProgressIndicator(
+ modifier = Modifier.size(18.dp),
+ strokeWidth = 2.dp,
+ )
+ } else {
+ Text(
+ text = message.content,
+ style = MaterialTheme.typography.bodyMedium,
+ color = if (message.isError) {
+ MaterialTheme.colorScheme.error
+ } else {
+ MaterialTheme.colorScheme.onSurface
+ },
+ )
+ }
+ }
+ }
+ }
+}
+
+// ──────────────────────────────────────────────
+// Input bar
+// ──────────────────────────────────────────────
+
+@Composable
+private fun ChatInputBar(
+ enabled: Boolean,
+ onSend: (String) -> Unit,
+) {
+ var text by remember { mutableStateOf("") }
+
+ Row(
+ modifier = Modifier
+ .fillMaxWidth()
+ .background(MaterialTheme.colorScheme.surface)
+ .padding(horizontal = 16.dp, vertical = 12.dp)
+ .imePadding(),
+ verticalAlignment = Alignment.Bottom,
+ ) {
+ OutlinedTextField(
+ value = text,
+ onValueChange = { text = it },
+ modifier = Modifier.weight(1f),
+ placeholder = { Text("Zadaj pytanie o Linuksa…") },
+ enabled = enabled,
+ maxLines = 5,
+ shape = RoundedCornerShape(24.dp),
+ keyboardOptions = KeyboardOptions(imeAction = ImeAction.Send),
+ keyboardActions = KeyboardActions(
+ onSend = {
+ if (text.isNotBlank() && enabled) {
+ onSend(text)
+ text = ""
+ }
+ },
+ ),
+ )
+
+ Spacer(Modifier.width(8.dp))
+
+ IconButton(
+ onClick = {
+ if (text.isNotBlank() && enabled) {
+ onSend(text)
+ text = ""
+ }
+ },
+ enabled = text.isNotBlank() && enabled,
+ modifier = Modifier
+ .size(48.dp)
+ .clip(CircleShape)
+ .background(
+ if (text.isNotBlank() && enabled) {
+ MaterialTheme.colorScheme.primary
+ } else {
+ MaterialTheme.colorScheme.surfaceVariant
+ },
+ )
+ .pointerHoverIcon(PointerIcon.Hand),
+ ) {
+ Icon(
+ imageVector = AppIcons.Send,
+ contentDescription = "Wyślij",
+ tint = if (text.isNotBlank() && enabled) {
+ MaterialTheme.colorScheme.onPrimary
+ } else {
+ MaterialTheme.colorScheme.onSurfaceVariant
+ },
+ )
+ }
+ }
+}
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
new file mode 100644
index 000000000..6d85ade1a
--- /dev/null
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
@@ -0,0 +1,175 @@
+package com.linuxcommandlibrary.app.ui.screens.aichat
+
+import androidx.compose.foundation.layout.Column
+import androidx.compose.foundation.layout.Spacer
+import androidx.compose.foundation.layout.fillMaxSize
+import androidx.compose.foundation.layout.fillMaxWidth
+import androidx.compose.foundation.layout.height
+import androidx.compose.foundation.layout.padding
+import androidx.compose.foundation.rememberScrollState
+import androidx.compose.foundation.shape.RoundedCornerShape
+import androidx.compose.foundation.text.KeyboardOptions
+import androidx.compose.foundation.verticalScroll
+import androidx.compose.material3.Button
+import androidx.compose.material3.DropdownMenuItem
+import androidx.compose.material3.ExperimentalMaterial3Api
+import androidx.compose.material3.ExposedDropdownMenuBox
+import androidx.compose.material3.ExposedDropdownMenuDefaults
+import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.OutlinedTextField
+import androidx.compose.material3.Text
+import androidx.compose.runtime.Composable
+import androidx.compose.runtime.getValue
+import androidx.compose.runtime.mutableStateOf
+import androidx.compose.runtime.remember
+import androidx.compose.runtime.setValue
+import androidx.compose.ui.Modifier
+import androidx.compose.ui.input.pointer.PointerIcon
+import androidx.compose.ui.input.pointer.pointerHoverIcon
+import androidx.compose.ui.text.input.KeyboardType
+import androidx.compose.ui.text.input.PasswordVisualTransformation
+import androidx.compose.ui.unit.dp
+import com.linuxcommandlibrary.ai.chat.ChatViewModel
+import com.linuxcommandlibrary.ai.llm.LlmConfig
+import com.linuxcommandlibrary.ai.llm.LlmProviderType
+import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
+import org.koin.compose.koinInject
+
+/**
+ * Settings screen for configuring the AI provider.
+ *
+ * Supports selecting the provider type (OpenAI, Ollama, OpenAI-compatible),
+ * entering API key, custom base URL, and model ID.
+ */
+@OptIn(ExperimentalMaterial3Api::class)
+@Composable
+fun AiSettingsScreen(onBack: () -> Unit) {
+ val viewModel: ChatViewModel = koinInject()
+ val currentConfig = remember { viewModel.loadConfig() }
+
+ var selectedProvider by remember { mutableStateOf(currentConfig.providerType) }
+ var apiKey by remember { mutableStateOf(currentConfig.apiKey) }
+ var baseUrl by remember { mutableStateOf(currentConfig.baseUrl) }
+ var modelId by remember { mutableStateOf(currentConfig.modelId) }
+ var providerDropdownExpanded by remember { mutableStateOf(false) }
+
+ Column(modifier = Modifier.fillMaxSize()) {
+ PaneTopBar(title = "Ustawienia AI", onBack = onBack)
+
+ Column(
+ modifier = Modifier
+ .fillMaxSize()
+ .verticalScroll(rememberScrollState())
+ .padding(24.dp),
+ ) {
+ Text(
+ text = "Dostawca modelu LLM",
+ style = MaterialTheme.typography.titleMedium,
+ )
+ Spacer(Modifier.height(12.dp))
+
+ ExposedDropdownMenuBox(
+ expanded = providerDropdownExpanded,
+ onExpandedChange = { providerDropdownExpanded = it },
+ modifier = Modifier.fillMaxWidth(),
+ ) {
+ OutlinedTextField(
+ value = selectedProvider.displayName,
+ onValueChange = {},
+ readOnly = true,
+ label = { Text("Dostawca") },
+ trailingIcon = { ExposedDropdownMenuDefaults.TrailingIcon(providerDropdownExpanded) },
+ modifier = Modifier.fillMaxWidth().menuAnchor(),
+ shape = RoundedCornerShape(12.dp),
+ )
+ ExposedDropdownMenu(
+ expanded = providerDropdownExpanded,
+ onDismissRequest = { providerDropdownExpanded = false },
+ ) {
+ LlmProviderType.entries.forEach { type ->
+ DropdownMenuItem(
+ text = { Text(type.displayName) },
+ onClick = {
+ selectedProvider = type
+ if (baseUrl.isBlank()) baseUrl = type.defaultBaseUrl
+ if (modelId.isBlank()) modelId = type.defaultModel
+ providerDropdownExpanded = false
+ },
+ )
+ }
+ }
+ }
+
+ Spacer(Modifier.height(16.dp))
+
+ if (selectedProvider != LlmProviderType.OLLAMA) {
+ OutlinedTextField(
+ value = apiKey,
+ onValueChange = { apiKey = it },
+ label = { Text("Klucz API") },
+ placeholder = { Text("sk-…") },
+ visualTransformation = PasswordVisualTransformation(),
+ keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Password),
+ modifier = Modifier.fillMaxWidth(),
+ shape = RoundedCornerShape(12.dp),
+ )
+ Spacer(Modifier.height(16.dp))
+ }
+
+ OutlinedTextField(
+ value = baseUrl,
+ onValueChange = { baseUrl = it },
+ label = { Text("URL serwera") },
+ placeholder = { Text(selectedProvider.defaultBaseUrl) },
+ modifier = Modifier.fillMaxWidth(),
+ shape = RoundedCornerShape(12.dp),
+ )
+
+ Spacer(Modifier.height(16.dp))
+
+ OutlinedTextField(
+ value = modelId,
+ onValueChange = { modelId = it },
+ label = { Text("Identyfikator modelu") },
+ placeholder = { Text(selectedProvider.defaultModel) },
+ modifier = Modifier.fillMaxWidth(),
+ shape = RoundedCornerShape(12.dp),
+ )
+
+ Spacer(Modifier.height(24.dp))
+
+ Button(
+ onClick = {
+ viewModel.saveConfig(
+ LlmConfig(
+ providerType = selectedProvider,
+ apiKey = apiKey,
+ baseUrl = baseUrl,
+ modelId = modelId,
+ ),
+ )
+ onBack()
+ },
+ modifier = Modifier
+ .fillMaxWidth()
+ .pointerHoverIcon(PointerIcon.Hand),
+ shape = RoundedCornerShape(12.dp),
+ ) {
+ Text("Zapisz konfigurację")
+ }
+
+ Spacer(Modifier.height(16.dp))
+
+ Text(
+ text = buildString {
+ appendLine("Obsługiwane dostawcy:")
+ appendLine("• OpenAI — wymaga klucza API (platform.openai.com)")
+ appendLine("• Ollama — lokalny serwer LLM, domyślnie http://localhost:11434")
+ appendLine("• Kompatybilny z OpenAI — Groq, LM Studio, LocalAI i inne")
+ },
+ style = MaterialTheme.typography.bodySmall,
+ color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
+ )
+ }
+ }
+}
diff --git a/desktopApp/src/desktopMain/kotlin/com/linuxcommandlibrary/Main.kt b/desktopApp/src/desktopMain/kotlin/com/linuxcommandlibrary/Main.kt
index c995eed5f..d6f93af53 100644
--- a/desktopApp/src/desktopMain/kotlin/com/linuxcommandlibrary/Main.kt
+++ b/desktopApp/src/desktopMain/kotlin/com/linuxcommandlibrary/Main.kt
@@ -7,6 +7,7 @@ import androidx.compose.ui.window.rememberWindowState
import com.linuxcommandlibrary.app.App
import com.linuxcommandlibrary.app.di.commonModule
import com.linuxcommandlibrary.app.di.platformModule
+import com.linuxcommandlibrary.ai.di.aiModule
import org.koin.core.context.startKoin
fun main() {
@@ -17,7 +18,7 @@ fun main() {
application {
startKoin {
- modules(commonModule, platformModule())
+ modules(commonModule, platformModule(), aiModule)
}
Window(
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 3c78774e2..fc3699d9a 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -17,6 +17,7 @@ koinCore = "4.2.1"
kotlinxCoroutinesCore = "1.10.2"
kotlinxHtmlJvm = "0.12.0"
kotlinxSerializationCore = "1.11.0"
+ktorVersion = "3.1.3"
navigationCompose = "2.9.2"
preference = "1.2.1"
spotless = "8.4.0"
@@ -28,6 +29,13 @@ paparazzi = "2.0.0-alpha04"
[libraries]
kotlinx-serialization-core = { module = "org.jetbrains.kotlinx:kotlinx-serialization-core", version.ref = "kotlinxSerializationCore" }
+kotlinx-serialization-json = { module = "org.jetbrains.kotlinx:kotlinx-serialization-json", version.ref = "kotlinxSerializationCore" }
+ktor-client-core = { module = "io.ktor:ktor-client-core", version.ref = "ktorVersion" }
+ktor-client-content-negotiation = { module = "io.ktor:ktor-client-content-negotiation", version.ref = "ktorVersion" }
+ktor-client-okhttp = { module = "io.ktor:ktor-client-okhttp", version.ref = "ktorVersion" }
+ktor-client-darwin = { module = "io.ktor:ktor-client-darwin", version.ref = "ktorVersion" }
+ktor-client-java = { module = "io.ktor:ktor-client-java", version.ref = "ktorVersion" }
+ktor-serialization-kotlinx-json = { module = "io.ktor:ktor-serialization-kotlinx-json", version.ref = "ktorVersion" }
mordant = { module = "com.github.ajalt.mordant:mordant", version.ref = "mordant" }
accompanist-appcompat-theme = { module = "com.google.accompanist:accompanist-appcompat-theme", version.ref = "accompanistAppcompatTheme" }
accompanist-systemuicontroller = { module = "com.google.accompanist:accompanist-systemuicontroller", version.ref = "accompanistAppcompatTheme" }
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 5dcef08c9..191bec60c 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -16,4 +16,4 @@ dependencyResolutionManagement {
rootProject.name = "Linux Command Library"
-include(":android", ":common", ":websiteBuilder", ":cli", ":composeApp", ":desktopApp", ":viewmodels", ":screenshotTests")
+include(":android", ":common", ":websiteBuilder", ":cli", ":composeApp", ":desktopApp", ":viewmodels", ":screenshotTests", ":ai")
From 536225052a8f14e8f1cb0aaf4a4dec4b69a5aa45 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 08:30:48 +0000
Subject: [PATCH 3/9] fix(ai): replace AtomicLong with KMP-compatible Mutex
counter; extract all AI screen strings to resources
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/e13c9d82-551e-42de-8217-3a0bec8a834a
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
.../ai/chat/ChatRepository.kt | 1 +
.../ai/chat/ChatViewModel.kt | 2 +-
.../linuxcommandlibrary/ai/mcp/McpClient.kt | 10 ++++--
.../composeResources/values-pl/strings.xml | 14 ++++++++
.../composeResources/values/strings.xml | 14 ++++++++
.../app/ui/screens/aichat/AiChatScreen.kt | 22 ++++++++----
.../app/ui/screens/aichat/AiSettingsScreen.kt | 34 +++++++++++--------
7 files changed, 73 insertions(+), 24 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index e9b77d21c..c8d3f36df 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -30,6 +30,7 @@ class ChatRepository(
private const val KEY_API_KEY = "ai_api_key"
private const val KEY_BASE_URL = "ai_base_url"
private const val KEY_MODEL_ID = "ai_model_id"
+ const val NOT_CONFIGURED_ERROR_KEY = "ai_not_configured"
/** Default system prompt injected into every conversation. */
const val SYSTEM_PROMPT = """You are a helpful Linux assistant built into the Linux Command Library app.
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
index 542aa9153..2ad62e39a 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
@@ -42,7 +42,7 @@ class ChatViewModel(
fun sendMessage(text: String) {
if (text.isBlank()) return
if (!chatRepository.isConfigured()) {
- _uiState.update { it.copy(errorMessage = "AI provider not configured. Please open Settings and add an API key.") }
+ _uiState.update { it.copy(errorMessage = ChatRepository.NOT_CONFIGURED_ERROR_KEY) }
return
}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
index 4964137a9..b53039eae 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
@@ -16,10 +16,11 @@ import io.ktor.client.request.setBody
import io.ktor.http.ContentType
import io.ktor.http.contentType
import io.ktor.serialization.kotlinx.json.json
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
import kotlinx.serialization.json.JsonElement
import kotlinx.serialization.json.JsonNull
import kotlinx.serialization.json.JsonObject
-import java.util.concurrent.atomic.AtomicLong
/**
* Lightweight MCP client that communicates over HTTP using JSON-RPC 2.0.
@@ -40,8 +41,11 @@ class McpClient(
install(ContentNegotiation) { json(JsonRpcJson) }
}
- private val idCounter = AtomicLong(1L)
- private fun nextId() = jsonRpcId(idCounter.getAndIncrement())
+ private val idMutex = Mutex()
+ private var idCounter = 0L
+ private suspend fun nextId(): JsonRpcId {
+ return idMutex.withLock { jsonRpcId(++idCounter) }
+ }
// ──────────────────────────────────────────────
// Lifecycle
diff --git a/composeApp/src/commonMain/composeResources/values-pl/strings.xml b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
index d8dfc2096..cf7d199ae 100644
--- a/composeApp/src/commonMain/composeResources/values-pl/strings.xml
+++ b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
@@ -32,5 +32,19 @@
Wersja %1$s
Proton Free
Linode Cloud
+ Asystent AI
+ Ustawienia AI
+ Konfiguracja dostawcy AI
+ Dostawca
+ Dostawca modelu LLM
+ Klucz API
+ sk-…
+ URL serwera
+ Identyfikator modelu
+ Zapisz konfigurację
+ Wyślij
+ Zadaj pytanie o Linuksa…
+ Obsługiwani dostawcy:\n• OpenAI — wymaga klucza API (platform.openai.com)\n• Ollama — lokalny serwer LLM, domyślnie http://localhost:11434\n• Kompatybilny z OpenAI — Groq, LM Studio, LocalAI i inne
+ Dostawca AI nie jest skonfigurowany. Otwórz Ustawienia i dodaj klucz API.
diff --git a/composeApp/src/commonMain/composeResources/values/strings.xml b/composeApp/src/commonMain/composeResources/values/strings.xml
index ba181a703..eedce9634 100644
--- a/composeApp/src/commonMain/composeResources/values/strings.xml
+++ b/composeApp/src/commonMain/composeResources/values/strings.xml
@@ -32,5 +32,19 @@
Version %1$s
Proton Free
Linode Cloud
+ AI Assistant
+ AI Settings
+ AI Provider Settings
+ Provider
+ LLM Model Provider
+ API Key
+ sk-…
+ Server URL
+ Model ID
+ Save configuration
+ Send
+ Ask a Linux question…
+ Supported providers:\n• OpenAI — requires API key (platform.openai.com)\n• Ollama — local LLM server, default http://localhost:11434\n• OpenAI-compatible — Groq, LM Studio, LocalAI and others
+ AI provider not configured. Please open Settings and add an API key.
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
index 0d1dbd278..3df000d33 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
@@ -47,11 +47,19 @@ import androidx.compose.ui.input.pointer.pointerHoverIcon
import androidx.compose.ui.text.input.ImeAction
import androidx.compose.ui.unit.dp
import com.linuxcommandlibrary.ai.chat.ChatMessage
+import com.linuxcommandlibrary.ai.chat.ChatRepository
import com.linuxcommandlibrary.ai.chat.ChatViewModel
import com.linuxcommandlibrary.ai.llm.LlmRole
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.ai_assistant
+import com.linuxcommandlibrary.app.resources.ai_input_placeholder
+import com.linuxcommandlibrary.app.resources.ai_not_configured
+import com.linuxcommandlibrary.app.resources.ai_send
+import com.linuxcommandlibrary.app.resources.ai_settings
import com.linuxcommandlibrary.app.ui.AppIcons
import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
import kotlinx.coroutines.launch
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.koinInject
/**
@@ -74,10 +82,12 @@ fun AiChatScreen(
val snackbarHostState = remember { SnackbarHostState() }
val scope = rememberCoroutineScope()
+ val notConfiguredStr = stringResource(Res.string.ai_not_configured)
LaunchedEffect(uiState.errorMessage) {
- val msg = uiState.errorMessage ?: return@LaunchedEffect
+ val key = uiState.errorMessage ?: return@LaunchedEffect
+ val displayMsg = if (key == ChatRepository.NOT_CONFIGURED_ERROR_KEY) notConfiguredStr else key
scope.launch {
- snackbarHostState.showSnackbar(msg)
+ snackbarHostState.showSnackbar(displayMsg)
viewModel.dismissError()
}
}
@@ -85,7 +95,7 @@ fun AiChatScreen(
Box(modifier = Modifier.fillMaxSize()) {
Column(modifier = Modifier.fillMaxSize()) {
PaneTopBar(
- title = "AI Assistant",
+ title = stringResource(Res.string.ai_assistant),
onBack = onBack,
actions = {
IconButton(
@@ -94,7 +104,7 @@ fun AiChatScreen(
) {
Icon(
imageVector = AppIcons.Info,
- contentDescription = "AI Settings",
+ contentDescription = stringResource(Res.string.ai_settings),
)
}
},
@@ -224,7 +234,7 @@ private fun ChatInputBar(
value = text,
onValueChange = { text = it },
modifier = Modifier.weight(1f),
- placeholder = { Text("Zadaj pytanie o Linuksa…") },
+ placeholder = { Text(stringResource(Res.string.ai_input_placeholder)) },
enabled = enabled,
maxLines = 5,
shape = RoundedCornerShape(24.dp),
@@ -263,7 +273,7 @@ private fun ChatInputBar(
) {
Icon(
imageVector = AppIcons.Send,
- contentDescription = "Wyślij",
+ contentDescription = stringResource(Res.string.ai_send),
tint = if (text.isNotBlank() && enabled) {
MaterialTheme.colorScheme.onPrimary
} else {
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
index 6d85ade1a..b56d8741b 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
@@ -32,7 +32,18 @@ import androidx.compose.ui.unit.dp
import com.linuxcommandlibrary.ai.chat.ChatViewModel
import com.linuxcommandlibrary.ai.llm.LlmConfig
import com.linuxcommandlibrary.ai.llm.LlmProviderType
+import com.linuxcommandlibrary.app.resources.Res
+import com.linuxcommandlibrary.app.resources.ai_api_key_label
+import com.linuxcommandlibrary.app.resources.ai_api_key_placeholder
+import com.linuxcommandlibrary.app.resources.ai_model_id_label
+import com.linuxcommandlibrary.app.resources.ai_provider_label
+import com.linuxcommandlibrary.app.resources.ai_provider_section
+import com.linuxcommandlibrary.app.resources.ai_save_config
+import com.linuxcommandlibrary.app.resources.ai_server_url_label
+import com.linuxcommandlibrary.app.resources.ai_settings_title
+import com.linuxcommandlibrary.app.resources.ai_supported_providers
import com.linuxcommandlibrary.app.ui.composables.PaneTopBar
+import org.jetbrains.compose.resources.stringResource
import org.koin.compose.koinInject
/**
@@ -54,7 +65,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
var providerDropdownExpanded by remember { mutableStateOf(false) }
Column(modifier = Modifier.fillMaxSize()) {
- PaneTopBar(title = "Ustawienia AI", onBack = onBack)
+ PaneTopBar(title = stringResource(Res.string.ai_settings_title), onBack = onBack)
Column(
modifier = Modifier
@@ -63,7 +74,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
.padding(24.dp),
) {
Text(
- text = "Dostawca modelu LLM",
+ text = stringResource(Res.string.ai_provider_section),
style = MaterialTheme.typography.titleMedium,
)
Spacer(Modifier.height(12.dp))
@@ -77,7 +88,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
value = selectedProvider.displayName,
onValueChange = {},
readOnly = true,
- label = { Text("Dostawca") },
+ label = { Text(stringResource(Res.string.ai_provider_label)) },
trailingIcon = { ExposedDropdownMenuDefaults.TrailingIcon(providerDropdownExpanded) },
modifier = Modifier.fillMaxWidth().menuAnchor(),
shape = RoundedCornerShape(12.dp),
@@ -106,8 +117,8 @@ fun AiSettingsScreen(onBack: () -> Unit) {
OutlinedTextField(
value = apiKey,
onValueChange = { apiKey = it },
- label = { Text("Klucz API") },
- placeholder = { Text("sk-…") },
+ label = { Text(stringResource(Res.string.ai_api_key_label)) },
+ placeholder = { Text(stringResource(Res.string.ai_api_key_placeholder)) },
visualTransformation = PasswordVisualTransformation(),
keyboardOptions = KeyboardOptions(keyboardType = KeyboardType.Password),
modifier = Modifier.fillMaxWidth(),
@@ -119,7 +130,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
OutlinedTextField(
value = baseUrl,
onValueChange = { baseUrl = it },
- label = { Text("URL serwera") },
+ label = { Text(stringResource(Res.string.ai_server_url_label)) },
placeholder = { Text(selectedProvider.defaultBaseUrl) },
modifier = Modifier.fillMaxWidth(),
shape = RoundedCornerShape(12.dp),
@@ -130,7 +141,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
OutlinedTextField(
value = modelId,
onValueChange = { modelId = it },
- label = { Text("Identyfikator modelu") },
+ label = { Text(stringResource(Res.string.ai_model_id_label)) },
placeholder = { Text(selectedProvider.defaultModel) },
modifier = Modifier.fillMaxWidth(),
shape = RoundedCornerShape(12.dp),
@@ -155,18 +166,13 @@ fun AiSettingsScreen(onBack: () -> Unit) {
.pointerHoverIcon(PointerIcon.Hand),
shape = RoundedCornerShape(12.dp),
) {
- Text("Zapisz konfigurację")
+ Text(stringResource(Res.string.ai_save_config))
}
Spacer(Modifier.height(16.dp))
Text(
- text = buildString {
- appendLine("Obsługiwane dostawcy:")
- appendLine("• OpenAI — wymaga klucza API (platform.openai.com)")
- appendLine("• Ollama — lokalny serwer LLM, domyślnie http://localhost:11434")
- appendLine("• Kompatybilny z OpenAI — Groq, LM Studio, LocalAI i inne")
- },
+ text = stringResource(Res.string.ai_supported_providers),
style = MaterialTheme.typography.bodySmall,
color = MaterialTheme.colorScheme.onSurface.copy(alpha = 0.6f),
)
From 6ea28b8c30414f692c9ca13bd89a0689dd765c28 Mon Sep 17 00:00:00 2001
From: TTK <86811297+grand151@users.noreply.github.com>
Date: Mon, 11 May 2026 10:56:35 +0200
Subject: [PATCH 4/9] Potential fix for pull request finding
Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
---
.../com/linuxcommandlibrary/ai/chat/ChatRepository.kt | 10 ++++++++--
1 file changed, 8 insertions(+), 2 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index c8d3f36df..eefabc211 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -64,8 +64,14 @@ Keep responses concise and practical. Use code blocks for commands."""
fun isConfigured(): Boolean {
val config = loadConfig()
return when (config.providerType) {
- LlmProviderType.OLLAMA -> config.baseUrl.isNotBlank() || config.providerType.defaultBaseUrl.isNotBlank()
- else -> config.apiKey.isNotBlank()
+ LlmProviderType.OLLAMA ->
+ config.baseUrl.isNotBlank() || config.providerType.defaultBaseUrl.isNotBlank()
+
+ LlmProviderType.OPENAI ->
+ config.apiKey.isNotBlank()
+
+ LlmProviderType.OPENAI_COMPATIBLE ->
+ config.baseUrl.isNotBlank() && config.modelId.isNotBlank()
}
}
From 8b3730bf7afd6449a089690162d6b60fd526f45c Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 09:01:05 +0000
Subject: [PATCH 5/9] fix(ai): address all review feedback - compile errors,
resource leak, localization, API correctness
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/e0005cd7-b9db-4fad-91b7-d2a102f34bf4
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
.../ai/chat/ChatRepository.kt | 19 ++++++++++++++++++-
.../ai/chat/ChatViewModel.kt | 2 +-
.../linuxcommandlibrary/ai/llm/LlmProvider.kt | 3 +++
.../linuxcommandlibrary/ai/mcp/McpClient.kt | 12 +++++++++---
.../ai/tools/LinuxLibraryToolRegistry.kt | 5 +++--
.../composeResources/values-pl/strings.xml | 2 ++
.../composeResources/values/strings.xml | 2 ++
.../app/ui/screens/aichat/AiChatScreen.kt | 10 +++++++++-
.../app/ui/screens/aichat/AiSettingsScreen.kt | 3 ++-
9 files changed, 49 insertions(+), 9 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index eefabc211..3401a81e0 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -144,14 +144,31 @@ Keep responses concise and practical. Use code blocks for commands."""
// Internal helpers
// ──────────────────────────────────────────────
+ /**
+ * Cached provider keyed by the config it was created from.
+ * Re-used on subsequent calls so that each [sendMessage] / [streamMessage]
+ * does not spin up a fresh Ktor [HttpClient] (and associated thread pool).
+ * The old provider is closed whenever the config changes.
+ */
+ @Volatile
+ private var cachedProvider: Pair<LlmConfig, LlmProvider>? = null
+
private fun buildProvider(): LlmProvider {
val config = loadConfig()
- return when (config.providerType) {
+ cachedProvider?.let { (cachedConfig, provider) ->
+ if (cachedConfig == config) return provider
+ closeProvider(provider)
+ }
+ val newProvider: LlmProvider = when (config.providerType) {
LlmProviderType.OLLAMA -> OllamaProvider(config)
else -> OpenAiProvider(config)
}
+ cachedProvider = config to newProvider
+ return newProvider
}
+ private fun closeProvider(provider: LlmProvider) = provider.close()
+
private suspend fun executeToolCall(call: LlmToolCall): String {
val result = runCatching {
toolRegistry.call(call.name, call.arguments)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
index 2ad62e39a..b01e5fa7a 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatViewModel.kt
@@ -98,7 +98,7 @@ class ChatViewModel(
messages = state.messages.map { msg ->
if (msg.id == assistantMessageId) {
msg.copy(
- content = "Error: ${e.message}",
+ content = e.message ?: "",
isStreaming = false,
isError = true,
)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
index 66750f3c3..3f79dc512 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmProvider.kt
@@ -30,4 +30,7 @@ interface LlmProvider {
* @return [Flow] of incremental text deltas; collect until the flow completes.
*/
 fun stream(messages: List<LlmMessage>, tools: List<LlmTool> = emptyList()): Flow<String>
+
+ /** Release underlying HTTP client resources. */
+ fun close()
}
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
index b53039eae..7ee8f4b9b 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/mcp/McpClient.kt
@@ -2,6 +2,7 @@ package com.linuxcommandlibrary.ai.mcp
import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcError
import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcErrorCode
+import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcId
import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcJson
import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcRequest
import com.linuxcommandlibrary.ai.jsonrpc.JsonRpcResponse
@@ -65,11 +66,16 @@ class McpClient(
/**
* Send the `notifications/initialized` notification (no response expected).
+ * Fire-and-forget: many servers return no body for notifications, so we discard
+ * the response entirely.
*/
suspend fun notifyInitialized() {
- postRaw(
- JsonRpcRequest(method = McpMethod.INITIALIZED, params = JsonNull, id = null),
- )
+ runCatching {
+ client.post(endpointUrl) {
+ contentType(ContentType.Application.Json)
+ setBody(JsonRpcRequest(method = McpMethod.INITIALIZED, params = JsonNull, id = null))
+ }
+ }
}
// ──────────────────────────────────────────────
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
index f52b85716..df1c0d0a9 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
@@ -6,8 +6,9 @@ import com.linuxcommandlibrary.ai.mcp.McpContentType
import com.linuxcommandlibrary.ai.mcp.McpException
import com.linuxcommandlibrary.ai.mcp.McpTool
import com.linuxcommandlibrary.ai.mcp.McpToolCallResult
-import com.linuxcommandlibrary.app.data.CommandsRepository
import com.linuxcommandlibrary.app.data.BasicsRepository
+import com.linuxcommandlibrary.app.data.BasicGroupMatch
+import com.linuxcommandlibrary.app.data.CommandsRepository
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.buildJsonObject
import kotlinx.serialization.json.jsonPrimitive
@@ -128,7 +129,7 @@ class LinuxLibraryToolRegistry(
private fun searchBasics(arguments: JsonObject?): McpToolCallResult {
val query = arguments?.get("query")?.jsonPrimitive?.content ?: ""
- val matches = basicsRepository.getBasicGroupsByQuery(query).take(10)
+ val matches = basicsRepository.getMatchingGroups(query).take(10)
val text = if (matches.isEmpty()) {
"No basics sections found for query: '$query'"
} else {
diff --git a/composeApp/src/commonMain/composeResources/values-pl/strings.xml b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
index cf7d199ae..4f4340157 100644
--- a/composeApp/src/commonMain/composeResources/values-pl/strings.xml
+++ b/composeApp/src/commonMain/composeResources/values-pl/strings.xml
@@ -46,5 +46,7 @@
Zadaj pytanie o Linuksa…
Obsługiwani dostawcy:\n• OpenAI — wymaga klucza API (platform.openai.com)\n• Ollama — lokalny serwer LLM, domyślnie http://localhost:11434\n• Kompatybilny z OpenAI — Groq, LM Studio, LocalAI i inne
Dostawca AI nie jest skonfigurowany. Otwórz Ustawienia i dodaj klucz API.
+ Błąd: %s
+ Wystąpił nieznany błąd.
diff --git a/composeApp/src/commonMain/composeResources/values/strings.xml b/composeApp/src/commonMain/composeResources/values/strings.xml
index eedce9634..0cd279bd6 100644
--- a/composeApp/src/commonMain/composeResources/values/strings.xml
+++ b/composeApp/src/commonMain/composeResources/values/strings.xml
@@ -46,5 +46,7 @@
Ask a Linux question…
Supported providers:\n• OpenAI — requires API key (platform.openai.com)\n• Ollama — local LLM server, default http://localhost:11434\n• OpenAI-compatible — Groq, LM Studio, LocalAI and others
AI provider not configured. Please open Settings and add an API key.
+ Error: %s
+ An unknown error occurred.
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
index 3df000d33..2e45d50f8 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiChatScreen.kt
@@ -52,6 +52,8 @@ import com.linuxcommandlibrary.ai.chat.ChatViewModel
import com.linuxcommandlibrary.ai.llm.LlmRole
import com.linuxcommandlibrary.app.resources.Res
import com.linuxcommandlibrary.app.resources.ai_assistant
+import com.linuxcommandlibrary.app.resources.ai_error_format
+import com.linuxcommandlibrary.app.resources.ai_error_unknown
import com.linuxcommandlibrary.app.resources.ai_input_placeholder
import com.linuxcommandlibrary.app.resources.ai_not_configured
import com.linuxcommandlibrary.app.resources.ai_send
@@ -196,8 +198,14 @@ private fun MessageBubble(message: ChatMessage) {
strokeWidth = 2.dp,
)
} else {
+ val displayText = if (message.isError) {
+ val detail = message.content.ifBlank { stringResource(Res.string.ai_error_unknown) }
+ stringResource(Res.string.ai_error_format, detail)
+ } else {
+ message.content
+ }
Text(
- text = message.content,
+ text = displayText,
style = MaterialTheme.typography.bodyMedium,
color = if (message.isError) {
MaterialTheme.colorScheme.error
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
index b56d8741b..ab751e767 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
@@ -16,6 +16,7 @@ import androidx.compose.material3.ExperimentalMaterial3Api
import androidx.compose.material3.ExposedDropdownMenuBox
import androidx.compose.material3.ExposedDropdownMenuDefaults
import androidx.compose.material3.MaterialTheme
+import androidx.compose.material3.MenuAnchorType
import androidx.compose.material3.OutlinedTextField
import androidx.compose.material3.Text
import androidx.compose.runtime.Composable
@@ -90,7 +91,7 @@ fun AiSettingsScreen(onBack: () -> Unit) {
readOnly = true,
label = { Text(stringResource(Res.string.ai_provider_label)) },
trailingIcon = { ExposedDropdownMenuDefaults.TrailingIcon(providerDropdownExpanded) },
- modifier = Modifier.fillMaxWidth().menuAnchor(),
+ modifier = Modifier.fillMaxWidth().menuAnchor(MenuAnchorType.PrimaryNotEditable),
shape = RoundedCornerShape(12.dp),
)
ExposedDropdownMenu(
From bc8c758e84493df81d8ddd8daccda76b4800c282 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 09:02:38 +0000
Subject: [PATCH 6/9] fix(ai): remove unused import, protect buildProvider with
Mutex for thread safety, make streamMessage suspend
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/e0005cd7-b9db-4fad-91b7-d2a102f34bf4
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
.../ai/chat/ChatRepository.kt | 18 +++++++++++-------
.../ai/tools/LinuxLibraryToolRegistry.kt | 1 -
2 files changed, 11 insertions(+), 8 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index 3401a81e0..7a5b27ee5 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -12,6 +12,8 @@ import com.linuxcommandlibrary.ai.tools.LinuxLibraryToolRegistry
import com.linuxcommandlibrary.shared.platform.PreferencesStorage
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.emptyFlow
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
/**
* Orchestrates conversation between the user and the configured LLM provider.
@@ -90,8 +92,7 @@ Keep responses concise and practical. Use code blocks for commands."""
history: List<ChatMessage>,
userMessage: String,
): Result<String> = runCatching {
- val provider = buildProvider()
- val tools = toolRegistry.allTools()
+ val provider = buildProvider() val tools = toolRegistry.allTools()
val messages = buildList {
add(LlmMessage(role = LlmRole.SYSTEM, content = SYSTEM_PROMPT))
@@ -127,7 +128,7 @@ Keep responses concise and practical. Use code blocks for commands."""
* Stream the assistant response token by token.
* Note: streaming does not support tool calls in this implementation.
*/
- fun streamMessage(
+ suspend fun streamMessage(
history: List<ChatMessage>,
userMessage: String,
): Flow<String> {
@@ -149,14 +150,17 @@ Keep responses concise and practical. Use code blocks for commands."""
* Re-used on subsequent calls so that each [sendMessage] / [streamMessage]
* does not spin up a fresh Ktor [HttpClient] (and associated thread pool).
* The old provider is closed whenever the config changes.
+ *
+ * [providerMutex] serialises the check-then-act so that concurrent coroutines
+ * cannot simultaneously observe a stale cache and create duplicate providers.
*/
- @Volatile
+ private val providerMutex = Mutex()
private var cachedProvider: Pair<LlmConfig, LlmProvider>? = null
- private fun buildProvider(): LlmProvider {
+ private suspend fun buildProvider(): LlmProvider = providerMutex.withLock {
val config = loadConfig()
cachedProvider?.let { (cachedConfig, provider) ->
- if (cachedConfig == config) return provider
+ if (cachedConfig == config) return@withLock provider
closeProvider(provider)
}
val newProvider: LlmProvider = when (config.providerType) {
@@ -164,7 +168,7 @@ Keep responses concise and practical. Use code blocks for commands."""
else -> OpenAiProvider(config)
}
cachedProvider = config to newProvider
- return newProvider
+ newProvider
}
private fun closeProvider(provider: LlmProvider) = provider.close()
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
index df1c0d0a9..ffcacae26 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/tools/LinuxLibraryToolRegistry.kt
@@ -7,7 +7,6 @@ import com.linuxcommandlibrary.ai.mcp.McpException
import com.linuxcommandlibrary.ai.mcp.McpTool
import com.linuxcommandlibrary.ai.mcp.McpToolCallResult
import com.linuxcommandlibrary.app.data.BasicsRepository
-import com.linuxcommandlibrary.app.data.BasicGroupMatch
import com.linuxcommandlibrary.app.data.CommandsRepository
import kotlinx.serialization.json.JsonObject
import kotlinx.serialization.json.buildJsonObject
From e6f54de2d85e81ae3e8ac304491afb487824486b Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 09:07:27 +0000
Subject: [PATCH 7/9] fix(ai): persist tool_calls payload in assistant turn so
OpenAI follow-up requests are valid
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/d47d67b2-fe54-41d5-8f9f-8066ab949501
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
.../linuxcommandlibrary/ai/chat/ChatRepository.kt | 11 +++++++++--
.../com/linuxcommandlibrary/ai/llm/LlmModels.kt | 7 +++++++
.../linuxcommandlibrary/ai/llm/OpenAiProvider.kt | 14 +++++++++++++-
3 files changed, 29 insertions(+), 3 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index 7a5b27ee5..38eb572dc 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -106,8 +106,15 @@ Keep responses concise and practical. Use code blocks for commands."""
if (response.toolCalls.isEmpty()) {
return@runCatching response.content
}
- // Append the assistant tool-call turn
- messages.add(LlmMessage(role = LlmRole.ASSISTANT, content = response.content))
+ // Append the assistant tool-call turn, including the tool_calls payload so
+ // that OpenAI-compatible APIs can match subsequent tool-result messages by ID.
+ messages.add(
+ LlmMessage(
+ role = LlmRole.ASSISTANT,
+ content = response.content,
+ toolCalls = response.toolCalls,
+ ),
+ )
// Execute each tool and append results
response.toolCalls.forEach { call ->
val result = executeToolCall(call)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
index 55433dd4c..33e74ab5a 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/LlmModels.kt
@@ -16,10 +16,17 @@ enum class LlmRole { SYSTEM, USER, ASSISTANT, TOOL }
/**
* A single turn in a conversation, with an optional tool-call result.
+ *
+ * When [role] is [LlmRole.ASSISTANT] and the model issued tool calls, [toolCalls] holds
+ * those requests so that follow-up requests to OpenAI-compatible APIs can replay the
+ * `tool_calls` payload (required for subsequent [LlmRole.TOOL] result messages to be
+ * accepted).
*/
data class LlmMessage(
val role: LlmRole,
val content: String,
+ /** Tool-call instructions issued by the model (assistant turns only). */
+ val toolCalls: List<LlmToolCall>? = null
/** Present when this message carries a tool-call result (role == TOOL). */
val toolCallId: String? = null,
val toolName: String? = null,
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
index a1ab24cd4..0d4d9f5f7 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
@@ -111,11 +111,23 @@ class OpenAiProvider(
) = OpenAiChatRequest(
model = modelId,
messages = messages.map { msg ->
+ val rawToolCalls = msg.toolCalls?.map { tc ->
+ OpenAiToolCall(
+ id = tc.id,
+ function = OpenAiFunctionCall(
+ name = tc.name,
+ // JsonObject.toString() produces valid compact JSON
+ arguments = tc.arguments.toString(),
+ ),
+ )
+ }
OpenAiMessage(
role = msg.role.name.lowercase(),
- content = msg.content,
+ // OpenAI requires content to be null (not "") when tool_calls are present
+ content = if (rawToolCalls != null) msg.content.ifBlank { null } else msg.content,
toolCallId = msg.toolCallId,
name = msg.toolName,
+ toolCalls = rawToolCalls,
)
},
tools = tools.takeIf { it.isNotEmpty() }?.map { tool ->
From 1f159318d0145210f0e05ad32a0906ea164804e7 Mon Sep 17 00:00:00 2001
From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com>
Date: Mon, 11 May 2026 09:16:47 +0000
Subject: [PATCH 8/9] fix(ai): omit name field for TOOL messages; fix
formatting in sendMessage
Agent-Logs-Url: https://github.com/grand151/LinuxCommandLibrary/sessions/573167e9-98fd-4112-8a35-a8e6bffc5043
Co-authored-by: grand151 <86811297+grand151@users.noreply.github.com>
---
.../kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt | 3 ++-
.../kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt | 3 ++-
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
index 38eb572dc..fba5a1e35 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/chat/ChatRepository.kt
@@ -92,7 +92,8 @@ Keep responses concise and practical. Use code blocks for commands."""
history: List<ChatMessage>,
userMessage: String,
): Result<String> = runCatching {
- val provider = buildProvider() val tools = toolRegistry.allTools()
+ val provider = buildProvider()
+ val tools = toolRegistry.allTools()
val messages = buildList {
add(LlmMessage(role = LlmRole.SYSTEM, content = SYSTEM_PROMPT))
diff --git a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
index 0d4d9f5f7..8b9d4aaca 100644
--- a/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
+++ b/ai/src/commonMain/kotlin/com/linuxcommandlibrary/ai/llm/OpenAiProvider.kt
@@ -126,7 +126,8 @@ class OpenAiProvider(
// OpenAI requires content to be null (not "") when tool_calls are present
content = if (rawToolCalls != null) msg.content.ifBlank { null } else msg.content,
toolCallId = msg.toolCallId,
- name = msg.toolName,
+ // `name` is not a valid field for tool-role messages in the OpenAI API
+ name = if (msg.role == LlmRole.TOOL) null else msg.toolName,
toolCalls = rawToolCalls,
)
},
From 72021ae4b7a14209927ae1bba1340ce5af0a6fa0 Mon Sep 17 00:00:00 2001
From: TTK <86811297+grand151@users.noreply.github.com>
Date: Mon, 11 May 2026 16:00:52 +0200
Subject: [PATCH 9/9] Update
composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
Co-authored-by: devin-ai-integration[bot] <158243242+devin-ai-integration[bot]@users.noreply.github.com>
---
.../app/ui/screens/aichat/AiSettingsScreen.kt | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
index ab751e767..054ef5aae 100644
--- a/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
+++ b/composeApp/src/commonMain/kotlin/com/linuxcommandlibrary/app/ui/screens/aichat/AiSettingsScreen.kt
@@ -103,8 +103,8 @@ fun AiSettingsScreen(onBack: () -> Unit) {
text = { Text(type.displayName) },
onClick = {
selectedProvider = type
- if (baseUrl.isBlank()) baseUrl = type.defaultBaseUrl
- if (modelId.isBlank()) modelId = type.defaultModel
+ baseUrl = type.defaultBaseUrl
+ modelId = type.defaultModel
providerDropdownExpanded = false
},
)