├── setup.cfg ├── linux ├── bootstrap.sh ├── org.fuentelibre.gtk_llm_Chat.png ├── org.fuentelibre.gtk_llm_Chat.appdata.xml ├── appimagetool-wrap.sh └── org.fuentelibre.gtk_llm_Chat.flatpak.yml ├── .github ├── FUNDING.yml ├── dependabot.yml └── workflows │ ├── build-all.yml │ ├── build-python.yml │ ├── build-windows.yml │ ├── release.yml │ ├── build-macos.yml │ └── build-linux.yml ├── docs └── screenshot01.png ├── hooks ├── hook-llm_groq.py ├── hook-llm_gemini.py ├── hook-llm_grok.py ├── hook-llm_deepseek.py ├── hook-llm_anthropic.py ├── hook-llm_openrouter.py ├── hook-llm_perplexity.py ├── hook-llm.py └── rthook_numpy_python313.py ├── po ├── de │ └── LC_MESSAGES │ │ ├── gtk-llm-chat.mo │ │ └── gtk-llm-chat.po ├── es │ └── LC_MESSAGES │ │ └── gtk-llm-chat.mo ├── pt │ └── LC_MESSAGES │ │ ├── gtk-llm-chat.mo │ │ └── gtk-llm-chat.po ├── zh │ └── LC_MESSAGES │ │ ├── gtk-llm-chat.mo │ │ └── gtk-llm-chat.po └── gtk-llm-chat.pot ├── gtk_llm_chat ├── hicolor │ ├── icon-theme.cache │ ├── 256x256 │ │ └── apps │ │ │ └── org.fuentelibre.gtk_llm_Chat.png │ ├── 48x48 │ │ └── apps │ │ │ └── org.fuentelibre.gtk_llm_Chat-symbolic.png │ ├── scalable │ │ ├── devices │ │ │ ├── padlock2-symbolic.svg │ │ │ ├── padlock2-open-symbolic.svg │ │ │ ├── open-book-symbolic.svg │ │ │ ├── brain-symbolic.svg │ │ │ └── brain-augmented-symbolic.svg │ │ └── actions │ │ │ └── checkmark-symbolic.svg │ ├── index.theme │ └── symbolic │ │ └── apps │ │ └── org.fuentelibre.gtk_llm_Chat-symbolic.svg ├── debug_utils.py ├── __init__.py ├── single_instance.py ├── llm_gui.py ├── widgets.py ├── main.py ├── python313_compatibility.py ├── resource_manager.py └── markdownview.py ├── macos ├── org.fuentelibre.gtk_llm_Chat.icns └── bootstrap.sh ├── windows ├── org.fuentelibre.gtk_llm_Chat.ico ├── org.fuentelibre.gtk_llm_Chat.png ├── bootstrap.sh └── build.nsi ├── .gitmodules ├── .gitignore ├── MANIFEST.in ├── desktop ├── org.fuentelibre.gtk_llm_Chat.desktop └── org.fuentelibre.gtk_llm_Applet.desktop ├── 
requirements.txt ├── compile_po.sh ├── add_language.sh ├── update_po.sh ├── .env.ci ├── pyproject.toml ├── debug_icons.sh ├── spec.md ├── debug_theme.sh ├── numpy_python313_patch.py ├── README.md └── todo.md /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | version = attr: gtk_llm_chat._version.get_version 3 | -------------------------------------------------------------------------------- /linux/bootstrap.sh: -------------------------------------------------------------------------------- 1 | echo VERSION=\"$(git describe --tags --exact-match)\" >> .env.ci 2 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: icarito 2 | patreon: icarito 3 | buy_me_a_coffee: icarito 4 | -------------------------------------------------------------------------------- /docs/screenshot01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/docs/screenshot01.png -------------------------------------------------------------------------------- /hooks/hook-llm_groq.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-groq') -------------------------------------------------------------------------------- /hooks/hook-llm_gemini.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-gemini') 4 | -------------------------------------------------------------------------------- /hooks/hook-llm_grok.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = 
copy_metadata('llm-grok') 4 | -------------------------------------------------------------------------------- /po/de/LC_MESSAGES/gtk-llm-chat.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/po/de/LC_MESSAGES/gtk-llm-chat.mo -------------------------------------------------------------------------------- /po/es/LC_MESSAGES/gtk-llm-chat.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/po/es/LC_MESSAGES/gtk-llm-chat.mo -------------------------------------------------------------------------------- /po/pt/LC_MESSAGES/gtk-llm-chat.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/po/pt/LC_MESSAGES/gtk-llm-chat.mo -------------------------------------------------------------------------------- /po/zh/LC_MESSAGES/gtk-llm-chat.mo: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/po/zh/LC_MESSAGES/gtk-llm-chat.mo -------------------------------------------------------------------------------- /hooks/hook-llm_deepseek.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-deepseek') 4 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/icon-theme.cache: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/gtk_llm_chat/hicolor/icon-theme.cache -------------------------------------------------------------------------------- /hooks/hook-llm_anthropic.py: -------------------------------------------------------------------------------- 1 | from 
PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-anthropic') 4 | -------------------------------------------------------------------------------- /hooks/hook-llm_openrouter.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-openrouter') 4 | -------------------------------------------------------------------------------- /hooks/hook-llm_perplexity.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import copy_metadata 2 | 3 | datas = copy_metadata('llm-perplexity') 4 | -------------------------------------------------------------------------------- /linux/org.fuentelibre.gtk_llm_Chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/linux/org.fuentelibre.gtk_llm_Chat.png -------------------------------------------------------------------------------- /macos/org.fuentelibre.gtk_llm_Chat.icns: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/macos/org.fuentelibre.gtk_llm_Chat.icns -------------------------------------------------------------------------------- /windows/org.fuentelibre.gtk_llm_Chat.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/windows/org.fuentelibre.gtk_llm_Chat.ico -------------------------------------------------------------------------------- /windows/org.fuentelibre.gtk_llm_Chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/windows/org.fuentelibre.gtk_llm_Chat.png -------------------------------------------------------------------------------- 
/gtk_llm_chat/hicolor/256x256/apps/org.fuentelibre.gtk_llm_Chat.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/gtk_llm_chat/hicolor/256x256/apps/org.fuentelibre.gtk_llm_Chat.png -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/48x48/apps/org.fuentelibre.gtk_llm_Chat-symbolic.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icarito/gtk-llm-chat/HEAD/gtk_llm_chat/hicolor/48x48/apps/org.fuentelibre.gtk_llm_Chat-symbolic.png -------------------------------------------------------------------------------- /macos/bootstrap.sh: -------------------------------------------------------------------------------- 1 | brew install pygobject3 gtk4 adwaita-icon-theme libadwaita 2 | python3 -m pip install --no-binary :all: --force-reinstall Pillow -C harfbuzz=disable -C freetype=disable --no-cache-dir 3 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "linux/shared-modules"] 2 | path = linux/shared-modules 3 | url = https://github.com/flathub/shared-modules.git 4 | [submodule "linux/pystray"] 5 | path = linux/pystray 6 | url = git@github.com:icarito/pystray.git 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv/ 2 | # Byte-compiled files 3 | __pycache__/ 4 | *.py[cod] 5 | *$py.class 6 | *.so 7 | gtk_llm_chat/_version.py 8 | 9 | # Distribution / packaging 10 | dist/ 11 | build/ 12 | *.egg-info/ 13 | plans/ 14 | po/**/*~ 15 | .env 16 | tests 17 | -------------------------------------------------------------------------------- /MANIFEST.in: 
-------------------------------------------------------------------------------- 1 | include LICENSE 2 | include README.md 3 | include requirements.txt 4 | recursive-include gtk_llm_chat *.py 5 | recursive-include po *.mo 6 | include desktop/*.desktop 7 | global-exclude *.pyc 8 | global-exclude *.pyo 9 | global-exclude __pycache__ 10 | -------------------------------------------------------------------------------- /gtk_llm_chat/debug_utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | debug_utils.py - helpers puros sin dependencias de GTK ni gi 3 | """ 4 | import os 5 | 6 | DEBUG = os.environ.get('DEBUG') or False 7 | 8 | def debug_print(*args, **kwargs): 9 | if DEBUG: 10 | print(*args, **kwargs) 11 | -------------------------------------------------------------------------------- /desktop/org.fuentelibre.gtk_llm_Chat.desktop: -------------------------------------------------------------------------------- 1 | [Desktop Entry] 2 | Name=GTK LLM Chat 3 | Comment=Una interfaz gráfica GTK para chatear con modelos de lenguaje (LLMs) 4 | Exec=gtk-llm-chat 5 | Icon=org.fuentelibre.gtk_llm_Chat 6 | Terminal=false 7 | Type=Application 8 | Categories=GNOME;GTK;Utility; 9 | StartupWMClass=org.fuentelibre.gtk_llm_Chat 10 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | # Maintain dependencies for GitHub Actions 4 | - package-ecosystem: "github-actions" 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | # Maintain dependencies for pip 9 | - package-ecosystem: "pip" 10 | directory: "/" 11 | schedule: 12 | interval: "daily" 13 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pygobject 2 | markdown-it-py 3 | 
python-ulid 4 | llm 5 | llm-gemini 6 | llm-groq 7 | llm-grok 8 | llm-deepseek 9 | llm-perplexity 10 | llm-anthropic 11 | llm-openrouter 12 | watchdog 13 | pillow 14 | pyxdg; sys_platform == "linux" 15 | pystray-freedesktop>=0.19.6a1; sys_platform == "linux" 16 | pystray; sys_platform == "darwin" 17 | pystray; sys_platform == "win32" 18 | -------------------------------------------------------------------------------- /compile_po.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Compile .po files to .mo files 4 | for lang in po/*; do 5 | if [ -d "$lang" ]; then 6 | if [ -f "$lang/LC_MESSAGES/gtk-llm-chat.po" ]; then 7 | msgfmt "$lang/LC_MESSAGES/gtk-llm-chat.po" -o "$lang/LC_MESSAGES/gtk-llm-chat.mo" 8 | echo $lang 9 | fi 10 | fi 11 | done 12 | 13 | echo "Compiled .po files to .mo files." -------------------------------------------------------------------------------- /hooks/hook-llm.py: -------------------------------------------------------------------------------- 1 | from PyInstaller.utils.hooks import collect_entry_point, collect_submodules 2 | from PyInstaller.utils.hooks import copy_metadata 3 | 4 | datas, hiddenimports = collect_entry_point('llm.register_models') 5 | datas += copy_metadata('llm') 6 | 7 | # Recoger explícitamente todos los submódulos de default_plugins 8 | hiddenimports += collect_submodules('llm.default_plugins') 9 | 10 | -------------------------------------------------------------------------------- /desktop/org.fuentelibre.gtk_llm_Applet.desktop: -------------------------------------------------------------------------------- 1 | [Desktop Entry] 2 | Name=GTK LLM Applet 3 | Comment=Un applet GTK para chatear con modelos de lenguaje (LLMs) 4 | Exec=gtk-llm-chat --applet 5 | Icon=org.fuentelibre.gtk_llm_Chat 6 | Terminal=false 7 | Type=Application 8 | Categories=GNOME;GTK;Utility; 9 | StartupWMClass=org.fuentelibre.gtk_llm_Chat 10 | X-GNOME-Autostart-enabled=true 11 | 
NoDisplay=true 12 | -------------------------------------------------------------------------------- /add_language.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [ -z "$1" ]; then 4 | echo "Usage: add_language.sh " 5 | exit 1 6 | fi 7 | 8 | lang=$1 9 | 10 | if [ -d "po/$lang" ]; then 11 | echo "Language '$lang' already exists." 12 | exit 1 13 | fi 14 | 15 | mkdir -p "po/$lang/LC_MESSAGES" 16 | cp "po/gtk-llm-chat.pot" "po/$lang/LC_MESSAGES/gtk-llm-chat.po" 17 | 18 | echo "Language '$lang' added." -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/devices/padlock2-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /windows/bootstrap.sh: -------------------------------------------------------------------------------- 1 | pacman -S --noconfirm mingw-w64-$(uname -m)-gtk4 mingw-w64-$(uname -m)-python-pip mingw-w64-$(uname -m)-python3-gobject mingw-w64-$(uname -m)-libadwaita mingw-w64-$(uname -m)-rust git zlib zlib-devel mingw-w64-x86_64-python3-pillow 2 | # Usar git describe para versioning, con fallback a commit hash si no hay tag 3 | VERSION=$(git describe --tags --exact-match 2>/dev/null || git describe --tags --always || echo "dev-$(git rev-parse --short HEAD)") 4 | echo VERSION=\"$VERSION\" >> .env.ci 5 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/devices/padlock2-open-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /gtk_llm_chat/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Módulo de inicialización del paquete gtk_llm_chat. 
3 | Aplica parches de compatibilidad necesarios antes de importar cualquier otra cosa. 4 | """ 5 | 6 | import sys 7 | 8 | # Aplicar parches de compatibilidad Python 3.13 inmediatamente 9 | if sys.version_info >= (3, 13) and getattr(sys, 'frozen', False): 10 | try: 11 | from . import python313_compatibility 12 | python313_compatibility.apply_all_patches() 13 | except Exception as e: 14 | print(f"Warning: Could not apply Python 3.13 compatibility patches: {e}") -------------------------------------------------------------------------------- /update_po.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Update .pot file 4 | xgettext --package-name=gtk-llm-chat --package-version=0.1 --copyright-holder="Your Name" --msgid-bugs-address="your@email.com" \ 5 | --directory=. $(find gtk_llm_chat -name "*.py") -o po/gtk-llm-chat.pot 6 | 7 | # Update .po files for each language 8 | for lang in po/*; do 9 | if [ -d "$lang" ]; then 10 | if [ -f "$lang/LC_MESSAGES/gtk-llm-chat.po" ]; then 11 | msgmerge --update "$lang/LC_MESSAGES/gtk-llm-chat.po" po/gtk-llm-chat.pot 12 | fi 13 | fi 14 | done 15 | 16 | echo "Updated .pot and .po files." 
-------------------------------------------------------------------------------- /.github/workflows/build-all.yml: -------------------------------------------------------------------------------- 1 | # Main build workflow for Gtk LLM Chat 2 | name: Build All Platforms 3 | 4 | on: 5 | push: 6 | branches: [ main, develop ] 7 | pull_request: 8 | branches: [ main, develop ] 9 | workflow_dispatch: 10 | 11 | jobs: 12 | build-linux: 13 | name: Build Linux 14 | uses: ./.github/workflows/build-linux.yml 15 | 16 | build-windows: 17 | name: Build Windows 18 | uses: ./.github/workflows/build-windows.yml 19 | 20 | build-macos: 21 | name: Build macOS 22 | uses: ./.github/workflows/build-macos.yml 23 | 24 | build-python: 25 | name: Build Python Package 26 | uses: ./.github/workflows/build-python.yml 27 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/index.theme: -------------------------------------------------------------------------------- 1 | [Icon Theme] 2 | Name=GTK LLM Chat Icons 3 | Comment=Custom icons for GTK LLM Chat application 4 | Inherits=hicolor 5 | Directories=48x48/apps,256x256/apps,symbolic/apps,scalable/devices,scalable/actions 6 | 7 | [48x48/apps] 8 | Size=48 9 | Context=Applications 10 | Type=Fixed 11 | 12 | [256x256/apps] 13 | Size=256 14 | Context=Applications 15 | Type=Fixed 16 | 17 | [symbolic/apps] 18 | Size=16 19 | Context=Applications 20 | Type=Scalable 21 | MinSize=16 22 | MaxSize=512 23 | 24 | [scalable/devices] 25 | Size=16 26 | Context=Devices 27 | Type=Scalable 28 | MinSize=16 29 | MaxSize=512 30 | 31 | [scalable/actions] 32 | Size=16 33 | Context=Actions 34 | Type=Scalable 35 | MinSize=16 36 | MaxSize=512 37 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/actions/checkmark-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 
-------------------------------------------------------------------------------- /.github/workflows/build-python.yml: -------------------------------------------------------------------------------- 1 | # Python package build workflow for Gtk LLM Chat 2 | name: Build Python Package 3 | 4 | on: 5 | push: 6 | branches: [ main, develop ] 7 | pull_request: 8 | branches: [ main, develop ] 9 | workflow_dispatch: 10 | workflow_call: 11 | 12 | jobs: 13 | build-python: 14 | name: Build Python Package 15 | runs-on: ubuntu-latest 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Set up Python 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: '3.x' 26 | 27 | - name: Install build dependencies 28 | run: | 29 | python3 -m pip install --upgrade pip 30 | python3 -m pip install build setuptools-scm 31 | 32 | - name: Build Python package 33 | run: | 34 | python3 -m build 35 | 36 | - name: Store Python dist 37 | uses: actions/upload-artifact@v4 38 | with: 39 | name: python-dist 40 | path: dist/ 41 | -------------------------------------------------------------------------------- /.env.ci: -------------------------------------------------------------------------------- 1 | # Universal arguments 2 | FILENAME=gtk-llm-chat 3 | APPNAME="Gtk LLM Chat" 4 | ID=org.fuentelibre.gtk_llm_Chat 5 | AUTHOR="Sebastian Silva" 6 | DESCRIPTION="Gtk LLM Chat" 7 | LICENSE=LICENSE 8 | REQUIREMENTS=requirements.txt 9 | 10 | # Windows-specific arguments 11 | WINDOWS_ICON=windows/org.fuentelibre.gtk_llm_Chat.ico 12 | NSIS=windows/build.nsi 13 | 14 | # macOS-specific arguments 15 | MACOS_ICON=macos/org.fuentelibre.gtk_llm_Chat.icns 16 | 17 | # Linux-specific arguments 18 | LINUX_ICON=linux/org.fuentelibre.gtk_llm_Chat.png 19 | # APPDATA=linux/org.fuentelibre.gtk_llm_Chat.appdata.xml 20 | LINUX_APPIMAGETOOL=linux/appimagetool-wrap.sh 21 | 22 | # Linux desktop file arguments 23 | Type=Application 24 | Version= 25 | 
Name="${APPNAME}" 26 | GenericName= 27 | NoDisplay= 28 | Comment="${DESCRIPTION}" 29 | Icon="${ID}" 30 | Hidden= 31 | OnlyShowIn= 32 | DBusActivatable= 33 | TryExec= 34 | Exec="${FILENAME}" 35 | Path= 36 | Terminal=false 37 | Actions= 38 | MimeType= 39 | Categories="Utility" 40 | Implements= 41 | Keywords= 42 | StartupNotify= 43 | StartupWMClass="${ID}" 44 | URL= 45 | PrefersNonDefaultGPU= 46 | SingleMainWindow= 47 | -------------------------------------------------------------------------------- /.github/workflows/build-windows.yml: -------------------------------------------------------------------------------- 1 | # Windows build workflow for Gtk LLM Chat 2 | name: Build Windows 3 | 4 | on: 5 | push: 6 | branches: [ main, develop ] 7 | pull_request: 8 | branches: [ main, develop ] 9 | workflow_dispatch: 10 | workflow_call: 11 | 12 | jobs: 13 | build-windows: 14 | name: Build for Windows 15 | runs-on: windows-2022 16 | defaults: 17 | run: 18 | shell: msys2 {0} 19 | steps: 20 | - name: Checkout 21 | uses: actions/checkout@v4 22 | with: 23 | fetch-depth: 0 24 | 25 | - name: Set up MSYS2 26 | uses: msys2/setup-msys2@v2 27 | with: 28 | update: true 29 | 30 | - name: Install dependencies 31 | run: | 32 | ./windows/bootstrap.sh 33 | 34 | - name: Set up environment 35 | run: | 36 | cp .env.ci .env 37 | 38 | - name: Build package 39 | run: | 40 | python3 build-ci.py 41 | 42 | - name: Store the distribution packages 43 | uses: actions/upload-artifact@v4 44 | with: 45 | name: windows-dist 46 | path: dist/ 47 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/devices/open-book-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /linux/org.fuentelibre.gtk_llm_Chat.appdata.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 
org.fuentelibre.gtk_llm_Chat 4 | CC0-1.0 5 | GPL-3.0-or-later 6 | GTK LLM Chat 7 | A GTK frontend for chatting with Large Language Models 8 | 9 |

10 | GTK LLM Chat is a modern, user-friendly desktop application that provides 11 | a graphical interface for interacting with various Large Language Models (LLMs). 12 |

13 |

14 | Features include: 15 |

16 |
    17 |
  • Support for multiple LLM providers
  • 18 |
  • Conversation history management
  • 19 |
  • System tray integration
  • 20 |
  • Modern GTK4/Libadwaita interface
  • 21 |
  • Local conversation storage
  • 22 |
23 |
24 | 25 | Sebastian Silva 26 | 27 | https://github.com/fuentelibre/gtk-llm-chat 28 | https://github.com/fuentelibre/gtk-llm-chat/issues 29 | https://github.com/fuentelibre/gtk-llm-chat 30 | 31 | gtk-llm-chat 32 | 33 | org.fuentelibre.gtk_llm_Chat.desktop 34 | 35 | Utility 36 | 37 | 38 |
39 | -------------------------------------------------------------------------------- /gtk_llm_chat/single_instance.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import atexit 4 | import platform 5 | 6 | if os.name == 'nt': 7 | import msvcrt 8 | else: 9 | import fcntl 10 | 11 | class SingleInstance: 12 | def __init__(self, lockfile): 13 | self.lockfile = os.path.abspath(lockfile) 14 | self.fp = None 15 | 16 | try: 17 | self.fp = open(self.lockfile, 'w+') 18 | 19 | if os.name == 'nt': 20 | # Windows: intenta bloquear el archivo 21 | try: 22 | msvcrt.locking(self.fp.fileno(), msvcrt.LK_NBLCK, 1) 23 | except OSError: 24 | raise RuntimeError("Another instance is already running.") 25 | else: 26 | # Unix: intenta obtener un bloqueo exclusivo 27 | try: 28 | fcntl.flock(self.fp.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB) 29 | except OSError: 30 | raise RuntimeError("Another instance is already running.") 31 | 32 | # Guarda el PID en el archivo para fines informativos 33 | self.fp.write(str(os.getpid())) 34 | self.fp.flush() 35 | 36 | # Registra cleanup 37 | atexit.register(self.cleanup) 38 | 39 | except Exception: 40 | if self.fp: 41 | self.fp.close() 42 | raise 43 | 44 | def cleanup(self): 45 | try: 46 | if self.fp: 47 | self.fp.close() 48 | if os.path.exists(self.lockfile): 49 | os.remove(self.lockfile) 50 | except Exception: 51 | pass # Ignorar errores al salir 52 | 53 | 54 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "gtk-llm-chat" 3 | description = "A GTK graphical interface for chatting with large language models (LLMs)" 4 | readme = "README.md" 5 | authors = [{ name = "Sebastian Silva", email = "sebastian@fuentelibre.org" }] 6 | license = "GPL-3.0-or-later" 7 | classifiers = [ 8 | "Programming Language :: Python :: 3", 9 | "Operating 
System :: OS Independent", 10 | ] 11 | keywords = ["gtk", "chat", "llm", "ai", "openai", "gemini", "groq", 12 | "grok", "deepseek", "perplexity", "anthropic", "openrouter"] 13 | dependencies = [ 14 | 'pygobject', 15 | 'llm', 16 | 'llm-gemini', 17 | 'llm-groq', 18 | 'llm-grok', 19 | 'llm-deepseek', 20 | 'llm-perplexity', 21 | 'llm-anthropic', 22 | 'llm-openrouter', 23 | 'markdown-it-py', 24 | 'python-ulid', 25 | 'pystray-freedesktop>=0.19.6a1; sys_platform == "linux"', 26 | 'watchdog', 27 | 'pyxdg; sys_platform == "linux"', 28 | 'pystray; sys_platform == "darwin"', 29 | 'pystray; sys_platform == "win32"', 30 | ] 31 | 32 | requires-python = ">=3.8" 33 | dynamic = ["version"] 34 | 35 | [project.urls] 36 | Homepage = "https://gtk-llm-chat.fuentelibre.org/" 37 | Issues = "https://github.com/icarito/gtk_llm_chat/issues" 38 | Repository = "https://github.com/icarito/gtk_llm_chat.git" 39 | 40 | [build-system] 41 | requires = ["build", "setuptools>=61.0", "setuptools-scm"] 42 | build-backend = "setuptools.build_meta" 43 | 44 | [tool.setuptools_scm] 45 | version_file = "gtk_llm_chat/_version.py" 46 | tag_regex = "^v(?P[0-9]+\\.[0-9]+\\.[0-9]+(?:dev[0-9]*|alpha[0-9]*|beta[0-9]*)?)$" 47 | local_scheme = "no-local-version" 48 | version_scheme = "python-simplified-semver" 49 | 50 | [tool.flake8] 51 | extend-ignore = ["E402"] 52 | max-line-length = 95 53 | 54 | [tool.setuptools.packages.find] 55 | include = ["gtk_llm_chat"] 56 | 57 | [tool.setuptools.package-data] 58 | "gtk_llm_chat" = [ 59 | "../po/*/*/gtk-llm-chat.mo", 60 | "../desktop/*.desktop", 61 | ] 62 | 63 | [project.scripts] 64 | gtk-llm-chat = "gtk_llm_chat.main:main" 65 | 66 | [project.entry-points."llm"] 67 | gui = "gtk_llm_chat.llm_gui" 68 | -------------------------------------------------------------------------------- /debug_icons.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Script para depurar problemas de iconos en el Flatpak 3 | 4 | # Colores para 
output 5 | RED='\033[0;31m' 6 | GREEN='\033[0;32m' 7 | YELLOW='\033[1;33m' 8 | BLUE='\033[0;34m' 9 | NC='\033[0m' # No Color 10 | 11 | echo -e "${BLUE}=== Herramienta de diagnóstico de iconos para GTK LLM Chat ===${NC}" 12 | 13 | # Verificar si estamos dentro de un Flatpak 14 | if [[ -f "/.flatpak-info" ]]; then 15 | echo -e "${GREEN}✓ Ejecutando dentro de un Flatpak${NC}" 16 | FLATPAK_ID=$(grep "app-id=" "/.flatpak-info" | cut -d= -f2) 17 | echo -e "${GREEN} ID de aplicación: $FLATPAK_ID${NC}" 18 | else 19 | echo -e "${YELLOW}⚠ No está ejecutando dentro de un Flatpak${NC}" 20 | fi 21 | 22 | # Verificar variables de entorno GTK 23 | echo -e "\n${BLUE}Variables de entorno GTK:${NC}" 24 | echo -e "${GREEN}GTK_THEME=${GTK_THEME}${NC}" 25 | echo -e "${GREEN}GTK_THEME_VARIANT=${GTK_THEME_VARIANT}${NC}" 26 | echo -e "${GREEN}ICON_THEME=${ICON_THEME}${NC}" 27 | echo -e "${GREEN}LLM_USER_PATH=${LLM_USER_PATH}${NC}" 28 | 29 | # Verificar directorios de iconos 30 | echo -e "\n${BLUE}Verificando directorios de iconos:${NC}" 31 | icon_dirs=( 32 | "/app/share/icons/hicolor/symbolic/apps" 33 | "/app/share/icons/hicolor/scalable/apps" 34 | "/app/share/icons/hicolor/48x48/apps" 35 | "/app/gtk_llm_chat/hicolor/symbolic/apps" 36 | "/app/gtk_llm_chat/hicolor/scalable/apps" 37 | "/app/gtk_llm_chat/hicolor/48x48/apps" 38 | ) 39 | 40 | for dir in "${icon_dirs[@]}"; do 41 | if [[ -d "$dir" ]]; then 42 | echo -e "${GREEN}✓ $dir${NC}" 43 | ls -l "$dir" | grep "org.fuentelibre" | while read -r line; do 44 | echo " $line" 45 | done 46 | else 47 | echo -e "${RED}✗ $dir (no existe)${NC}" 48 | fi 49 | done 50 | 51 | # Verificar si el icono simbólico es válido 52 | echo -e "\n${BLUE}Verificando validez del icono simbólico:${NC}" 53 | icon_symbolic="/app/share/icons/hicolor/symbolic/apps/org.fuentelibre.gtk_llm_Chat-symbolic.svg" 54 | if [[ -f "$icon_symbolic" ]]; then 55 | echo -e "${GREEN}✓ El icono simbólico existe${NC}" 56 | if grep -q "currentColor" "$icon_symbolic"; then 57 | echo -e 
"${GREEN}✓ El icono contiene 'currentColor' (SVG simbólico correcto)${NC}" 58 | else 59 | echo -e "${YELLOW}⚠ El icono NO contiene 'currentColor' (podría no ser un SVG simbólico adecuado)${NC}" 60 | fi 61 | else 62 | echo -e "${RED}✗ No se encontró icono simbólico en $icon_symbolic${NC}" 63 | fi 64 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/devices/brain-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/scalable/devices/brain-augmented-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | -------------------------------------------------------------------------------- /linux/appimagetool-wrap.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # appimagetool-wrap.sh 4 | # Envuelto para limpiar libssl interno y generar AppRun antes de invocar al real appimagetool 5 | 6 | # 1) rutas 7 | APPDIR="/home/runner/work/gtk-llm-chat/gtk-llm-chat/dist" 8 | REAL_TOOL="/home/runner/work/gtk-llm-chat/gtk-llm-chat/venv/lib/python3.10/site-packages/pydeployment/linux/appimagetool/appimagetool-x86_64.AppImage" # o donde esté instalado tu appimagetool 9 | 10 | # 2) eliminar OpenSSL interno de PyInstaller 11 | rm -f "$APPDIR"/_internal/libssl.so* "$APPDIR"/_internal/libcrypto.so* 12 | 13 | # 3) escribir AppRun que prioriza libs del sistema 14 | cat > "$APPDIR"/AppRun << 'EOF' 15 | #!/bin/bash 16 | 17 | # Obtener la ruta absoluta al directorio donde se está ejecutando AppRun 18 | # Esto es crucial para que las rutas internas funcionen correctamente 19 | APPDIR=$(dirname "$(readlink -f "$0")") 20 | 21 | # Configurar LD_LIBRARY_PATH para que encuentre las bibliotecas .so empaquetadas 22 | # (Es probable que ya tengas una línea similar, 
PyInstaller a menudo la gestiona) 23 | export LD_LIBRARY_PATH="${APPDIR}/_internal:${LD_LIBRARY_PATH}" # Ajusta si tus .so están en otro subdirectorio de _internal 24 | 25 | # >>> AÑADE O MODIFICA ESTA LÍNEA CRUCIAL <<< 26 | # Configurar GI_TYPELIB_PATH para apuntar a los archivos .typelib empaquetados 27 | export GI_TYPELIB_PATH="${APPDIR}/_internal/gi_typelibs:${GI_TYPELIB_PATH}" 28 | 29 | # Otras variables de entorno importantes que podrías necesitar: 30 | # Para que encuentre esquemas GSettings, .desktop files, iconos, etc. 31 | # Tu .desktop está en APPDIR/usr/share/applications/ 32 | # Tus iconos de app están en APPDIR/_internal/gtk_llm_chat/hicolor/ 33 | export XDG_DATA_DIRS="${APPDIR}/usr/share:${APPDIR}/_internal/share:${APPDIR}/_internal/gtk_llm_chat:${XDG_DATA_DIRS}" 34 | 35 | # Si empaquetas esquemas GSettings (recomendado, como en la versión de Ubuntu 24): 36 | # export GSETTINGS_SCHEMA_DIR="${APPDIR}/_internal/share/glib-2.0/schemas" 37 | 38 | # Para traducciones (si están en APPDIR/_internal/po y tu dominio es "gtk-llm-chat") 39 | export TEXTDOMAINDIR="${APPDIR}/_internal/po" 40 | export TEXTDOMAIN="gtk-llm-chat" # Reemplaza con tu text domain real 41 | 42 | # Ejecutar el binario principal de tu aplicación (el que creó PyInstaller) 43 | # Asegúrate de que el nombre y la ruta del ejecutable sean correctos. 44 | # Si PyInstaller creó "chat_application" y está en _internal: 45 | # ... (definición de APPDIR y exportaciones de LD_LIBRARY_PATH) ... 46 | 47 | export GI_TYPELIB_PATH="${APPDIR}/_internal/gi_typelibs:${GI_TYPELIB_PATH}" 48 | export XDG_DATA_DIRS="${APPDIR}/usr/share:${APPDIR}/_internal/share:${APPDIR}/_internal/gtk_llm_chat:${XDG_DATA_DIRS}" 49 | # ... (otras exportaciones) ... 
50 | 51 | # --- Debugging lines --- 52 | echo "--- AppRun Debug ---" >&2 53 | echo "APPDIR is: ${APPDIR}" >&2 54 | echo "GI_TYPELIB_PATH is: ${GI_TYPELIB_PATH}" >&2 55 | echo "Contents of GI_TYPELIB_PATH target:" >&2 56 | ls -l "${APPDIR}/_internal/gi_typelibs/" >&2 57 | echo "Python path:" >&2 58 | "${APPDIR}/gtk-llm-chat" -c "import sys; print(sys.path)" >&2 # Asumiendo que gtk-llm-chat es el ejecutable de Python 59 | echo "--- End AppRun Debug ---" >&2 60 | # --- End Debugging lines --- 61 | 62 | exec "${APPDIR}/gtk-llm-chat" "$@" 63 | EOF 64 | chmod +x "$APPDIR"/AppRun 65 | 66 | # 4) invoca al appimagetool “real” 67 | exec "$REAL_TOOL" "$@" 68 | 69 | -------------------------------------------------------------------------------- /spec.md: -------------------------------------------------------------------------------- 1 | # Gtk Frontend for llm 2 | 3 | ## 1. Overview 4 | A native GUI frontend for the `python-llm` CLI tool enabling multi-conversation management through independent GTK4 windows. Supports real-time streaming, conversation persistence, and parameter customization while adhering to GNOME HIG. 5 | 6 | ## Core Features 7 | 8 | ### 1. 
Multi-Window Chat Interface 9 | - **Instance Model**: 10 | - Each application instance maintains exactly one window 11 | - No main/parent window required - single window handles all functionality 12 | - Multiple instances can run concurrently for parallel conversations 13 | 14 | - **Independent Conversations**: 15 | - Each window manages its own conversation state 16 | - Supports concurrent execution of multiple instances 17 | - Complete isolation of configurations (model, system prompt, CID) 18 | 19 | - **Conversation Display**: 20 | - Scrollable history area with distinct message styling: 21 | - User messages (right-aligned, light blue background) 22 | - LLM responses (left-aligned, light gray background) 23 | - Error messages (red exclamation mark + distinct styling) 24 | - Minimal metadata display (User/Assistant labels only) 25 | 26 | ### 2. Input Management 27 | 28 | - **Adaptive Text Input**: 29 | - Multi-line `Gtk.TextView` with dynamic height adjustment 30 | - Submit message: Enter key 31 | - New line: Shift+Enter 32 | - Auto-clear after submission 33 | 34 | - **Parameter Handling**: 35 | - Direct passthrough of CLI arguments to `llm` subprocess: 36 | - `--cid`: Continue specific conversation 37 | - `-s`: System prompt 38 | - `-m`: Model selection 39 | - `-c`: Continue most recent conversation 40 | - New conversation created when no CID provided 41 | 42 | ### 3. Subprocess Integration 43 | - **Asynchronous Execution**: 44 | - Dedicated subprocess per window using `asyncio` 45 | - Real-time stdout/stderr capture 46 | - Non-blocking UI during LLM processing 47 | 48 | - **Error Handling**: 49 | - Startup errors (invalid CID) → Terminal logging 50 | - Conversation errors → In-window display with visual indicators 51 | - Critical failures → Graceful degradation with user notification 52 | 53 | ### 4. 
Design & Compliance 54 | - **GNOME HIG Adherence**: 55 | - Libadwaita integration for modern styling 56 | - Consistent spacing/margins (12px default) 57 | - Accessible widget labeling 58 | - System-compliant dark/light mode support 59 | 60 | - **Visual Hierarchy**: 61 | - Clear separation between history/input areas 62 | - Progressive disclosure of advanced controls 63 | - Status indicators for active processing 64 | 65 | ## Technical Implementation 66 | 67 | ### Architecture 68 | 69 | 1. **Window Manager**: 70 | - Handles multi-instance lifecycle 71 | - Enforces conversation isolation 72 | - Manages cross-window dependencies 73 | 74 | 2. **Subprocess Controller**: 75 | - Async wrapper for `llm` executable 76 | - Stream parsing with regex pattern matching 77 | - Output buffering for partial response display 78 | 79 | 3. **UI Components**: 80 | - Custom message widgets with CSS styling 81 | - Auto-scroll management 82 | - Input sanitization pipeline 83 | 84 | ### Dependency Management 85 | 86 | - **Core Stack**: 87 | - Python 3.10+ 88 | - PyGObject (GTK4/Libadwaita) 89 | - `python-llm` package 90 | 91 | - **Patterns**: 92 | - MVC separation for UI/business logic 93 | - Observer pattern for stream updates 94 | - Factory pattern for message widgets 95 | 96 | -------------------------------------------------------------------------------- /debug_theme.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Script para diagnosticar problemas de tema en Flatpak 3 | 4 | # Colores para output 5 | RED='\033[0;31m' 6 | GREEN='\033[0;32m' 7 | YELLOW='\033[1;33m' 8 | BLUE='\033[0;34m' 9 | NC='\033[0m' # No Color 10 | 11 | echo -e "${BLUE}=== Diagnóstico de Temas para GTK LLM Chat (Flatpak) ===${NC}" 12 | 13 | # Verificar si estamos dentro de un Flatpak 14 | if [[ -f "/.flatpak-info" ]]; then 15 | echo -e "${GREEN}✓ Ejecutando dentro de un Flatpak${NC}" 16 | FLATPAK_ID=$(grep "app-id=" "/.flatpak-info" | cut -d= -f2) 17 | echo 
-e "${GREEN} ID de aplicación: $FLATPAK_ID${NC}" 18 | else 19 | echo -e "${YELLOW}⚠ No está ejecutando dentro de un Flatpak${NC}" 20 | echo -e "Execute este script con:" 21 | echo -e " flatpak run --command=sh org.fuentelibre.gtk_llm_Chat -c \"/app/bin/debug_theme.sh\"" 22 | exit 1 23 | fi 24 | 25 | # Verificar variables de entorno GTK 26 | echo -e "\n${BLUE}Variables de entorno GTK:${NC}" 27 | echo -e "${GREEN}GTK_THEME=${GTK_THEME}${NC}" 28 | echo -e "${GREEN}GTK_USE_PORTAL=${GTK_USE_PORTAL}${NC}" 29 | echo -e "${GREEN}ADW_DISABLE_PORTAL=${ADW_DISABLE_PORTAL}${NC}" 30 | echo -e "${GREEN}ICON_THEME=${ICON_THEME}${NC}" 31 | echo -e "${GREEN}LLM_USER_PATH=${LLM_USER_PATH}${NC}" 32 | 33 | # Verificar si libadwaita está instalada 34 | echo -e "\n${BLUE}Verificando libadwaita:${NC}" 35 | if ldconfig -p 2>/dev/null | grep -q "libadwaita"; then 36 | echo -e "${GREEN}✓ libadwaita instalada${NC}" 37 | ldconfig -p | grep "libadwaita" | while read -r line; do 38 | echo " $line" 39 | done 40 | elif [ -f "/app/lib/libadwaita-1.so" ]; then 41 | echo -e "${GREEN}✓ libadwaita encontrada en /app/lib${NC}" 42 | else 43 | echo -e "${RED}✗ No se encontró libadwaita${NC}" 44 | fi 45 | 46 | # Verificar archivos de configuración GTK 47 | echo -e "\n${BLUE}Archivos de configuración GTK:${NC}" 48 | gtk_config_files=( 49 | "/app/etc/gtk-3.0/settings.ini" 50 | "/app/etc/gtk-4.0/settings.ini" 51 | "/app/share/gtk-3.0/settings.ini" 52 | "/app/share/gtk-4.0/settings.ini" 53 | "$HOME/.config/gtk-3.0/settings.ini" 54 | "$HOME/.config/gtk-4.0/settings.ini" 55 | ) 56 | 57 | for file in "${gtk_config_files[@]}"; do 58 | if [[ -f "$file" ]]; then 59 | echo -e "${GREEN}✓ $file${NC}" 60 | echo " Contenido:" 61 | cat "$file" | while read -r line; do 62 | echo " $line" 63 | done 64 | else 65 | echo -e "${YELLOW}⚠ $file (no existe)${NC}" 66 | fi 67 | done 68 | 69 | # Verificar temas instalados 70 | echo -e "\n${BLUE}Temas GTK instalados:${NC}" 71 | theme_dirs=( 72 | "/app/share/themes" 73 | 
@llm.hookimpl
def register_commands(cli):
    """llm plugin hook: register the GTK GUI subcommands on the `llm` CLI.

    Adds two commands:
      * ``gtk-applet`` — tray applet only, no main window.
      * ``gtk-chat``   — full chat window, with passthrough options.
    """

    @cli.command(name="gtk-applet")
    def run_applet():
        """Runs the system tray applet without the main window"""
        # Launch only the applet through the unified platform layer.
        from .platform_utils import launch_tray_applet
        launch_tray_applet({})

    @cli.command(name="gtk-chat")
    @click.option("--cid", type=str,
                  help='ID de la conversación a continuar')
    @click.option('-s', '--system', type=str, help='Prompt del sistema')
    @click.option('-m', '--model', type=str, help='Modelo a utilizar')
    @click.option(
        "-c",
        "--continue-last",
        is_flag=True,
        help="Continuar la última conversación.",
    )
    @click.option('-t', '--template', type=str,
                  help='Template a utilizar')
    @click.option(
        "-p",
        "--param",
        multiple=True,
        type=(str, str),
        metavar='KEY VALUE',
        help="Parámetros para el template",
    )
    @click.option(
        "-o",
        "--option",
        multiple=True,
        type=(str, str),
        metavar='KEY VALUE',
        help="Opciones para el modelo",
    )
    @click.option(
        "-f",
        "--fragment",
        multiple=True,
        type=str,
        metavar='FRAGMENT',
        help="Fragmento (alias, URL, hash o ruta de archivo) para agregar al prompt",
    )
    @click.option(
        "--benchmark-startup",
        is_flag=True,
        help="Mide el tiempo hasta que la ventana se muestra y sale.",
    )
    @click.option(
        "--applet",
        is_flag=True,
        help="Iniciar como applet en bandeja del sistema sin ventana principal",
    )
    def run_gui(cid, system, model, continue_last, template, param, option,
                fragment, benchmark_startup, applet):
        """Runs a GUI for the chatbot"""
        # Record start time if benchmarking.
        start_time = time.time() if benchmark_startup else None

        # Collect the CLI arguments into a single config dict consumed by
        # LLMChatApplication / the applet launcher.
        config = {
            'cid': cid,
            'system': system,
            'model': model,
            'continue_last': continue_last,
            'template': template,
            'params': param,
            'options': option,
            'fragments': fragment,
            'benchmark_startup': benchmark_startup,
            'start_time': start_time,
            'applet': applet
        }

        # Applet-only mode: launch the tray applet in another process and
        # keep this process alive so the command does not exit immediately.
        if applet and not cid and not continue_last:
            from .platform_utils import launch_tray_applet
            launch_tray_applet(config)
            # BUG FIX: the original re-imported `time` here, which made
            # `time` a function-local name for the WHOLE function body and
            # raised UnboundLocalError at the `time.time()` call above when
            # --benchmark-startup was combined with --applet.  The
            # module-level `import time` already covers `time.sleep`.
            while True:
                time.sleep(1)

        # Otherwise start the full chat application.
        from .chat_application import LLMChatApplication
        app = LLMChatApplication(config)

        # Translate the config back into argv-style arguments for app.run().
        cmd_args = []
        if config.get('cid'):
            cmd_args.append(f"--cid={config['cid']}")
        if config.get('model'):
            cmd_args.append(f"--model={config['model']}")
        if config.get('template'):
            cmd_args.append(f"--template={config['template']}")
        if config.get('applet'):
            cmd_args.append("--applet")  # was a placeholder-free f-string

        if cmd_args:
            return app.run(cmd_args)
        return app.run()
para mostrar un mensaje individual""" 55 | 56 | def __init__(self, message): 57 | super().__init__(orientation=Gtk.Orientation.VERTICAL, spacing=3) 58 | 59 | # Import MarkdownView here 60 | from .markdownview import MarkdownView 61 | 62 | # Configurar el estilo según el remitente 63 | is_user = message.sender == "user" 64 | self.add_css_class('message') 65 | self.add_css_class('user-message' if is_user else 'assistant-message') 66 | 67 | # Crear un contenedor con margen para centrar el contenido 68 | margin_box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL) 69 | margin_box.set_hexpand(True) 70 | margin_box.set_size_request(180, -1) # Ancho mínimo para evitar colapsos 71 | 72 | # Crear el contenedor del mensaje 73 | message_box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=3) 74 | message_box.add_css_class('message-content') 75 | message_box.set_hexpand(True) 76 | message_box.set_size_request(180, -1) 77 | 78 | # Agregar espaciadores flexibles a los lados 79 | if is_user: 80 | margin_box.append(Gtk.Box(hexpand=True)) # Espaciador izquierdo 81 | margin_box.append(message_box) 82 | # Espaciador derecho pequeño 83 | margin_box.append(Gtk.Box(hexpand=False)) 84 | else: 85 | # Espaciador izquierdo pequeño 86 | margin_box.append(Gtk.Box(hexpand=False)) 87 | margin_box.append(message_box) 88 | margin_box.append(Gtk.Box(hexpand=True)) # Espaciador derecho 89 | 90 | # Quitar el prefijo "user:" si existe 91 | content = message.content 92 | if is_user and content.startswith("user:"): 93 | content = content[5:].strip() 94 | 95 | # Usar MarkdownView para el contenido 96 | self.content_view = MarkdownView() 97 | self.content_view.set_hexpand(True) 98 | self.content_view.set_size_request(167, -1) # El warning pedía al menos 167 99 | self.content_view.set_markdown(content) 100 | message_box.append(self.content_view) 101 | 102 | # Agregar timestamp 103 | time_label = Gtk.Label( 104 | label=message.timestamp.strftime("%H:%M"), 105 | css_classes=['timestamp'] 106 | ) 107 
| time_label.set_halign(Gtk.Align.END) 108 | time_label.set_size_request(60, -1) 109 | message_box.append(time_label) 110 | 111 | self.append(margin_box) 112 | 113 | def update_content(self, new_content): 114 | """Actualiza el contenido del mensaje""" 115 | self.content_view.set_markdown(new_content) 116 | -------------------------------------------------------------------------------- /numpy_python313_patch.py: -------------------------------------------------------------------------------- 1 | """ 2 | Monkey patch para solucionar problemas de compatibilidad de NumPy con Python 3.13 3 | en entornos congelados con PyInstaller. 4 | 5 | El error "argument docstring of add_docstring should be a str" se produce cuando 6 | NumPy intenta agregar docstrings pero recibe objetos no-string en Python 3.13. 7 | """ 8 | 9 | import sys 10 | import warnings 11 | 12 | 13 | def patch_numpy_add_docstring(): 14 | """ 15 | Aplica un monkey patch a la función add_docstring de NumPy para manejar 16 | argumentos no-string en Python 3.13. 17 | """ 18 | try: 19 | # Solo aplicar el patch si estamos en Python 3.13+ 20 | if sys.version_info < (3, 13): 21 | return 22 | 23 | # Intentar importar numpy 24 | import numpy 25 | 26 | # Verificar si numpy._core.overrides existe 27 | if hasattr(numpy, '_core') and hasattr(numpy._core, 'overrides'): 28 | overrides_module = numpy._core.overrides 29 | else: 30 | # Fallback para versiones más antiguas 31 | try: 32 | import numpy.core.overrides as overrides_module 33 | except ImportError: 34 | return 35 | 36 | # Verificar si add_docstring existe 37 | if not hasattr(overrides_module, 'add_docstring'): 38 | return 39 | 40 | # Guardar la función original 41 | original_add_docstring = overrides_module.add_docstring 42 | 43 | def patched_add_docstring(func, docstring): 44 | """ 45 | Versión patcheada de add_docstring que maneja argumentos no-string. 
def apply_llm_compatibility_patches():
    """Apply Python 3.13 compatibility patches to all known LLM plugins.

    Installs the NumPy ``add_docstring`` patch first, then imports each
    plugin so the patched code path is exercised at import time.
    """
    # The NumPy patch must be in place before any plugin is imported.
    patch_numpy_add_docstring()

    # Plugins that may trip over the NumPy docstring issue.
    plugin_names = (
        'llm',
        'llm_groq',
        'llm_gemini',
        'llm_openrouter',
        'llm_perplexity',
        'llm_anthropic',
        'llm_deepseek',
        'llm_grok',
    )

    patched_count = 0
    for plugin in plugin_names:
        try:
            # Importing the plugin forces the patch to take effect for it.
            __import__(plugin)
        except Exception as e:
            if "add_docstring" in str(e):
                print(f"⚠ Plugin {plugin} aún tiene problemas de add_docstring: {e}")
            else:
                print(f"ⓘ Plugin {plugin} no pudo ser importado (probablemente no instalado): {e}")
        else:
            patched_count += 1


if __name__ == "__main__":
    apply_llm_compatibility_patches()
!contains(github.ref_name, 'dev') && 34 | !contains(github.ref_name, 'alpha') && 35 | !contains(github.ref_name, 'beta') && 36 | !contains(github.ref_name, 'test') && 37 | !contains(github.ref_name, 'rc') }} 38 | permissions: 39 | id-token: write 40 | steps: 41 | - name: Download Python dist 42 | uses: actions/download-artifact@v4 43 | with: 44 | name: python-dist 45 | path: dist/ 46 | 47 | - name: Publish package to PyPI (Release) 48 | uses: pypa/gh-action-pypi-publish@release/v1 49 | 50 | publish-pypi-prerelease: 51 | name: Publish to PyPI (Prerelease) 52 | needs: build-python 53 | runs-on: ubuntu-latest 54 | # Solo versiones con sufijos (v1.2.3dev, v1.2.3alpha1, v1.2.3beta1, v1.2.3rc1) 55 | if: >- 56 | ${{ startsWith(github.ref_name, 'v') && 57 | (contains(github.ref_name, 'dev') || 58 | contains(github.ref_name, 'alpha') || 59 | contains(github.ref_name, 'beta') || 60 | contains(github.ref_name, 'rc')) }} 61 | permissions: 62 | id-token: write 63 | steps: 64 | - name: Download Python dist 65 | uses: actions/download-artifact@v4 66 | with: 67 | name: python-dist 68 | path: dist/ 69 | 70 | - name: Publish package to PyPI (Prerelease) 71 | uses: pypa/gh-action-pypi-publish@release/v1 72 | with: 73 | repository-url: https://test.pypi.org/legacy/ 74 | 75 | github-release: 76 | name: Create GitHub Release 77 | needs: [build-linux, build-windows, build-macos, build-python] 78 | runs-on: ubuntu-latest 79 | permissions: 80 | contents: write 81 | id-token: write 82 | steps: 83 | - name: Download Linux dist 84 | uses: actions/download-artifact@v4 85 | with: 86 | name: linux-dist 87 | path: dist/ 88 | 89 | - name: Download Windows dist 90 | uses: actions/download-artifact@v4 91 | with: 92 | name: windows-dist 93 | path: dist/ 94 | 95 | - name: Download macOS Intel dist 96 | uses: actions/download-artifact@v4 97 | with: 98 | name: macos-intel-dist 99 | path: dist/ 100 | 101 | - name: Download macOS ARM dist 102 | uses: actions/download-artifact@v4 103 | with: 104 | name: 
macos-arm-dist 105 | path: dist/ 106 | 107 | - name: Download Python dist 108 | uses: actions/download-artifact@v4 109 | with: 110 | name: python-dist 111 | path: dist/ 112 | 113 | - name: Download Flatpak bundle 114 | uses: actions/download-artifact@v4 115 | with: 116 | name: flatpak-bundle 117 | path: dist/ 118 | 119 | - name: Create GitHub Pre-release 120 | if: >- 121 | ${{ contains(github.ref_name, 'alpha') || 122 | contains(github.ref_name, 'beta') || 123 | contains(github.ref_name, 'dev') || 124 | contains(github.ref_name, 'rc') }} 125 | env: 126 | GITHUB_TOKEN: ${{ github.token }} 127 | run: >- 128 | gh release create 129 | '${{ github.ref_name }}' 130 | --repo '${{ github.repository }}' 131 | --generate-notes --prerelease 132 | 133 | - name: Create GitHub Release 134 | if: >- 135 | ${{ !(contains(github.ref_name, 'alpha') || 136 | contains(github.ref_name, 'beta') || 137 | contains(github.ref_name, 'dev') || 138 | contains(github.ref_name, 'test') || 139 | contains(github.ref_name, 'rc')) }} 140 | env: 141 | GITHUB_TOKEN: ${{ github.token }} 142 | run: >- 143 | gh release create 144 | '${{ github.ref_name }}' 145 | --repo '${{ github.repository }}' 146 | --generate-notes 147 | 148 | - name: Upload dists to GitHub Release 149 | env: 150 | GITHUB_TOKEN: ${{ github.token }} 151 | run: >- 152 | gh release upload 153 | '${{ github.ref_name }}' dist/** 154 | --repo '${{ github.repository }}' -------------------------------------------------------------------------------- /gtk_llm_chat/hicolor/symbolic/apps/org.fuentelibre.gtk_llm_Chat-symbolic.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | -------------------------------------------------------------------------------- /.github/workflows/build-macos.yml: -------------------------------------------------------------------------------- 1 | # macOS build workflow for Gtk LLM Chat 2 | name: Build macOS 3 | 4 | on: 5 | push: 6 | 
branches: [ main, develop ] 7 | pull_request: 8 | branches: [ main, develop ] 9 | workflow_dispatch: 10 | workflow_call: 11 | 12 | jobs: 13 | build-macos-intel: 14 | name: Build for macOS (Intel) 15 | runs-on: macos-13 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Install dependencies 23 | run: | 24 | ./macos/bootstrap.sh 25 | python3 -m pip install pycairo 26 | python3 -m pip install PyGObject 27 | 28 | - name: Set up environment 29 | env: 30 | MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} 31 | MACOS_CERTIFICATE_PWD: ${{ secrets.MACOS_CERTIFICATE_PWD }} 32 | MACOS_CERTIFICATE_NAME: ${{ secrets.MACOS_CERTIFICATE_NAME }} 33 | MACOS_CI_KEYCHAIN_PWD: ${{ secrets.MACOS_CI_KEYCHAIN_PWD }} 34 | MACOS_NOTARIZATION_APPLE_ID: ${{ secrets.MACOS_NOTARIZATION_APPLE_ID }} 35 | MACOS_NOTARIZATION_TEAM_ID: ${{ secrets.MACOS_NOTARIZATION_TEAM_ID }} 36 | MACOS_NOTARIZATION_PWD: ${{ secrets.MACOS_NOTARIZATION_PWD }} 37 | run: | 38 | cp .env.ci .env 39 | # NOTE: Comment out or remove the following commands to disable code signing and notarization 40 | # # Decode certificate 41 | # echo $MACOS_CERTIFICATE | base64 --decode > certificate.p12 42 | # # Create keychain 43 | # security create-keychain -p "$MACOS_CI_KEYCHAIN_PWD" build.keychain 44 | # security default-keychain -s build.keychain 45 | # security unlock-keychain -p "$MACOS_CI_KEYCHAIN_PWD" build.keychain 46 | # security import certificate.p12 -k build.keychain -P "$MACOS_CERTIFICATE_PWD" -T /usr/bin/codesign 47 | # security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_CI_KEYCHAIN_PWD" build.keychain 48 | # # Create keychain profile 49 | # xcrun notarytool store-credentials "notarytool-profile" --apple-id "$MACOS_NOTARIZATION_APPLE_ID" --team-id "$MACOS_NOTARIZATION_TEAM_ID" --password "$MACOS_NOTARIZATION_PWD" 50 | # # Store info in environment file 51 | # echo 'CERT="'$MACOS_CERTIFICATE_NAME'"' >> .env 52 | # echo 
'KEYC=notarytool-profile' >> .env 53 | 54 | - name: Build package 55 | run: | 56 | python3 build-ci.py 57 | 58 | - name: Store the distribution packages 59 | uses: actions/upload-artifact@v4 60 | with: 61 | name: macos-intel-dist 62 | path: dist/ 63 | 64 | build-macos-arm: 65 | name: Build for macOS (ARM) 66 | runs-on: macos-14 67 | steps: 68 | - name: Checkout 69 | uses: actions/checkout@v4 70 | with: 71 | fetch-depth: 0 72 | 73 | - name: Install dependencies 74 | run: | 75 | sudo python3 -m pip config --global set global.break-system-packages true 76 | python3 -m pip install pycairo PyGObject 77 | sh ./macos/bootstrap.sh 78 | 79 | - name: Set up environment 80 | env: 81 | MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }} 82 | MACOS_CERTIFICATE_PWD: ${{ secrets.MACOS_CERTIFICATE_PWD }} 83 | MACOS_CERTIFICATE_NAME: ${{ secrets.MACOS_CERTIFICATE_NAME }} 84 | MACOS_CI_KEYCHAIN_PWD: ${{ secrets.MACOS_CI_KEYCHAIN_PWD }} 85 | MACOS_NOTARIZATION_APPLE_ID: ${{ secrets.MACOS_NOTARIZATION_APPLE_ID }} 86 | MACOS_NOTARIZATION_TEAM_ID: ${{ secrets.MACOS_NOTARIZATION_TEAM_ID }} 87 | MACOS_NOTARIZATION_PWD: ${{ secrets.MACOS_NOTARIZATION_PWD }} 88 | run: | 89 | cp .env.ci .env 90 | # NOTE: Comment out or remove the following commands to disable code signing and notarization 91 | # # Decode certificate 92 | # echo $MACOS_CERTIFICATE | base64 --decode > certificate.p12 93 | # # Create keychain 94 | # security create-keychain -p "$MACOS_CI_KEYCHAIN_PWD" build.keychain 95 | # security default-keychain -s build.keychain 96 | # security unlock-keychain -p "$MACOS_CI_KEYCHAIN_PWD" build.keychain 97 | # security import certificate.p12 -k build.keychain -P "$MACOS_CERTIFICATE_PWD" -T /usr/bin/codesign 98 | # security set-key-partition-list -S apple-tool:,apple:,codesign: -s -k "$MACOS_CI_KEYCHAIN_PWD" build.keychain 99 | # # Create keychain profile 100 | # xcrun notarytool store-credentials "notarytool-profile" --apple-id "$MACOS_NOTARIZATION_APPLE_ID" --team-id 
"$MACOS_NOTARIZATION_TEAM_ID" --password "$MACOS_NOTARIZATION_PWD" 101 | # # Store info in environment file 102 | # echo 'CERT="'$MACOS_CERTIFICATE_NAME'"' >> .env 103 | # echo 'KEYC=notarytool-profile' >> .env 104 | 105 | - name: Build package 106 | run: | 107 | python3 build-ci.py 108 | 109 | - name: Store the distribution packages 110 | uses: actions/upload-artifact@v4 111 | with: 112 | name: macos-arm-dist 113 | path: dist/ 114 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GTK LLM Chat 2 | 3 | A GTK graphical interface for chatting with Large Language Models (LLMs). 4 | 5 | ![screenshot](./docs/screenshot01.png) 6 | 7 | 8 | ## Key Features 9 | 10 | - Simple and easy-to-use graphical interface built with GTK 11 | - Support for multiple conversations in independent windows 12 | - Integration with python-llm for chatting with various LLM models 13 | - Modern interface using libadwaita 14 | - Support for real-time streaming responses 15 | - Message history with automatic scrolling 16 | - Windows installer, Linux AppImage, and Macos bundles available! 17 | - Markdown rendering of the responses 18 | 19 | - **Sidebar Navigation:** Modern sidebar for model/provider selection, parameters, and settings. 20 | - **Model Parameters:** Adjust temperature and system prompt per conversation. 21 | - **API Key Management:** Banner with symbolic icons for setting/changing API keys per provider. 22 | - **Keyboard Shortcuts:** 23 | - `F10`: Toggle sidebar 24 | - `F2`: Rename conversation 25 | - `Escape`: Minimize window 26 | - `Enter`: Send message 27 | - `Shift+Enter`: New line in input 28 | - `Ctrl+W`: Delete the current conversation 29 | - `Ctrl+M`: Open model selector 30 | - `Ctrl+S`: Edit system prompt 31 | - `Ctrl+N`: New conversation window 32 | - **Conversation Management:** Rename and delete conversations. 
33 | - **Tray Applet:** Use a system tray applet for quick access to recent conversations. 34 | - **Error Handling:** Clear error messages displayed in the chat. 35 | - **Dynamic Input:** The input area dynamically adjusts its height. 36 | 37 | **Gtk-LLM-Chat** is a graphical frontend for the command-line llm utility. Just as `llm` integrates large language models into the [command line interface](https://llm.datasette.io/en/stable/usage.html), Gtk-LLM-Chat aims to bring that same power to the desktop environment. Its goal is to provide intuitive affordances and seamless integration for using LLMs in everyday tasks — all while remaining convenient, lightweight, and transparent in its behavior. 38 | 39 | ## Installation 40 | 41 | ### Downloadable application bundles 42 | 43 | While the command line is fun in every operating system, **Gtk-LLM-Chat** also offers prepackaged binary application bundles for all three major operating systems: Windows installers, Linux AppImages and macOS Application Bundles are available in our [_releases_](https://github.com/icarito/gtk-llm-chat/releases) section. 44 | 45 | An effort has been made to support desktop integration across systems but _your mileage may vary_ - as the Gtk tools are still maturing outside of the GNU/Linux ecosystem. 46 | 47 | ### As an `llm` plugin 48 | 49 | Playing with LLMs in the command line is fun! I recommend you to install `llm` and play around with it to size up the possibilities. Gtk-LLM-Chat can be installed as a plugin extension for `llm` itself, thus extending the possibilities of `llm` with some graphical features. Not all features of `llm` are exposed yet. 50 | 51 | ``` 52 | pipx install llm # required by gtk-llm-chat 53 | llm install gtk-llm-chat 54 | ``` 55 | 56 | You may want to copy the provided .desktop files to your ~/.local/share/applications/ folder. A welcome assistant will do this in the future for you. 
57 | 58 | 59 | ### System Requirements 60 | 61 | - [llm](https://llm.datasette.io/en/stable/) (when installing as an llm plugin) 62 | - Python 3.8 or higher 63 | - GTK 4.0 64 | - libadwaita 65 | - libayatana-appindicator (on linux) 66 | 67 | These dependency installation instructions are collected here for reference only: 68 | 69 | ``` 70 | # fedora: # sudo dnf install cairo-devel object-introspection-devel gtk4-devel pkgconf-pkg-config gcc redhat-rpm-config 71 | # debian: # sudo apt install libgtk-4-1 python3-gi python3-gi-cairo libadwaita-1-0 libayatana-appindicator3 72 | # arch: # sudo pacman -S python-gobject gtk4 73 | # windows (msys2): # pacman -S mingw-w64-$(uname -m)-gtk4 mingw-w64-$(uname -m)-python-pip mingw-w64-$(uname -m)-python3-gobject mingw-w64-$(uname -m)-libadwaita mingw-w64-x86_64-python3-pillow 74 | # macos (homebrew): # brew install pygobject3 gtk4 adwaita-icon-theme libadwaita 75 | ``` 76 | 77 | ## Usage 78 | 79 | ### Running the Application 80 | 81 | To start the applet (system tray mode): 82 | ``` 83 | llm gtk-applet 84 | ``` 85 | 86 | To start a single chat window: 87 | ``` 88 | llm gtk-chat 89 | ``` 90 | 91 | #### Optional arguments: 92 | ``` 93 | llm gtk-chat --cid CONVERSATION_ID # Continue a specific conversation 94 | llm gtk-chat -s "System prompt" # Set system prompt 95 | llm gtk-chat -m model_name # Select specific model 96 | llm gtk-chat -c # Continue last conversation 97 | ``` 98 | 99 | ## Development 100 | 101 | To set up the development environment: 102 | ``` 103 | git clone https://github.com/icarito/gtk-llm-chat.git 104 | cd gtk-llm-chat 105 | python -m venv venv 106 | source venv/bin/activate 107 | pip install -e . 
108 | ``` 109 | 110 | ## Shoulders of giants 111 | 112 | This project is made possible thanks to these great components, among others: 113 | 114 | - [llm](https://llm.datasette.io/en/stable/) by @simonw 115 | - [hello-world-gtk](https://github.com/zevlee/hello-world-gtk) by @zevlee 116 | 117 | ## License 118 | 119 | GPLv3 License - See LICENSE file for more details. 120 | -------------------------------------------------------------------------------- /todo.md: -------------------------------------------------------------------------------- 1 | # LLM Frontend Project Checklist 2 | 3 | ## Phase 1: Application Foundation 4 | - [x] Create Gtk.Application subclass with unique ID 5 | - [x] Implement window creation on activation 6 | - [x] Support multiple instances (new window per launch) 7 | - [x] Style with Libadwaita 8 | - [x] Create empty window titled "LLM Chat" 9 | - [x] Set default window size to 600x700 10 | - [x] Verify window independence between instances 11 | - [x] Test application launch from CLI with multiple instances 12 | - [ ] ~~Center window on screen (Known GTK4 issue - window positioning unreliable)~~ 13 | 14 | ## Phase 2: Core UI Layout 15 | - [x] Implement vertical box layout hierarchy 16 | - [x] Create ScrolledWindow for message history 17 | - [x] Set up input area box with proper proportions 18 | - [x] Configure TextView with: 19 | - [x] Dynamic height adjustment 20 | - [x] Enter vs Shift+Enter handling 21 | - [x] Minimum/maximum line limits 22 | - [x] Add Send button with keyboard shortcut 23 | - [x] Verify UI responsiveness at different window sizes 24 | 25 | ## Phase 3: Message Handling 26 | - [x] Implement Message class with: 27 | - [x] Sender type (user/assistant/error) 28 | - [x] Content storage 29 | - [x] Timestamp tracking 30 | - [x] Create message queue system 31 | - [x] Add input sanitization pipeline 32 | - [x] Build MessageWidget components: 33 | - [x] CSS styling classes 34 | - [x] Alignment logic 35 | - [x] Content formatting 36 | - 
[x] Implement auto-scroll behavior 37 | - [x] Connect message submission to display system 38 | 39 | ## Phase 4: LLM Integration 40 | - [x] Create LLMProcess controller class 41 | - [x] Implement async subprocess execution 42 | - [x] Set up stdout/stderr capture system 43 | - [x] Develop CLI command builder with: 44 | - [x] Basic command construction 45 | - [x] Model parameter handling 46 | - [x] System prompt injection 47 | - [x] CID management 48 | - [x] Template support (-t) 49 | - [x] Template parameters (-p) 50 | - [x] Model options (-o) 51 | - [x] Create streaming response parser: 52 | - [x] Response buffer system 53 | - [x] Clean prompt character (">") from responses 54 | - [x] Add typing indicators 55 | - [x] Implement cancellation support 56 | - [x] Implement argument parsing: 57 | - [x] Set up argparse with: 58 | - [x] --cid: Conversation ID 59 | - [x] -s: System prompt 60 | - [x] -m: Model selection 61 | - [x] -c: Continue last conversation 62 | - [x] -t: Template selection 63 | - [x] -p: Template parameters 64 | - [x] -o: Model options 65 | - [x] Create config dictionary from parsed args 66 | - [x] Pass config to LLMProcess constructor 67 | - [x] Update LLMProcess to use config for llm chat command 68 | 69 | ## Phase 5: Error Handling & Status 70 | - [x] Create ErrorWidget components: 71 | - [x] Warning icon integration 72 | - [x] Styling hierarchy 73 | - [x] Error message formatting 74 | - [x] Implement error capture system for: 75 | - [x] Subprocess failures 76 | - [x] Invalid CIDs 77 | - [x] Model errors 78 | - [x] Add status bar with: 79 | - [x] Connection indicators (via window title) 80 | - [x] Model name display 81 | - [ ] Create retry mechanism for failed messages (need to see how that looks like on the cli) 82 | - [ ] Implement graceful degradation for critical errors (what errors?) 83 | 84 | ## Phase 6: Configuration & Persistence 85 | - [ ] Set up GSettings schema (what for?) 86 | - [ ] Create model selector dropdown (where?) 
87 | - [ ] Implement system prompt editor (where?) 88 | - [ ] Add conversation ID tracking 89 | - [ ] ~~Build SQLite storage system:~~ 90 | - [ ] ~~Message schema design~~ 91 | - [ ] CID-based conversation tracking 92 | - [x] Auto-save implementation (usando persistencia nativa del LLM) 93 | - [x] Template support 94 | - [ ] Create history navigation controls (where?) 95 | - [ ] Add "New Conversation" button (where?) 96 | 97 | ## Phase 7: UI Polish 98 | - [x] Implement CSS for: 99 | - [x] Message bubble styling 100 | - [x] Error state visuals 101 | - [x] Apply GNOME HIG spacing rules 102 | - [x] Add accessibility features: 103 | - [ ] Screen reader labels 104 | - [x] Keyboard navigation 105 | - [x] Enter to send 106 | - [x] Shift+Enter for newline 107 | - [ ] ~~Ctrl+C to cancel~~ (unsupported by llm) 108 | - [x] Escape to minimize 109 | - [x] Input focus on window open 110 | - [ ] Create loading animations 111 | - [ ] Implement keyboard shortcuts overlay 112 | - [ ] ~~Verify touchpad gesture support~~ 113 | 114 | ## Testing & Validation 115 | - [ ] Create test suite for: 116 | - [ ] Message serialization 117 | - [ ] Subprocess execution 118 | - [ ] Error handling paths 119 | - [ ] Perform cross-version Python testing 120 | - [ ] Validate GNOME HIG compliance 121 | - [ ] Test persistence across restarts 122 | - [ ] Verify multi-instance resource isolation 123 | 124 | ## Documentation 125 | - [ ] Write install instructions 126 | - [ ] Create user guide for: 127 | - [ ] Basic usage 128 | - [ ] Keyboard shortcuts 129 | - [ ] Troubleshooting 130 | - [ ] Generate API documentation 131 | - [ ] Add inline docstrings 132 | - [ ] Create contribution guidelines 133 | 134 | ## Stretch Goals 135 | - [ ] Implement conversation search 136 | - [ ] Add message editing 137 | - [ ] Create export/import functionality 138 | - [ ] Develop system tray integration 139 | - [ ] Add notification support 140 | - [ ] Create Flatpak package 
-------------------------------------------------------------------------------- /linux/org.fuentelibre.gtk_llm_Chat.flatpak.yml: -------------------------------------------------------------------------------- 1 | # Flatpak manifest for Gtk LLM Chat 2 | app-id: org.fuentelibre.gtk_llm_Chat 3 | runtime: org.gnome.Platform 4 | runtime-version: '48' 5 | sdk: org.gnome.Sdk 6 | add-extensions: 7 | org.freedesktop.Platform.Icontheme: 8 | directory: share/icons 9 | subdirectories: true 10 | version: '23.08' 11 | command: gtk-llm-chat 12 | finish-args: 13 | - --share=network 14 | - --socket=fallback-x11 15 | - --socket=wayland 16 | - --device=dri 17 | - --filesystem=xdg-config/io.datasette.llm:rw 18 | - --filesystem=xdg-config/autostart:create 19 | - --talk-name=org.ayatana.indicator.application 20 | - --talk-name=org.freedesktop.Notifications 21 | - --own-name=org.fuentelibre.gtk_llm_Chat.* 22 | - --own-name=org.kde.StatusNotifierItem-org.fuentelibre.gtk_llm_Chat 23 | - --own-name=org.freedesktop.StatusNotifierItem-org.fuentelibre.gtk_llm_Chat 24 | - --share=ipc 25 | - --socket=session-bus 26 | - --socket=system-bus 27 | # Acceso a temas y configuración de GTK 28 | - --filesystem=xdg-config/gtk-4.0:ro 29 | - --filesystem=xdg-data/icons:ro 30 | # Variables de entorno 31 | # Permitir configuración de esquema de color 32 | - --talk-name=org.freedesktop.portal.Settings 33 | - --talk-name=org.gnome.Settings 34 | - --talk-name=com.canonical.AppMenu.Registrar 35 | - --talk-name=org.kde.StatusNotifierWatcher 36 | - --talk-name=org.freedesktop.StatusNotifierWatcher 37 | build-options: 38 | env: 39 | PKG_CONFIG_PATH: "/app/lib/pkgconfig:/app/lib64/pkgconfig:/usr/lib/pkgconfig:/usr/lib64/pkgconfig" 40 | modules: 41 | # intltool (requerido por ayatana-ido) 42 | - name: intltool 43 | cleanup: ["*"] 44 | sources: 45 | - type: archive 46 | url: https://launchpad.net/intltool/trunk/0.51.0/+download/intltool-0.51.0.tar.gz 47 | sha256: 
67c74d94196b153b774ab9f89b2fa6c6ba79352407037c8c14d5aeb334e959cd 48 | - type: patch 49 | path: shared-modules/intltool/intltool-perl5.26-regex-fixes.patch 50 | 51 | # libdbusmenu (requerido por libayatana-appindicator) 52 | - name: libdbusmenu 53 | buildsystem: autotools 54 | build-options: 55 | cflags: "-Wno-error" 56 | cleanup: 57 | - "*.la" 58 | - "/include" 59 | - "/lib/pkgconfig" 60 | - "/libexec" 61 | - "/share/doc" 62 | - "/share/gtk-doc" 63 | config-opts: 64 | - "--with-gtk=3" 65 | - "--disable-dumper" 66 | - "--disable-static" 67 | - "--disable-tests" 68 | - "--disable-gtk-doc" 69 | - "--enable-introspection=no" 70 | - "--disable-vala" 71 | sources: 72 | - type: archive 73 | url: https://launchpad.net/libdbusmenu/16.04/16.04.0/+download/libdbusmenu-16.04.0.tar.gz 74 | sha256: b9cc4a2acd74509435892823607d966d424bd9ad5d0b00938f27240a1bfa878a 75 | - type: patch 76 | path: shared-modules/libayatana-appindicator/0001-Fix-HAVE_VALGRIND-AM_CONDITIONAL.patch 77 | 78 | - name: ayatana-ido 79 | buildsystem: cmake-ninja 80 | cleanup: 81 | - "/include" 82 | - "/lib/pkgconfig" 83 | config-opts: 84 | - "-DENABLE_INTROSPECTION=OFF" 85 | - "-DCMAKE_INSTALL_LIBDIR=lib" 86 | sources: 87 | - type: git 88 | url: https://github.com/AyatanaIndicators/ayatana-ido.git 89 | tag: "0.10.4" 90 | commit: f968079b09e2310fefc3fc307359025f1c74b3eb 91 | - type: patch 92 | path: shared-modules/libayatana-appindicator/0001-Make-introspection-configurable.patch 93 | 94 | 95 | 96 | - name: libayatana-indicator 97 | buildsystem: cmake-ninja 98 | cleanup: 99 | - "/include" 100 | - "/lib/pkgconfig" 101 | - "/libexec" 102 | - "/share" 103 | config-opts: 104 | - "-DCMAKE_INSTALL_LIBDIR=lib" 105 | sources: 106 | - type: git 107 | url: https://github.com/AyatanaIndicators/libayatana-indicator.git 108 | tag: "0.9.4" 109 | commit: 611bb384b73fa6311777ba4c41381a06f5b99dad 110 | 111 | - name: libayatana-appindicator 112 | buildsystem: cmake-ninja 113 | cleanup: 114 | - "/include" 115 | - 
"/lib/pkgconfig" 116 | config-opts: 117 | - "-DENABLE_BINDINGS_MONO=NO" 118 | - "-DENABLE_BINDINGS_VALA=NO" 119 | - "-DENABLE_GTKDOC=NO" 120 | - "-DCMAKE_INSTALL_LIBDIR=lib" 121 | sources: 122 | - type: git 123 | url: https://github.com/AyatanaIndicators/libayatana-appindicator.git 124 | tag: "0.5.94" 125 | commit: 31e8bb083b307e1cc96af4874a94707727bd1e79 126 | 127 | - name: gtk-llm-chat 128 | buildsystem: simple 129 | build-options: 130 | build-args: 131 | - --share=network 132 | build-commands: 133 | - pip3 install --prefix=/app -r requirements.txt 134 | - pip3 install --prefix=/app . 135 | # Iconos de aplicación (PNG para menús y lanzadores) 136 | - mkdir -p /app/share/icons/hicolor/scalable/apps 137 | - mkdir -p /app/share/icons/hicolor/48x48/apps 138 | - mkdir -p /app/share/icons/hicolor/symbolic/apps 139 | - mkdir -p /app/share/icons/hicolor/256x256/apps 140 | # Usar los iconos PNG disponibles 141 | - cp gtk_llm_chat/hicolor/256x256/apps/org.fuentelibre.gtk_llm_Chat.png /app/share/icons/hicolor/256x256/apps/ 142 | - cp gtk_llm_chat/hicolor/48x48/apps/org.fuentelibre.gtk_llm_Chat-symbolic.png /app/share/icons/hicolor/48x48/apps/ 143 | # Icono simbólico para tray 144 | - cp gtk_llm_chat/hicolor/symbolic/apps/org.fuentelibre.gtk_llm_Chat-symbolic.svg /app/share/icons/hicolor/symbolic/apps/ 145 | # Scripts de diagnóstico 146 | - mkdir -p /app/bin 147 | - install -Dm755 debug_icons.sh /app/bin/debug_icons.sh 148 | - install -Dm755 debug_theme.sh /app/bin/debug_theme.sh 149 | # Aplicaciones 150 | - mkdir -p /app/share/applications 151 | - cp desktop/org.fuentelibre.gtk_llm_Chat.desktop /app/share/applications/ 152 | - cp desktop/org.fuentelibre.gtk_llm_Applet.desktop /app/share/applications/ 153 | # Metadatos 154 | - mkdir -p /app/share/metainfo 155 | - cp linux/org.fuentelibre.gtk_llm_Chat.appdata.xml /app/share/metainfo/ || true 156 | # Actualizar cache de iconos 157 | - update-desktop-database /app/share/applications/ || true 158 | - gtk-update-icon-cache -f 
--ignore-theme-index /app/share/icons/hicolor/ || true 159 | sources: 160 | - type: dir 161 | path: .. 162 | desktop-file-name-suffix: '' 163 | rename-icon: org.fuentelibre.gtk_llm_Chat 164 | tags: 165 | - Utility 166 | -------------------------------------------------------------------------------- /.github/workflows/build-linux.yml: -------------------------------------------------------------------------------- 1 | # Linux build workflow for Gtk LLM Chat 2 | name: Build Linux 3 | 4 | on: 5 | push: 6 | branches: [ main, develop ] 7 | pull_request: 8 | branches: [ main, develop ] 9 | workflow_dispatch: 10 | workflow_call: 11 | 12 | jobs: 13 | build-linux: 14 | name: Build for Linux 15 | runs-on: ubuntu-22.04 16 | steps: 17 | - name: Checkout 18 | uses: actions/checkout@v4 19 | with: 20 | fetch-depth: 0 21 | 22 | - name: Install system dependencies 23 | run: | 24 | sudo add-apt-repository -y ppa:kubuntu-ppa/backports-extra 25 | sudo apt-get update 26 | sudo apt-get install -y build-essential ninja-build git \ 27 | libxft-dev libepoxy-dev libgles2-mesa-dev libegl1-mesa-dev \ 28 | libpolkit-agent-1-dev libpolkit-gobject-1-dev libunwind-dev \ 29 | libx11-dev libxext-dev libdrm-dev libcups2-dev libsass-dev \ 30 | linux-headers-$(uname -r) libcurl4-nss-dev libxcursor-dev \ 31 | libxkbcommon-dev libxml2-utils libxrandr-dev libxi-dev \ 32 | libwayland-dev libxinerama-dev \ 33 | gperf libappstream-dev libxmlb-dev libxdamage-dev gettext valac \ 34 | libgtk-3-dev \ 35 | libayatana-appindicator3-dev \ 36 | gir1.2-gtk-3.0 \ 37 | gir1.2-ayatanaappindicator3-0.1 \ 38 | libglib2.0-0 \ 39 | libgirepository-1.0-1 \ 40 | libdbus-1-dev \ 41 | libdbus-glib-1-dev \ 42 | gir1.2-dbusmenu-glib-0.4 \ 43 | gir1.2-dbusmenu-gtk3-0.4 \ 44 | gir1.2-glib-2.0 \ 45 | flatpak flatpak-builder appstream-util 46 | 47 | sudo pip install --upgrade pip meson 48 | 49 | - name: Compile Gtk and friends 50 | run: | 51 | echo "Compiling gobject-introspection into /usr" 52 | echo "Using PKG_CONFIG_PATH: 
$PKG_CONFIG_PATH" 53 | GOBJECT_INTROSPECTION_VERSION="1.80.0" 54 | wget "https://download.gnome.org/sources/gobject-introspection/${GOBJECT_INTROSPECTION_VERSION%.*}/gobject-introspection-${GOBJECT_INTROSPECTION_VERSION}.tar.xz" 55 | tar -xf "gobject-introspection-${GOBJECT_INTROSPECTION_VERSION}.tar.xz" 56 | cd "gobject-introspection-${GOBJECT_INTROSPECTION_VERSION}" 57 | meson setup _build --prefix=/usr \ 58 | --buildtype=release \ 59 | -Ddoctool=disabled 60 | ninja -C _build 61 | sudo ninja -C _build install 62 | cd .. 63 | git clone https://gitlab.gnome.org/GNOME/glib.git --branch glib-2-82 --depth 1 64 | cd glib 65 | meson setup _build --prefix=/usr \ 66 | -Dintrospection=enabled \ 67 | --buildtype=release -Ddocumentation=false -Dtests=false -Dman=false 68 | ninja -C _build 69 | sudo ninja -C _build install 70 | cd .. 71 | git clone https://gitlab.gnome.org/GNOME/pango.git --branch 1.52.2 --depth 1 72 | cd pango 73 | meson setup _build --prefix=/usr \ 74 | --buildtype=release -Dintrospection=enabled \ 75 | -Dfontconfig=enabled -Dfreetype=enabled 76 | ninja -C _build 77 | sudo ninja -C _build install 78 | cd .. 79 | wget "https://download.gnome.org/sources/graphene/1.8/graphene-1.8.6.tar.xz" 80 | tar -xf "graphene-1.8.6.tar.xz" 81 | cd "graphene-1.8.6" 82 | meson setup _build --prefix=/usr \ 83 | --buildtype=release \ 84 | -Dtests=false 85 | ninja -C _build 86 | sudo ninja -C _build install 87 | cd .. 88 | git clone https://gitlab.freedesktop.org/fontconfig/fontconfig.git --branch 2.15.0 --depth 1 89 | cd fontconfig 90 | meson setup _build --prefix=/usr \ 91 | --buildtype=release -Ddoc=disabled -Dtests=disabled -Dtools=disabled 92 | ninja -C _build 93 | sudo ninja -C _build install 94 | cd .. 
95 | git clone https://gitlab.gnome.org/GNOME/gtk.git --branch gtk-4-18 --depth 1 96 | cd gtk 97 | meson setup _build --prefix=/usr \ 98 | --buildtype=release \ 99 | -Dmedia-gstreamer=disabled \ 100 | -Dbuild-tests=false \ 101 | -Dbuild-examples=false \ 102 | -Dbuild-demos=false \ 103 | -Dintrospection=enabled \ 104 | -Dvulkan=disabled 105 | ninja -C _build 106 | sudo ninja -C _build install 107 | cd .. 108 | git clone https://gitlab.gnome.org/GNOME/libadwaita.git --branch libadwaita-1-7 --depth 1 109 | cd libadwaita 110 | meson setup _build --prefix=/usr \ 111 | -Dintrospection=enabled \ 112 | --buildtype=release 113 | ninja -C _build 114 | sudo ninja -C _build install 115 | 116 | - name: Set environment for build 117 | run: | 118 | cp .env.ci .env 119 | 120 | - name: Build package 121 | run: | 122 | python3 build-ci.py 123 | 124 | - name: Store the distribution packages 125 | uses: actions/upload-artifact@v4 126 | with: 127 | name: linux-dist 128 | path: dist/ 129 | 130 | build-flatpak: 131 | name: Build Flatpak 132 | runs-on: ubuntu-latest 133 | steps: 134 | - name: Checkout 135 | uses: actions/checkout@v4 136 | with: 137 | submodules: recursive 138 | fetch-depth: 0 139 | 140 | - name: Install Flatpak dependencies 141 | run: | 142 | sudo apt install flatpak flatpak-builder python3-pip appstream-util 143 | pip3 install setuptools wheel build 144 | pip3 install . 
145 | 146 | - name: Add Flathub remote 147 | run: | 148 | flatpak remote-add --user --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo 149 | 150 | - name: Build Flatpak and create bundle 151 | run: | 152 | flatpak-builder --user --repo=repo --force-clean --install-deps-from=flathub build-dir linux/org.fuentelibre.gtk_llm_Chat.flatpak.yml 153 | flatpak build-bundle repo gtk-llm-chat.flatpak org.fuentelibre.gtk_llm_Chat 154 | 155 | - name: Test Flatpak (versión) 156 | run: | 157 | flatpak install --user --noninteractive gtk-llm-chat.flatpak 158 | flatpak run org.fuentelibre.gtk_llm_Chat --help || true 159 | 160 | - name: Upload Flatpak bundle 161 | uses: actions/upload-artifact@v4 162 | with: 163 | name: flatpak-bundle 164 | path: gtk-llm-chat.flatpak 165 | -------------------------------------------------------------------------------- /hooks/rthook_numpy_python313.py: -------------------------------------------------------------------------------- 1 | """ 2 | Runtime hook para PyInstaller que soluciona incompatibilidades de NumPy 3 | con Python 3.13 interceptando add_docstring a nivel muy bajo. 
4 | """ 5 | 6 | import sys 7 | import warnings 8 | 9 | # Solo aplicar en Python 3.13+ y entornos congelados 10 | if getattr(sys, 'frozen', False) and sys.version_info >= (3, 13): 11 | 12 | # Estrategia 1: Interceptar la función C add_docstring antes de que se use 13 | try: 14 | # Monkeypatch directo en el módulo sys para interceptar add_docstring 15 | original_add_docstring = None 16 | 17 | def create_safe_add_docstring(original_func): 18 | """Crea una versión segura de add_docstring.""" 19 | def safe_add_docstring(func, docstring): 20 | # Normalizar docstring a string 21 | if docstring is None: 22 | docstring = "" 23 | elif not isinstance(docstring, str): 24 | try: 25 | # Manejar bytes o otros tipos 26 | if isinstance(docstring, bytes): 27 | docstring = docstring.decode('utf-8', errors='replace') 28 | else: 29 | docstring = str(docstring) if docstring else "" 30 | except Exception: 31 | docstring = "" 32 | 33 | try: 34 | return original_func(func, docstring) 35 | except TypeError as e: 36 | if "should be a str" in str(e): 37 | # Fallback directo 38 | try: 39 | func.__doc__ = docstring 40 | return func 41 | except Exception: 42 | return func 43 | raise 44 | except Exception: 45 | # Para cualquier otro error, intentar asignación directa 46 | try: 47 | func.__doc__ = docstring 48 | return func 49 | except Exception: 50 | return func 51 | 52 | return safe_add_docstring 53 | 54 | # Estrategia 2: Hook de importación más agresivo 55 | import builtins 56 | original_import = builtins.__import__ 57 | 58 | def aggressive_numpy_patch_import(name, globals=None, locals=None, fromlist=(), level=0): 59 | """Hook de importación que patchea numpy de forma más agresiva.""" 60 | 61 | # Verificar si se está importando algo relacionado con numpy 62 | numpy_related = ( 63 | 'numpy' in name or 64 | (isinstance(fromlist, (list, tuple)) and any('numpy' in str(f) for f in fromlist)) or 65 | name.startswith('numpy') or 66 | name.endswith('numpy') 67 | ) 68 | 69 | # Pre-patch antes de 
importar si es numpy 70 | if numpy_related: 71 | try: 72 | # Intentar patchear add_docstring en el namespace global 73 | import types 74 | 75 | # Buscar add_docstring en el módulo actual si existe 76 | current_module = sys.modules.get(name) 77 | if current_module: 78 | if hasattr(current_module, 'add_docstring') and not hasattr(current_module.add_docstring, '_patched'): 79 | current_module.add_docstring = create_safe_add_docstring(current_module.add_docstring) 80 | current_module.add_docstring._patched = True 81 | #print(f"OK Pre-patched add_docstring in {name}") 82 | 83 | except Exception as e: 84 | pass # Ignorar errores de pre-patch 85 | 86 | # Realizar importación normal 87 | try: 88 | module = original_import(name, globals, locals, fromlist, level) 89 | except Exception as e: 90 | if "add_docstring" in str(e) and "should be a str" in str(e): 91 | #print(f"⚠ Caught add_docstring error during import of {name}: {e}") 92 | 93 | # Intentar aplicar parche de emergencia 94 | try: 95 | # Buscar y patchear add_docstring en todos los módulos cargados 96 | for mod_name, mod in sys.modules.items(): 97 | if mod and hasattr(mod, 'add_docstring') and not hasattr(mod.add_docstring, '_emergency_patched'): 98 | try: 99 | mod.add_docstring = create_safe_add_docstring(mod.add_docstring) 100 | mod.add_docstring._emergency_patched = True 101 | print(f"OK Emergency patched add_docstring in {mod_name}") 102 | except Exception: 103 | pass 104 | 105 | # Reintentar importación 106 | module = original_import(name, globals, locals, fromlist, level) 107 | #print(f"OK Successfully imported {name} after emergency patch") 108 | 109 | except Exception as retry_e: 110 | print(f"Fail: Failed to import {name} even after emergency patch: {retry_e}") 111 | raise e # Re-lanzar el error original 112 | else: 113 | raise # Re-lanzar otros errores 114 | 115 | # Post-patch después de importar exitosamente 116 | if numpy_related and module: 117 | try: 118 | # Buscar add_docstring en el módulo importado y 
sus submódulos 119 | modules_to_check = [module] 120 | 121 | # Agregar submódulos conocidos 122 | if hasattr(module, '_core'): 123 | modules_to_check.append(module._core) 124 | if hasattr(module._core, 'overrides'): 125 | modules_to_check.append(module._core.overrides) 126 | 127 | for mod in modules_to_check: 128 | if mod and hasattr(mod, 'add_docstring') and not hasattr(mod.add_docstring, '_post_patched'): 129 | mod.add_docstring = create_safe_add_docstring(mod.add_docstring) 130 | mod.add_docstring._post_patched = True 131 | #print(f"OK Post-patched add_docstring in {getattr(mod, '__name__', 'unknown')}") 132 | 133 | except Exception as e: 134 | pass # Ignorar errores de post-patch 135 | 136 | return module 137 | 138 | # Aplicar el hook de importación agresivo 139 | builtins.__import__ = aggressive_numpy_patch_import 140 | #print("OK Aggressive NumPy Python 3.13 compatibility hook installed") 141 | 142 | except Exception as e: 143 | warnings.warn(f"Failed to install NumPy Python 3.13 compatibility hook: {e}", RuntimeWarning) 144 | -------------------------------------------------------------------------------- /windows/build.nsi: -------------------------------------------------------------------------------- 1 | #!/usr/bin/makensis 2 | 3 | ; BEGIN NSIS TEMPLATE HEADER 4 | !define /file APPDIR "APPDIR" 5 | !define /file FILENAME "FILENAME" 6 | !define /file APPNAME "APPNAME" 7 | !define /file VERSION "VERSION" 8 | !define /file AUTHOR "AUTHOR" 9 | !define /file PUBLISHER "PUBLISHER" 10 | !define /file DESCRIPTION "DESCRIPTION" 11 | !define /file ICON "ICON" 12 | !define /file LICENSE "LICENSE" 13 | !define /file INSTALLSIZE "INSTALLSIZE" 14 | !define /file ARCH "ARCH" 15 | ; END NSIS TEMPLATE HEADER 16 | 17 | ; Marker file to tell the uninstaller that it's a user installation 18 | !define USER_INSTALL_MARKER _user_install_marker 19 | 20 | SetCompressor lzma 21 | 22 | !if "${NSIS_PACKEDVERSION}" >= 0x03000000 23 | Unicode true 24 | ManifestDPIAware true 25 | 
!endif 26 | 27 | !define MULTIUSER_EXECUTIONLEVEL Highest 28 | !define MULTIUSER_INSTALLMODE_DEFAULT_CURRENTUSER 29 | !define MULTIUSER_MUI 30 | !define MULTIUSER_INSTALLMODE_COMMANDLINE 31 | !define MULTIUSER_INSTALLMODE_INSTDIR "${APPNAME}" 32 | !if ${ARCH} == "AMD64" 33 | !define MULTIUSER_INSTALLMODE_FUNCTION correctProgramFiles 34 | !endif 35 | !include MultiUser.nsh 36 | !include FileFunc.nsh 37 | 38 | ; Modern UI installer stuff 39 | !include "MUI2.nsh" 40 | !define MUI_ABORTWARNING 41 | !define MUI_ICON "${ICON}" 42 | !define MUI_UNICON "${ICON}" 43 | 44 | ; UI pages 45 | !insertmacro MUI_PAGE_WELCOME 46 | !if ${LICENSE} != "None" 47 | !insertmacro MUI_PAGE_LICENSE "${LICENSE}" 48 | !endif 49 | !insertmacro MULTIUSER_PAGE_INSTALLMODE 50 | !insertmacro MUI_PAGE_DIRECTORY 51 | !insertmacro MUI_PAGE_INSTFILES 52 | !insertmacro MUI_PAGE_FINISH 53 | !insertmacro MUI_UNPAGE_CONFIRM 54 | !insertmacro MUI_UNPAGE_INSTFILES 55 | 56 | ; UI languages 57 | !insertmacro MUI_LANGUAGE "English" 58 | !insertmacro MUI_LANGUAGE "French" 59 | !insertmacro MUI_LANGUAGE "German" 60 | !insertmacro MUI_LANGUAGE "Spanish" 61 | !insertmacro MUI_LANGUAGE "SpanishInternational" 62 | !insertmacro MUI_LANGUAGE "SimpChinese" 63 | !insertmacro MUI_LANGUAGE "TradChinese" 64 | !insertmacro MUI_LANGUAGE "Japanese" 65 | !insertmacro MUI_LANGUAGE "Italian" 66 | !insertmacro MUI_LANGUAGE "Dutch" 67 | !insertmacro MUI_LANGUAGE "Danish" 68 | !insertmacro MUI_LANGUAGE "Swedish" 69 | !insertmacro MUI_LANGUAGE "Norwegian" 70 | !insertmacro MUI_LANGUAGE "NorwegianNynorsk" 71 | !insertmacro MUI_LANGUAGE "Russian" 72 | !insertmacro MUI_LANGUAGE "Portuguese" 73 | !insertmacro MUI_LANGUAGE "PortugueseBR" 74 | !insertmacro MUI_LANGUAGE "Polish" 75 | !insertmacro MUI_LANGUAGE "Ukrainian" 76 | !insertmacro MUI_LANGUAGE "Czech" 77 | !insertmacro MUI_LANGUAGE "Slovak" 78 | !insertmacro MUI_LANGUAGE "Slovenian" 79 | !insertmacro MUI_LANGUAGE "Arabic" 80 | !insertmacro MUI_LANGUAGE "Hebrew" 81 | !insertmacro 
MUI_LANGUAGE "Indonesian" 82 | !insertmacro MUI_LANGUAGE "Mongolian" 83 | !insertmacro MUI_LANGUAGE "Albanian" 84 | !insertmacro MUI_LANGUAGE "Belarusian" 85 | !insertmacro MUI_LANGUAGE "Esperanto" 86 | !insertmacro MUI_LANGUAGE "Asturian" 87 | !insertmacro MUI_LANGUAGE "Basque" 88 | !insertmacro MUI_LANGUAGE "ScotsGaelic" 89 | !insertmacro MUI_LANGUAGE "Vietnamese" 90 | !insertmacro MUI_LANGUAGE "Armenian" 91 | !insertmacro MUI_LANGUAGE "Corsican" 92 | !insertmacro MUI_LANGUAGE "Tatar" 93 | !insertmacro MUI_LANGUAGE "Hindi" 94 | 95 | ; UI languages without lang strings 96 | ; !insertmacro MUI_LANGUAGE "Korean" 97 | ; !insertmacro MUI_LANGUAGE "Finnish" 98 | ; !insertmacro MUI_LANGUAGE "Greek" 99 | ; !insertmacro MUI_LANGUAGE "Croatian" 100 | ; !insertmacro MUI_LANGUAGE "Bulgarian" 101 | ; !insertmacro MUI_LANGUAGE "Hungarian" 102 | ; !insertmacro MUI_LANGUAGE "Thai" 103 | ; !insertmacro MUI_LANGUAGE "Romanian" 104 | ; !insertmacro MUI_LANGUAGE "Latvian" 105 | ; !insertmacro MUI_LANGUAGE "Macedonian" 106 | ; !insertmacro MUI_LANGUAGE "Estonian" 107 | ; !insertmacro MUI_LANGUAGE "Turkish" 108 | ; !insertmacro MUI_LANGUAGE "Lithuanian" 109 | ; !insertmacro MUI_LANGUAGE "Serbian" 110 | ; !insertmacro MUI_LANGUAGE "SerbianLatin" 111 | ; !insertmacro MUI_LANGUAGE "Farsi" 112 | ; !insertmacro MUI_LANGUAGE "Luxembourgish" 113 | ; !insertmacro MUI_LANGUAGE "Breton" 114 | ; !insertmacro MUI_LANGUAGE "Icelandic" 115 | ; !insertmacro MUI_LANGUAGE "Malay" 116 | ; !insertmacro MUI_LANGUAGE "Bosnian" 117 | ; !insertmacro MUI_LANGUAGE "Kurdish" 118 | ; !insertmacro MUI_LANGUAGE "Irish" 119 | ; !insertmacro MUI_LANGUAGE "Uzbek" 120 | ; !insertmacro MUI_LANGUAGE "Galician" 121 | ; !insertmacro MUI_LANGUAGE "Afrikaans" 122 | ; !insertmacro MUI_LANGUAGE "Catalan" 123 | ; !insertmacro MUI_LANGUAGE "Pashto" 124 | ; !insertmacro MUI_LANGUAGE "Georgian" 125 | ; !insertmacro MUI_LANGUAGE "Welsh" 126 | 127 | Name "${APPNAME} ${VERSION}" 128 | !if ${VERSION} != "None" 129 | OutFile 
"${FILENAME}-${VERSION}-${ARCH}.exe" 130 | !else 131 | OutFile "${FILENAME}-${ARCH}.exe" 132 | !endif 133 | ShowInstDetails show 134 | ShowUninstDetails show 135 | 136 | Var cmdLineInstallDir 137 | 138 | Section -SETTINGS 139 | SetOutPath "$INSTDIR" 140 | SetOverwrite ifnewer 141 | SectionEnd 142 | 143 | Section "!${APPNAME}" sec_app 144 | !if ${ARCH} == "AMD64" 145 | SetRegView 64 146 | !else 147 | SetRegView 32 148 | !endif 149 | SectionIn RO 150 | File ${ICON} 151 | 152 | SetOutPath "$INSTDIR" 153 | File /r "${APPDIR}\*" 154 | 155 | ; Marker file for per-user install 156 | StrCmp $MultiUser.InstallMode CurrentUser 0 +3 157 | FileOpen $0 "$INSTDIR\${USER_INSTALL_MARKER}" w 158 | FileClose $0 159 | SetFileAttributes "$INSTDIR\${USER_INSTALL_MARKER}" HIDDEN 160 | 161 | WriteUninstaller $INSTDIR\uninstall.exe 162 | 163 | CreateDirectory "$SMPROGRAMS\${APPNAME}" 164 | CreateShortCut "$SMPROGRAMS\${APPNAME}\${APPNAME}.lnk" "$INSTDIR\${FILENAME}.exe" "" "$INSTDIR\${ICON}" 165 | CreateShortCut "$SMPROGRAMS\${APPNAME}\GTK LLM Applet.lnk" "$INSTDIR\${FILENAME}.exe --applet" "" "$INSTDIR\${ICON}" 166 | 167 | ; Add ourselves to Add/Remove Programs 168 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 169 | "DisplayName" "${APPNAME}" 170 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 171 | "UninstallString" '"$INSTDIR\uninstall.exe"' 172 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 173 | "InstallLocation" "$INSTDIR" 174 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 175 | "DisplayIcon" "$INSTDIR\${ICON}" 176 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 177 | "Publisher" "${PUBLISHER}" 178 | WriteRegStr SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 179 | "DisplayVersion" "${VERSION}" 180 | WriteRegDWORD SHCTX 
"Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 181 | "NoModify" 1 182 | WriteRegDWORD SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 183 | "NoRepair" 1 184 | WriteRegDWORD SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" \ 185 | "EstimatedSize" "${INSTALLSIZE}" 186 | 187 | ; Check if we need to reboot 188 | IfRebootFlag 0 noreboot 189 | MessageBox MB_YESNO "A reboot is required to finish the installation. Do you wish to reboot now?" \ 190 | /SD IDNO IDNO noreboot 191 | Reboot 192 | noreboot: 193 | SectionEnd 194 | 195 | Section "Uninstall" 196 | !if ${ARCH} == "AMD64" 197 | SetRegView 64 198 | !else 199 | SetRegView 32 200 | !endif 201 | SetShellVarContext all 202 | IfFileExists "$INSTDIR\${USER_INSTALL_MARKER}" 0 +3 203 | SetShellVarContext current 204 | Delete "$INSTDIR\${USER_INSTALL_MARKER}" 205 | 206 | RMDir /r /REBOOTOK "$SMPROGRAMS\${APPNAME}" 207 | RMDir /r /REBOOTOK "$INSTDIR" 208 | DeleteRegKey SHCTX "Software\Microsoft\Windows\CurrentVersion\Uninstall\${APPNAME}" 209 | SectionEnd 210 | 211 | ; Functions 212 | 213 | Function .onMouseOverSection 214 | ; Find which section the mouse is over, and set the corresponding description. 215 | FindWindow $R0 "#32770" "" $HWNDPARENT 216 | GetDlgItem $R0 $R0 1043 ; description item (must be added to the UI) 217 | 218 | StrCmp $0 ${sec_app} "" +2 219 | SendMessage $R0 ${WM_SETTEXT} 0 "STR:${APPNAME}" 220 | FunctionEnd 221 | 222 | Function .onInit 223 | ; Multiuser.nsh breaks /D command line parameter. Parse /INSTDIR instead. 
224 | ; Cribbing from https://nsis-dev.github.io/NSIS-Forums/html/t-299280.html 225 | ${GetParameters} $0 226 | ClearErrors 227 | ${GetOptions} '$0' "/INSTDIR=" $1 228 | IfErrors +2 ; Error means flag not found 229 | StrCpy $cmdLineInstallDir $1 230 | ClearErrors 231 | 232 | !insertmacro MULTIUSER_INIT 233 | 234 | ; If cmd line included /INSTDIR, override the install dir set by MultiUser 235 | StrCmp $cmdLineInstallDir "" +2 236 | StrCpy $INSTDIR $cmdLineInstallDir 237 | FunctionEnd 238 | 239 | Function un.onInit 240 | !insertmacro MULTIUSER_UNINIT 241 | FunctionEnd 242 | 243 | Function correctProgramFiles 244 | ; The multiuser machinery doesn't know about the different Program files 245 | ; folder for 64-bit applications. Override the install dir it set. 246 | StrCmp $MultiUser.InstallMode AllUsers 0 +2 247 | StrCpy $INSTDIR "$PROGRAMFILES64\${MULTIUSER_INSTALLMODE_INSTDIR}" 248 | FunctionEnd 249 | -------------------------------------------------------------------------------- /po/gtk-llm-chat.pot: -------------------------------------------------------------------------------- 1 | # SOME DESCRIPTIVE TITLE. 2 | # Copyright (C) YEAR Your Name 3 | # This file is distributed under the same license as the gtk-llm-chat package. 4 | # FIRST AUTHOR , YEAR. 
5 | # 6 | #, fuzzy 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: gtk-llm-chat 0.1\n" 10 | "Report-Msgid-Bugs-To: your@email.com\n" 11 | "POT-Creation-Date: 2025-06-04 18:35-0500\n" 12 | "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" 13 | "Last-Translator: FULL NAME \n" 14 | "Language-Team: LANGUAGE \n" 15 | "Language: \n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | 20 | #: gtk_llm_chat/chat_window.py:135 21 | msgid "Model Settings" 22 | msgstr "" 23 | 24 | #: gtk_llm_chat/chat_window.py:141 25 | msgid "Rename" 26 | msgstr "" 27 | 28 | #: gtk_llm_chat/chat_window.py:211 29 | msgid "Send" 30 | msgstr "" 31 | 32 | #: gtk_llm_chat/chat_sidebar.py:65 gtk_llm_chat/chat_sidebar.py:278 33 | #: gtk_llm_chat/wide_model_selector.py:454 34 | msgid "Model" 35 | msgstr "" 36 | 37 | #: gtk_llm_chat/chat_sidebar.py:69 38 | msgid "Change Model" 39 | msgstr "" 40 | 41 | #: gtk_llm_chat/chat_sidebar.py:70 gtk_llm_chat/chat_sidebar.py:312 42 | #: gtk_llm_chat/chat_sidebar.py:432 gtk_llm_chat/wide_model_selector.py:516 43 | msgid "Provider" 44 | msgstr "" 45 | 46 | #: gtk_llm_chat/chat_sidebar.py:79 47 | msgid "Set as Default Model" 48 | msgstr "" 49 | 50 | #: gtk_llm_chat/chat_sidebar.py:88 gtk_llm_chat/chat_sidebar.py:138 51 | msgid "Model Parameters" 52 | msgstr "" 53 | 54 | #: gtk_llm_chat/chat_sidebar.py:97 55 | msgid "Conversation" 56 | msgstr "" 57 | 58 | #: gtk_llm_chat/chat_sidebar.py:100 gtk_llm_chat/chat_application.py:417 59 | msgid "Delete Conversation" 60 | msgstr "" 61 | 62 | #: gtk_llm_chat/chat_sidebar.py:110 63 | msgid "Information" 64 | msgstr "" 65 | 66 | #: gtk_llm_chat/chat_sidebar.py:112 67 | msgid "About" 68 | msgstr "" 69 | 70 | #: gtk_llm_chat/chat_sidebar.py:121 71 | msgid "Actions" 72 | msgstr "" 73 | 74 | #: gtk_llm_chat/chat_sidebar.py:129 75 | msgid "Model Selector" 76 | msgstr "" 77 | 78 | #: gtk_llm_chat/chat_sidebar.py:145 79 | msgid "Temperature" 80 | msgstr "" 81 | 82 | 
#: gtk_llm_chat/chat_sidebar.py:158 83 | msgid "System Prompt" 84 | msgstr "" 85 | 86 | #: gtk_llm_chat/chat_sidebar.py:165 87 | msgid "Parameters" 88 | msgstr "" 89 | 90 | #: gtk_llm_chat/chat_sidebar.py:241 gtk_llm_chat/chat_sidebar.py:308 91 | #: gtk_llm_chat/chat_sidebar.py:310 gtk_llm_chat/chat_sidebar.py:432 92 | #: gtk_llm_chat/model_selection.py:129 93 | msgid "Unknown Provider" 94 | msgstr "" 95 | 96 | #: gtk_llm_chat/chat_sidebar.py:247 97 | msgid "Set Default Model" 98 | msgstr "" 99 | 100 | #: gtk_llm_chat/chat_sidebar.py:248 101 | msgid "Do you want to set" 102 | msgstr "" 103 | 104 | #: gtk_llm_chat/chat_sidebar.py:248 105 | msgid "from" 106 | msgstr "" 107 | 108 | #: gtk_llm_chat/chat_sidebar.py:248 109 | msgid "as the default model for new conversations?" 110 | msgstr "" 111 | 112 | #: gtk_llm_chat/chat_sidebar.py:251 gtk_llm_chat/chat_sidebar.py:330 113 | #: gtk_llm_chat/model_selector.py:291 gtk_llm_chat/chat_application.py:420 114 | #: gtk_llm_chat/wide_model_selector.py:310 115 | msgid "Cancel" 116 | msgstr "" 117 | 118 | #: gtk_llm_chat/chat_sidebar.py:252 119 | msgid "Set as Default" 120 | msgstr "" 121 | 122 | #: gtk_llm_chat/chat_sidebar.py:278 123 | msgid "set as default" 124 | msgstr "" 125 | 126 | #: gtk_llm_chat/chat_sidebar.py:327 127 | msgid "Set System Prompt" 128 | msgstr "" 129 | 130 | #: gtk_llm_chat/chat_sidebar.py:328 131 | msgid "Enter the system prompt for the AI model:" 132 | msgstr "" 133 | 134 | #: gtk_llm_chat/chat_sidebar.py:331 135 | msgid "Set" 136 | msgstr "" 137 | 138 | #: gtk_llm_chat/chat_sidebar.py:381 139 | msgid "Current" 140 | msgstr "" 141 | 142 | #: gtk_llm_chat/chat_sidebar.py:383 143 | msgid "Not set" 144 | msgstr "" 145 | 146 | #: gtk_llm_chat/chat_sidebar.py:413 147 | msgid "This is the current default model" 148 | msgstr "" 149 | 150 | #: gtk_llm_chat/chat_sidebar.py:417 151 | msgid "Set as default model" 152 | msgstr "" 153 | 154 | #: gtk_llm_chat/model_selector.py:46 155 | msgid "Providers" 156 | msgstr 
"" 157 | 158 | #: gtk_llm_chat/model_selector.py:50 159 | msgid "Models" 160 | msgstr "" 161 | 162 | #: gtk_llm_chat/model_selector.py:68 163 | msgid "Select Provider" 164 | msgstr "" 165 | 166 | #: gtk_llm_chat/model_selector.py:95 167 | msgid "Select Model" 168 | msgstr "" 169 | 170 | #: gtk_llm_chat/model_selector.py:124 171 | msgid "No models found" 172 | msgstr "" 173 | 174 | #: gtk_llm_chat/model_selector.py:145 175 | msgid "models" 176 | msgstr "" 177 | 178 | #: gtk_llm_chat/model_selector.py:150 179 | msgid "API key required" 180 | msgstr "" 181 | 182 | #: gtk_llm_chat/model_selector.py:152 183 | msgid "No models" 184 | msgstr "" 185 | 186 | #: gtk_llm_chat/model_selector.py:210 gtk_llm_chat/model_selector.py:335 187 | msgid "API Key is configured" 188 | msgstr "" 189 | 190 | #: gtk_llm_chat/model_selector.py:211 gtk_llm_chat/model_selector.py:336 191 | msgid "Change Key" 192 | msgstr "" 193 | 194 | #: gtk_llm_chat/model_selector.py:216 gtk_llm_chat/model_selector.py:340 195 | msgid "API Key Required" 196 | msgstr "" 197 | 198 | #: gtk_llm_chat/model_selector.py:217 gtk_llm_chat/model_selector.py:292 199 | #: gtk_llm_chat/model_selector.py:341 gtk_llm_chat/wide_model_selector.py:311 200 | msgid "Set Key" 201 | msgstr "" 202 | 203 | #: gtk_llm_chat/model_selector.py:235 204 | msgid "No models available" 205 | msgstr "" 206 | 207 | #: gtk_llm_chat/model_selector.py:236 208 | msgid "Configure an API key to access models from this provider" 209 | msgstr "" 210 | 211 | #: gtk_llm_chat/model_selector.py:241 gtk_llm_chat/wide_model_selector.py:251 212 | msgid "No models found for this provider" 213 | msgstr "" 214 | 215 | #: gtk_llm_chat/model_selector.py:288 gtk_llm_chat/wide_model_selector.py:307 216 | msgid "Enter API Key" 217 | msgstr "" 218 | 219 | #: gtk_llm_chat/model_selector.py:289 gtk_llm_chat/wide_model_selector.py:308 220 | msgid "Enter the API key for" 221 | msgstr "" 222 | 223 | #: gtk_llm_chat/model_selector.py:298 
gtk_llm_chat/wide_model_selector.py:317 224 | msgid "Paste your API key here" 225 | msgstr "" 226 | 227 | #: gtk_llm_chat/model_selector.py:356 228 | msgid "Model Selector Test" 229 | msgstr "" 230 | 231 | #: gtk_llm_chat/tray_applet.py:103 gtk_llm_chat/tray_applet.py:239 232 | #: gtk_llm_chat/welcome.py:59 gtk_llm_chat/llm_client.py:22 233 | msgid "New Conversation" 234 | msgstr "" 235 | 236 | #: gtk_llm_chat/tray_applet.py:107 gtk_llm_chat/tray_applet.py:241 237 | msgid "Quit" 238 | msgstr "" 239 | 240 | #: gtk_llm_chat/tray_applet.py:210 gtk_llm_chat/chat_application.py:68 241 | msgid "" 242 | "\n" 243 | "Closing application..." 244 | msgstr "" 245 | 246 | #: gtk_llm_chat/tray_applet.py:225 247 | msgid "LLM Conversations" 248 | msgstr "" 249 | 250 | #: gtk_llm_chat/welcome.py:20 251 | msgid "Tray applet" 252 | msgstr "" 253 | 254 | #: gtk_llm_chat/welcome.py:20 255 | msgid "Default Model" 256 | msgstr "" 257 | 258 | #: gtk_llm_chat/welcome.py:53 259 | msgid "Next" 260 | msgstr "" 261 | 262 | #: gtk_llm_chat/welcome.py:117 263 | msgid "Own the conversation." 264 | msgstr "" 265 | 266 | #: gtk_llm_chat/welcome.py:118 267 | msgid "Use any model you want. Your conversations are stored locally." 268 | msgstr "" 269 | 270 | #: gtk_llm_chat/welcome.py:119 271 | msgid "This wizard will guide you through the initial setup" 272 | msgstr "" 273 | 274 | #: gtk_llm_chat/welcome.py:125 275 | msgid "Start" 276 | msgstr "" 277 | 278 | #: gtk_llm_chat/welcome.py:157 279 | msgid "Access conversations from the convenience of your system tray" 280 | msgstr "" 281 | 282 | #: gtk_llm_chat/welcome.py:163 283 | msgid "Would you like to start the applet with your session?" 284 | msgstr "" 285 | 286 | #: gtk_llm_chat/welcome.py:173 287 | msgid "Yes, with my session" 288 | msgstr "" 289 | 290 | #: gtk_llm_chat/welcome.py:174 291 | msgid "No, only when I start the app" 292 | msgstr "" 293 | 294 | #: gtk_llm_chat/welcome.py:209 295 | msgid "Loading model selection..." 
296 | msgstr "" 297 | 298 | #: gtk_llm_chat/welcome.py:219 299 | msgid "Ready to start!" 300 | msgstr "" 301 | 302 | #: gtk_llm_chat/welcome.py:429 303 | msgid "Set API Key" 304 | msgstr "" 305 | 306 | #: gtk_llm_chat/welcome.py:429 307 | msgid "Change API Key" 308 | msgstr "" 309 | 310 | #: gtk_llm_chat/chat_application.py:193 311 | msgid "Error: _version.py not found" 312 | msgstr "" 313 | 314 | #: gtk_llm_chat/chat_application.py:418 315 | msgid "Are you sure you want to delete the conversation?" 316 | msgstr "" 317 | 318 | #: gtk_llm_chat/chat_application.py:421 319 | msgid "Delete" 320 | msgstr "" 321 | 322 | #: gtk_llm_chat/chat_application.py:443 323 | msgid "Gtk LLM Chat" 324 | msgstr "" 325 | 326 | #: gtk_llm_chat/chat_application.py:446 327 | msgid "A frontend for LLM" 328 | msgstr "" 329 | 330 | #: gtk_llm_chat/wide_model_selector.py:92 331 | msgid "" 332 | "Please select a provider from the list on the left.\n" 333 | "Then, choose a model from the list that appears here." 334 | msgstr "" 335 | 336 | #: gtk_llm_chat/wide_model_selector.py:127 337 | msgid "Most AI models require an API key" 338 | msgstr "" 339 | 340 | #: gtk_llm_chat/wide_model_selector.py:134 341 | msgid "" 342 | "You'll need to register with each provider to obtain these authentication " 343 | "tokens." 344 | msgstr "" 345 | 346 | #: gtk_llm_chat/wide_model_selector.py:147 347 | msgid "No Selection" 348 | msgstr "" 349 | 350 | #: gtk_llm_chat/wide_model_selector.py:162 351 | msgid "No models or providers found." 
352 | msgstr "" 353 | 354 | #: gtk_llm_chat/wide_model_selector.py:163 355 | msgid "Error" 356 | msgstr "" 357 | 358 | #: gtk_llm_chat/wide_model_selector.py:442 359 | msgid "Model information not available" 360 | msgstr "" 361 | 362 | #: gtk_llm_chat/wide_model_selector.py:443 363 | msgid "Unable to retrieve model details" 364 | msgstr "" 365 | 366 | #: gtk_llm_chat/wide_model_selector.py:467 367 | msgid "Aliases" 368 | msgstr "" 369 | 370 | #: gtk_llm_chat/wide_model_selector.py:478 371 | msgid "API Key" 372 | msgstr "" 373 | 374 | #: gtk_llm_chat/wide_model_selector.py:487 375 | msgid "Required • Set" 376 | msgstr "" 377 | 378 | #: gtk_llm_chat/wide_model_selector.py:493 379 | msgid "Required • Not set" 380 | msgstr "" 381 | 382 | #: gtk_llm_chat/wide_model_selector.py:499 383 | msgid "Not required" 384 | msgstr "" 385 | 386 | #: gtk_llm_chat/wide_model_selector.py:510 387 | #: gtk_llm_chat/wide_model_selector.py:511 388 | msgid "Unknown" 389 | msgstr "" 390 | 391 | #: gtk_llm_chat/wide_model_selector.py:512 392 | msgid "Plugin" 393 | msgstr "" 394 | 395 | #: gtk_llm_chat/single_instance.py:26 gtk_llm_chat/single_instance.py:32 396 | msgid "Another instance is already running." 397 | msgstr "" 398 | 399 | #: gtk_llm_chat/llm_client.py:234 400 | msgid "LLMClient: Ignoring invalid temperature:" 401 | msgstr "" 402 | 403 | #: gtk_llm_chat/llm_client.py:269 404 | msgid "LLMClient: Starting stream processing..." 405 | msgstr "" 406 | 407 | #: gtk_llm_chat/llm_client.py:272 408 | msgid "LLMClient: Stream processing cancelled externally." 409 | msgstr "" 410 | 411 | #: gtk_llm_chat/llm_client.py:278 412 | msgid "LLMClient: Stream finished normally." 
def apply_numpy_python313_compatibility_patch():
    """
    Install a PREVENTIVE compatibility shim for NumPy on Python 3.13 to work
    around "add_docstring ... should be a str" errors in frozen bundles.

    Must run BEFORE anything numpy-related is imported: it wraps
    ``builtins.__import__`` so every numpy(-ish) module gets its
    ``add_docstring`` entry points patched the moment it is imported.
    Only active when running Python >= 3.13 inside a frozen executable.
    """
    if sys.version_info >= (3, 13) and getattr(sys, 'frozen', False):
        try:
            # Strategy: intercept builtins.__import__ BEFORE numpy is imported.
            import builtins

            if not hasattr(builtins.__import__, '_numpy_python313_patched'):
                original_import = builtins.__import__

                def safe_import_wrapper(name, globals=None, locals=None, fromlist=(), level=0):
                    """Wrapper that intercepts add_docstring errors during import."""
                    try:
                        # Normal import path.
                        module = original_import(name, globals, locals, fromlist, level)

                        # Anything numpy-related gets patched immediately.
                        if ('numpy' in name or
                                (fromlist and any('numpy' in str(f) for f in fromlist))):
                            _patch_module_add_docstring(module, name)

                        return module

                    except Exception as e:
                        if "add_docstring" in str(e) and "should be a str" in str(e):
                            debug_print(f"Intercepted add_docstring error in {name}: {e}")

                            # Emergency patch: sweep every loaded numpy module.
                            _emergency_patch_add_docstring()

                            # Retry the import once after patching.
                            try:
                                return original_import(name, globals, locals, fromlist, level)
                            except Exception as retry_e:
                                debug_print(f"Failed to import {name} even after emergency patch: {retry_e}")
                                raise e
                        else:
                            raise

                def _make_safe_add_docstring(original_func):
                    """Build a tolerant replacement bound to *this* original.

                    BUG FIX: the previous version defined the replacement
                    directly inside the ``for obj, attr`` loop below, closing
                    over the loop-local ``original_func``.  Python closures
                    bind names late, so once several locations were patched
                    every wrapper delegated to the LAST original seen.  A
                    factory function gives each wrapper its own binding.
                    """
                    def safe_add_docstring(func, docstring):
                        # Normalize the docstring; Python 3.13 requires str.
                        if docstring is None:
                            docstring = ""
                        elif not isinstance(docstring, str):
                            try:
                                docstring = str(docstring) if docstring else ""
                            except Exception:
                                docstring = ""

                        try:
                            return original_func(func, docstring)
                        except TypeError as e:
                            if "should be a str" in str(e):
                                # Direct assignment as a last-resort fallback.
                                try:
                                    func.__doc__ = docstring
                                except Exception:
                                    pass
                                return func
                            raise

                    safe_add_docstring._python313_safe = True
                    return safe_add_docstring

                def _patch_module_add_docstring(module, module_name):
                    """Patch add_docstring on one module and its known submodules."""
                    if not module:
                        return

                    # Known locations of add_docstring inside numpy.
                    locations = [
                        (module, 'add_docstring'),
                        (getattr(module, '_core', None), 'add_docstring'),
                        (getattr(getattr(module, '_core', None), 'overrides', None), 'add_docstring')
                    ]

                    for obj, attr in locations:
                        if obj and hasattr(obj, attr) and not hasattr(getattr(obj, attr), '_python313_safe'):
                            try:
                                setattr(obj, attr, _make_safe_add_docstring(getattr(obj, attr)))
                                debug_print(f"[OK] Patched add_docstring in {module_name}.{attr}")
                            except Exception as patch_e:
                                debug_print(f"Warning: Could not patch {module_name}.{attr}: {patch_e}")

                def _emergency_patch_add_docstring():
                    """Emergency sweep: patch add_docstring in every loaded numpy module."""
                    for mod_name, mod in list(sys.modules.items()):
                        if mod and 'numpy' in mod_name:
                            _patch_module_add_docstring(mod, mod_name)

                # Activate the wrapper and mark it so we never double-install.
                builtins.__import__ = safe_import_wrapper
                builtins.__import__._numpy_python313_patched = True
                debug_print("[OK] NumPy Python 3.13 compatibility wrapper installed")

        except Exception as e:
            debug_print(f"Warning: Could not install NumPy compatibility patch: {e}")

# Run the patch immediately when this module is loaded.
apply_numpy_python313_compatibility_patch()

# Benchmark: capture a start timestamp as early as possible when requested.
benchmark_startup = '--benchmark-startup' in sys.argv
start_time = time.time() if benchmark_startup else None
def parse_args(argv):
    """Parse the command line into a plain configuration dict.

    ``argv`` is a full ``sys.argv``-style list; element 0 (the program
    name) is skipped.
    """
    parser = argparse.ArgumentParser(description='GTK Frontend para LLM')
    parser.add_argument('--cid', type=str, help='ID de la conversación a continuar')
    parser.add_argument('-s', '--system', type=str, help='Prompt del sistema')
    parser.add_argument('-m', '--model', type=str, help='Modelo a utilizar')
    parser.add_argument('-c', '--continue-last', action='store_true', help='Continuar última conversación')
    parser.add_argument('-t', '--template', type=str, help='Template a utilizar')
    parser.add_argument('-p', '--param', nargs=2, action='append', metavar=('KEY', 'VALUE'), help='Parámetros para el template')
    parser.add_argument('-o', '--option', nargs=2, action='append', metavar=('KEY', 'VALUE'), help='Opciones para el modelo')
    parser.add_argument('-f', '--fragment', action='append', metavar='FRAGMENT', help='Fragmento (alias, URL, hash o ruta de archivo) para agregar al prompt')
    parser.add_argument('--benchmark-startup', action='store_true', help='Mide el tiempo hasta que la ventana se muestra y sale.')
    parser.add_argument('--applet', action='store_true', help='Inicia el applet de bandeja')
    args = parser.parse_args(argv[1:])
    # Flatten the argparse namespace into a dict so downstream code does not
    # depend on argparse.  `start_time` is the module-level timestamp captured
    # at import time (None unless --benchmark-startup was passed).
    config = {
        'cid': args.cid,
        'system': args.system,
        'model': args.model,
        'continue_last': args.continue_last,
        'template': args.template,
        'params': args.param,
        'options': args.option,
        'fragments': args.fragment,
        'benchmark_startup': args.benchmark_startup,
        'start_time': start_time,
        'applet': args.applet
    }
    return config

def main(argv=None):
    """
    Main entry point.

    Dispatches between two process roles: with ``--applet`` this process
    becomes the tray applet; otherwise it runs the GTK chat GUI (spawning
    the applet as a side effect on the initial launch).
    """
    if argv is None:
        argv = sys.argv
    config = parse_args(argv)

    if config.get('applet'):
        # This process is designated to be the applet.
        # launch_tray_applet will call tray_applet.main(),
        # which in turn calls ensure_single_instance("gtk_llm_applet").
        launch_tray_applet(config)
        return 0  # The applet process should exit here after launch_tray_applet returns.
    else:
        # This is a GUI process.
        # GUI processes do not manage a single_instance lock for themselves.
        # Only the initial GUI launch (no --cid) should spawn the applet.
        if not config.get('cid'):
            fork_or_spawn_applet(config)

    # Launch the main application.
    # This part is reached by initial GUI instances and fallback GUI instances (with --cid).
    # chat_application is imported lazily here to avoid loading GTK4 when only
    # the applet is launched.
    try:
        from .chat_application import LLMChatApplication
    except ImportError:
        # Fallback when run as a direct script (no package context).
        from chat_application import LLMChatApplication

    chat_app = LLMChatApplication(config)
    # Re-encode the subset of options GApplication understands as flags; the
    # full config was already handed to LLMChatApplication above.
    cmd_args = []
    if config.get('cid'):
        cmd_args.append(f"--cid={config['cid']}")
    if config.get('model'):
        cmd_args.append(f"--model={config['model']}")
    if config.get('template'):
        cmd_args.append(f"--template={config['template']}")
    return chat_app.run(cmd_args)

if __name__ == "__main__":
    result = main()
    sys.exit(result)
import sys
import os
from functools import wraps
import warnings

# Debug output is opt-in via the DEBUG environment variable.
DEBUG = os.environ.get('DEBUG') or False

def debug_print(*args, **kwargs):
    """Emit a tagged debug line; silent unless the DEBUG env flag is set."""
    if not DEBUG:
        return
    print("[Python313Compat]", *args, **kwargs)

def patch_add_docstring():
    """Make ``numpy.add_docstring`` tolerant of non-str docstrings.

    Python 3.13.3 made ``add_docstring`` strictly require a ``str``; some C
    extensions still pass ``bytes`` or ``None``.  Wrap the numpy function so
    the docstring argument is coerced first.

    Returns:
        bool: True if numpy was patched in this call, False otherwise.
    """
    try:
        import numpy as _np
    except ImportError:
        debug_print("NumPy no está disponible, omitiendo parche add_docstring")
        return False

    try:
        target = getattr(_np, 'add_docstring', None)
        if target is None or hasattr(target, '_python313_patched'):
            # Nothing to do: attribute missing or already wrapped.
            return False

        @wraps(target)
        def _coercing_add_docstring(obj, docstring, warn_on_python=True):
            """numpy.add_docstring wrapper that coerces docstring to str."""
            if isinstance(docstring, bytes):
                docstring = docstring.decode('utf-8', errors='replace')
            elif docstring is None:
                docstring = ""
            elif not isinstance(docstring, str):
                docstring = str(docstring)

            try:
                return target(obj, docstring, warn_on_python)
            except TypeError as exc:
                if "should be a str" in str(exc):
                    debug_print(f"Interceptado error add_docstring, forzando conversión: {exc}")
                    # Last resort: empty docstring.
                    return target(obj, "", warn_on_python)
                raise

        _coercing_add_docstring._python313_patched = True
        _np.add_docstring = _coercing_add_docstring
        debug_print("[OK] Parche numpy.add_docstring aplicado exitosamente")
        return True

    except ImportError:
        debug_print("NumPy no está disponible, omitiendo parche add_docstring")
        return False
    except Exception as exc:
        debug_print(f"Error aplicando parche add_docstring: {exc}")
        return False

def patch_scipy_extensions():
    """Preemptively import scipy (and scipy.special) to surface issues early.

    Returns:
        bool: True when scipy is importable, False otherwise.
    """
    try:
        import scipy  # noqa: F401
    except ImportError:
        debug_print("SciPy no está disponible, omitiendo parches")
        return False
    except Exception as exc:
        debug_print(f"Error aplicando parches de SciPy: {exc}")
        return False

    try:
        debug_print("[OK] SciPy detectado, aplicando parches preventivos")

        # scipy.special is the submodule known to trip add_docstring.
        try:
            import scipy.special  # noqa: F401
            debug_print("[OK] scipy.special importado correctamente")
        except Exception as exc:
            if "add_docstring" in str(exc):
                debug_print(f"Problema con scipy.special detectado: {exc}")

        return True

    except ImportError:
        debug_print("SciPy no está disponible, omitiendo parches")
        return False
    except Exception as exc:
        debug_print(f"Error aplicando parches de SciPy: {exc}")
        return False
Estrategia de recuperación: intentar reimportar después de parches 133 | try: 134 | # Aplicar parches adicionales si es necesario 135 | patch_add_docstring() 136 | 137 | # Intentar importar nuevamente 138 | if plugin_name in sys.modules: 139 | del sys.modules[plugin_name] 140 | 141 | plugin = __import__(plugin_name) 142 | debug_print(f"OK Plugin {plugin_name} recuperado exitosamente") 143 | patched_count += 1 144 | 145 | except Exception as retry_e: 146 | debug_print(f"Fail: Plugin {plugin_name} no pudo ser recuperado: {retry_e}") 147 | else: 148 | debug_print(f"Fail: Plugin {plugin_name} falló por otra razón: {e}") 149 | 150 | debug_print(f"Plugins LLM procesados exitosamente: {patched_count}/{len(known_problematic_plugins)}") 151 | return patched_count > 0 152 | 153 | def create_safe_llm_wrapper(): 154 | """ 155 | Crea un wrapper seguro para el módulo LLM que maneja errores de plugins 156 | """ 157 | try: 158 | import llm 159 | 160 | # Verificar si necesitamos crear wrapper 161 | original_get_models = llm.get_models 162 | 163 | def safe_get_models(): 164 | """Versión segura de get_models que maneja errores de plugins""" 165 | try: 166 | return list(original_get_models()) 167 | except Exception as e: 168 | if "add_docstring" in str(e): 169 | debug_print(f"Error en get_models interceptado: {e}") 170 | debug_print("Intentando solución alternativa...") 171 | 172 | # Aplicar parches y reintentar 173 | patch_add_docstring() 174 | patch_scipy_extensions() 175 | 176 | try: 177 | return list(original_get_models()) 178 | except: 179 | debug_print("Devolviendo lista vacía como fallback") 180 | return [] 181 | raise 182 | 183 | # Solo reemplazar si no está ya parchado 184 | if not hasattr(llm.get_models, '_python313_patched'): 185 | llm.get_models = safe_get_models 186 | llm.get_models._python313_patched = True 187 | debug_print("OK Wrapper seguro LLM.get_models aplicado") 188 | 189 | return True 190 | 191 | except ImportError: 192 | debug_print("LLM no está disponible para 
wrapper") 193 | return False 194 | except Exception as e: 195 | debug_print(f"Error creando wrapper LLM: {e}") 196 | return False 197 | 198 | def monkey_patch_c_extensions(): 199 | """ 200 | Aplica monkey patches a nivel bajo para extensiones C problemáticas 201 | """ 202 | try: 203 | # Parche a nivel de importación para interceptar errores 204 | original_import = __builtins__.__import__ 205 | 206 | def safe_import(name, globals=None, locals=None, fromlist=(), level=0): 207 | """Importación segura que intercepta errores de add_docstring""" 208 | try: 209 | return original_import(name, globals, locals, fromlist, level) 210 | except Exception as e: 211 | if "add_docstring" in str(e) and any(pkg in name for pkg in ['numpy', 'scipy', 'llm']): 212 | debug_print(f"Error de importación interceptado para {name}: {e}") 213 | 214 | # Aplicar parches antes de reintentar 215 | patch_add_docstring() 216 | 217 | try: 218 | return original_import(name, globals, locals, fromlist, level) 219 | except: 220 | debug_print(f"Importación de {name} falló completamente") 221 | raise 222 | raise 223 | 224 | # Solo aplicar el parche si Python 3.13+ y no está ya aplicado 225 | if sys.version_info >= (3, 13) and not hasattr(__builtins__.__import__, '_python313_patched'): 226 | __builtins__.__import__ = safe_import 227 | __builtins__.__import__._python313_patched = True 228 | debug_print("OK Monkey patch de importación aplicado") 229 | return True 230 | 231 | except Exception as e: 232 | debug_print(f"Error aplicando monkey patch: {e}") 233 | return False 234 | 235 | return False 236 | 237 | def apply_all_patches(): 238 | """ 239 | Aplica todos los parches de compatibilidad para Python 3.13.3 240 | 241 | Returns: 242 | dict: Resultados de cada parche aplicado 243 | """ 244 | if sys.version_info < (3, 13): 245 | debug_print("Python < 3.13 detectado, omitiendo parches de compatibilidad") 246 | return {"skipped": True, "reason": "python_version"} 247 | 248 | debug_print(f"Python {sys.version} 
detectado, aplicando parches de compatibilidad...") 249 | 250 | results = {} 251 | 252 | # Aplicar parches en orden de importancia 253 | try: 254 | results['monkey_patch'] = monkey_patch_c_extensions() 255 | results['add_docstring'] = patch_add_docstring() 256 | results['scipy'] = patch_scipy_extensions() 257 | results['llm_wrapper'] = create_safe_llm_wrapper() 258 | results['llm_plugins'] = patch_llm_plugin_imports() 259 | 260 | success_count = sum(1 for v in results.values() if v) 261 | total_count = len(results) 262 | 263 | debug_print(f"Parches aplicados: {success_count}/{total_count}") 264 | 265 | if success_count > 0: 266 | debug_print("OK Sistema de compatibilidad Python 3.13.3 activado") 267 | else: 268 | debug_print("⚠ Ningún parche fue aplicado exitosamente") 269 | 270 | except Exception as e: 271 | debug_print(f"Error aplicando parches: {e}") 272 | results['error'] = str(e) 273 | 274 | return results 275 | 276 | def is_python313_compatible(): 277 | """ 278 | Verifica si el sistema actual es compatible con los parches 279 | 280 | Returns: 281 | bool: True si es compatible y los parches pueden aplicarse 282 | """ 283 | return ( 284 | sys.version_info >= (3, 13) and 285 | hasattr(sys, 'frozen') and 286 | getattr(sys, 'frozen', False) 287 | ) 288 | 289 | # Auto-aplicar parches si estamos en un entorno congelado con Python 3.13+ 290 | if is_python313_compatible() and DEBUG: 291 | debug_print("Aplicando parches de compatibilidad automáticamente...") 292 | apply_all_patches() 293 | -------------------------------------------------------------------------------- /po/zh/LC_MESSAGES/gtk-llm-chat.po: -------------------------------------------------------------------------------- 1 | # Chinese (Simplified) translations for gtk-llm-chat package. 2 | # Copyright (C) 2025 THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the gtk-llm-chat package. 4 | # Sebastian Silva , 2025. 
5 | # 6 | #, fuzzy 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: gtk-llm-chat 0.1\n" 10 | "Report-Msgid-Bugs-To: your@email.com\n" 11 | "POT-Creation-Date: 2025-06-04 18:35-0500\n" 12 | "PO-Revision-Date: 2025-06-04 18:55-0500\n" 13 | "Last-Translator: Sebastian Silva \n" 14 | "Language-Team: Chinese (Simplified) \n" 15 | "Language: zh_CN\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Plural-Forms: nplurals=1; plural=0;\n" 20 | 21 | #: gtk_llm_chat/chat_window.py:135 22 | msgid "Model Settings" 23 | msgstr "模型设置" 24 | 25 | #: gtk_llm_chat/chat_window.py:141 26 | msgid "Rename" 27 | msgstr "重命名" 28 | 29 | #: gtk_llm_chat/chat_window.py:211 30 | msgid "Send" 31 | msgstr "发送" 32 | 33 | #: gtk_llm_chat/chat_sidebar.py:65 gtk_llm_chat/chat_sidebar.py:278 34 | #: gtk_llm_chat/wide_model_selector.py:454 35 | msgid "Model" 36 | msgstr "模型" 37 | 38 | #: gtk_llm_chat/chat_sidebar.py:69 39 | msgid "Change Model" 40 | msgstr "更改模型" 41 | 42 | #: gtk_llm_chat/chat_sidebar.py:70 gtk_llm_chat/chat_sidebar.py:312 43 | #: gtk_llm_chat/chat_sidebar.py:432 gtk_llm_chat/wide_model_selector.py:516 44 | msgid "Provider" 45 | msgstr "提供者" 46 | 47 | #: gtk_llm_chat/chat_sidebar.py:79 48 | msgid "Set as Default Model" 49 | msgstr "设为默认模型" 50 | 51 | #: gtk_llm_chat/chat_sidebar.py:88 gtk_llm_chat/chat_sidebar.py:138 52 | msgid "Model Parameters" 53 | msgstr "模型参数" 54 | 55 | #: gtk_llm_chat/chat_sidebar.py:97 56 | msgid "Conversation" 57 | msgstr "对话" 58 | 59 | #: gtk_llm_chat/chat_sidebar.py:100 gtk_llm_chat/chat_application.py:417 60 | msgid "Delete Conversation" 61 | msgstr "删除对话" 62 | 63 | #: gtk_llm_chat/chat_sidebar.py:110 64 | msgid "Information" 65 | msgstr "信息" 66 | 67 | #: gtk_llm_chat/chat_sidebar.py:112 68 | msgid "About" 69 | msgstr "关于" 70 | 71 | #: gtk_llm_chat/chat_sidebar.py:121 72 | msgid "Actions" 73 | msgstr "操作" 74 | 75 | #: gtk_llm_chat/chat_sidebar.py:129 76 | msgid "Model Selector" 77 
| msgstr "模型选择器" 78 | 79 | #: gtk_llm_chat/chat_sidebar.py:145 80 | msgid "Temperature" 81 | msgstr "温度" 82 | 83 | #: gtk_llm_chat/chat_sidebar.py:158 84 | msgid "System Prompt" 85 | msgstr "系统提示" 86 | 87 | #: gtk_llm_chat/chat_sidebar.py:165 88 | msgid "Parameters" 89 | msgstr "参数" 90 | 91 | #: gtk_llm_chat/chat_sidebar.py:241 gtk_llm_chat/chat_sidebar.py:308 92 | #: gtk_llm_chat/chat_sidebar.py:310 gtk_llm_chat/chat_sidebar.py:432 93 | #: gtk_llm_chat/model_selection.py:129 94 | msgid "Unknown Provider" 95 | msgstr "未知提供者" 96 | 97 | #: gtk_llm_chat/chat_sidebar.py:247 98 | msgid "Set Default Model" 99 | msgstr "设置默认模型" 100 | 101 | #: gtk_llm_chat/chat_sidebar.py:248 102 | msgid "Do you want to set" 103 | msgstr "您想将模型" 104 | 105 | #: gtk_llm_chat/chat_sidebar.py:248 106 | msgid "from" 107 | msgstr "(来自提供者:" 108 | 109 | #: gtk_llm_chat/chat_sidebar.py:248 110 | msgid "as the default model for new conversations?" 111 | msgstr ")设置成新对话的默认模型吗?" 112 | 113 | #: gtk_llm_chat/chat_sidebar.py:251 gtk_llm_chat/chat_sidebar.py:330 114 | #: gtk_llm_chat/model_selector.py:291 gtk_llm_chat/chat_application.py:420 115 | #: gtk_llm_chat/wide_model_selector.py:310 116 | msgid "Cancel" 117 | msgstr "取消" 118 | 119 | #: gtk_llm_chat/chat_sidebar.py:252 120 | msgid "Set as Default" 121 | msgstr "设为默认" 122 | 123 | #: gtk_llm_chat/chat_sidebar.py:278 124 | msgid "set as default" 125 | msgstr "已设为默认" 126 | 127 | #: gtk_llm_chat/chat_sidebar.py:327 128 | msgid "Set System Prompt" 129 | msgstr "设置系统提示" 130 | 131 | #: gtk_llm_chat/chat_sidebar.py:328 132 | msgid "Enter the system prompt for the AI model:" 133 | msgstr "请输入 AI 模型的系统提示:" 134 | 135 | #: gtk_llm_chat/chat_sidebar.py:331 136 | msgid "Set" 137 | msgstr "设置" 138 | 139 | #: gtk_llm_chat/chat_sidebar.py:381 140 | msgid "Current" 141 | msgstr "当前" 142 | 143 | #: gtk_llm_chat/chat_sidebar.py:383 144 | msgid "Not set" 145 | msgstr "未设置" 146 | 147 | #: gtk_llm_chat/chat_sidebar.py:413 148 | msgid "This is the current default model" 149 
| msgstr "这是当前的默认模型" 150 | 151 | #: gtk_llm_chat/chat_sidebar.py:417 152 | msgid "Set as default model" 153 | msgstr "设为默认模型" 154 | 155 | #: gtk_llm_chat/model_selector.py:46 156 | msgid "Providers" 157 | msgstr "提供者" 158 | 159 | #: gtk_llm_chat/model_selector.py:50 160 | msgid "Models" 161 | msgstr "模型" 162 | 163 | #: gtk_llm_chat/model_selector.py:68 164 | msgid "Select Provider" 165 | msgstr "选择提供者" 166 | 167 | #: gtk_llm_chat/model_selector.py:95 168 | msgid "Select Model" 169 | msgstr "选择模型" 170 | 171 | #: gtk_llm_chat/model_selector.py:124 172 | msgid "No models found" 173 | msgstr "未找到模型" 174 | 175 | #: gtk_llm_chat/model_selector.py:145 176 | msgid "models" 177 | msgstr "模型" 178 | 179 | #: gtk_llm_chat/model_selector.py:150 180 | msgid "API key required" 181 | msgstr "需要 API 密钥" 182 | 183 | #: gtk_llm_chat/model_selector.py:152 184 | msgid "No models" 185 | msgstr "无模型" 186 | 187 | #: gtk_llm_chat/model_selector.py:210 gtk_llm_chat/model_selector.py:335 188 | msgid "API Key is configured" 189 | msgstr "API 密钥已配置" 190 | 191 | #: gtk_llm_chat/model_selector.py:211 gtk_llm_chat/model_selector.py:336 192 | msgid "Change Key" 193 | msgstr "更改密钥" 194 | 195 | #: gtk_llm_chat/model_selector.py:216 gtk_llm_chat/model_selector.py:340 196 | msgid "API Key Required" 197 | msgstr "需要 API 密钥" 198 | 199 | #: gtk_llm_chat/model_selector.py:217 gtk_llm_chat/model_selector.py:292 200 | #: gtk_llm_chat/model_selector.py:341 gtk_llm_chat/wide_model_selector.py:311 201 | msgid "Set Key" 202 | msgstr "设置密钥" 203 | 204 | #: gtk_llm_chat/model_selector.py:235 205 | msgid "No models available" 206 | msgstr "无可用模型" 207 | 208 | #: gtk_llm_chat/model_selector.py:236 209 | msgid "Configure an API key to access models from this provider" 210 | msgstr "配置 API 密钥以访问此提供者的模型" 211 | 212 | #: gtk_llm_chat/model_selector.py:241 gtk_llm_chat/wide_model_selector.py:251 213 | msgid "No models found for this provider" 214 | msgstr "此提供者未找到模型" 215 | 216 | #: gtk_llm_chat/model_selector.py:288 
gtk_llm_chat/wide_model_selector.py:307 217 | msgid "Enter API Key" 218 | msgstr "输入 API 密钥" 219 | 220 | #: gtk_llm_chat/model_selector.py:289 gtk_llm_chat/wide_model_selector.py:308 221 | msgid "Enter the API key for" 222 | msgstr "请输入 API 密钥:" 223 | 224 | #: gtk_llm_chat/model_selector.py:298 gtk_llm_chat/wide_model_selector.py:317 225 | msgid "Paste your API key here" 226 | msgstr "请在此粘贴您的 API 密钥" 227 | 228 | #: gtk_llm_chat/model_selector.py:356 229 | msgid "Model Selector Test" 230 | msgstr "模型选择器测试" 231 | 232 | #: gtk_llm_chat/tray_applet.py:103 gtk_llm_chat/tray_applet.py:239 233 | #: gtk_llm_chat/welcome.py:59 gtk_llm_chat/llm_client.py:22 234 | msgid "New Conversation" 235 | msgstr "新的对话" 236 | 237 | #: gtk_llm_chat/tray_applet.py:107 gtk_llm_chat/tray_applet.py:241 238 | msgid "Quit" 239 | msgstr "退出" 240 | 241 | #: gtk_llm_chat/tray_applet.py:210 gtk_llm_chat/chat_application.py:68 242 | msgid "" 243 | "\n" 244 | "Closing application..." 245 | msgstr "" 246 | "\n" 247 | "正在关闭应用程序..." 248 | 249 | #: gtk_llm_chat/tray_applet.py:225 250 | msgid "LLM Conversations" 251 | msgstr "LLM 对话" 252 | 253 | #: gtk_llm_chat/welcome.py:20 254 | msgid "Tray applet" 255 | msgstr "托盘应用" 256 | 257 | #: gtk_llm_chat/welcome.py:20 258 | msgid "Default Model" 259 | msgstr "默认模型" 260 | 261 | #: gtk_llm_chat/welcome.py:53 262 | msgid "Next" 263 | msgstr "下一步" 264 | 265 | #: gtk_llm_chat/welcome.py:117 266 | msgid "Own the conversation." 267 | msgstr "掌控对话。" 268 | 269 | #: gtk_llm_chat/welcome.py:118 270 | msgid "Use any model you want. Your conversations are stored locally." 
271 | msgstr "使用任何您想要的模型。您的对话存储在本地。" 272 | 273 | #: gtk_llm_chat/welcome.py:119 274 | msgid "This wizard will guide you through the initial setup" 275 | msgstr "此向导将引导您完成初始设置" 276 | 277 | #: gtk_llm_chat/welcome.py:125 278 | msgid "Start" 279 | msgstr "开始" 280 | 281 | #: gtk_llm_chat/welcome.py:157 282 | msgid "Access conversations from the convenience of your system tray" 283 | msgstr "从系统托盘便捷访问对话" 284 | 285 | #: gtk_llm_chat/welcome.py:163 286 | msgid "Would you like to start the applet with your session?" 287 | msgstr "您想随会话启动此应用吗?" 288 | 289 | #: gtk_llm_chat/welcome.py:173 290 | msgid "Yes, with my session" 291 | msgstr "是的,随会话启动" 292 | 293 | #: gtk_llm_chat/welcome.py:174 294 | msgid "No, only when I start the app" 295 | msgstr "不,仅当我启动应用时" 296 | 297 | #: gtk_llm_chat/welcome.py:209 298 | msgid "Loading model selection..." 299 | msgstr "正在加载模型选择..." 300 | 301 | #: gtk_llm_chat/welcome.py:219 302 | msgid "Ready to start!" 303 | msgstr "准备开始!" 304 | 305 | #: gtk_llm_chat/welcome.py:429 306 | msgid "Set API Key" 307 | msgstr "设置 API 密钥" 308 | 309 | #: gtk_llm_chat/welcome.py:429 310 | msgid "Change API Key" 311 | msgstr "更改 API 密钥" 312 | 313 | #: gtk_llm_chat/chat_application.py:193 314 | msgid "Error: _version.py not found" 315 | msgstr "错误:找不到 _version.py" 316 | 317 | #: gtk_llm_chat/chat_application.py:418 318 | msgid "Are you sure you want to delete the conversation?" 319 | msgstr "你确定要删除对话吗?" 320 | 321 | #: gtk_llm_chat/chat_application.py:421 322 | msgid "Delete" 323 | msgstr "删除" 324 | 325 | #: gtk_llm_chat/chat_application.py:443 326 | msgid "Gtk LLM Chat" 327 | msgstr "Gtk LLM 聊天" 328 | 329 | #: gtk_llm_chat/chat_application.py:446 330 | msgid "A frontend for LLM" 331 | msgstr "LLM 的前端" 332 | 333 | #: gtk_llm_chat/wide_model_selector.py:92 334 | msgid "" 335 | "Please select a provider from the list on the left.\n" 336 | "Then, choose a model from the list that appears here." 
337 | msgstr "请从左侧列表中选择一个提供商。\n然后,从这里出现的列表中选择一个模型。" 338 | 339 | #: gtk_llm_chat/wide_model_selector.py:127 340 | msgid "Most AI models require an API key" 341 | msgstr "大多数 AI 模型需要 API 密钥" 342 | 343 | #: gtk_llm_chat/wide_model_selector.py:134 344 | msgid "" 345 | "You'll need to register with each provider to obtain these authentication " 346 | "tokens." 347 | msgstr "您需要向每个提供者注册以获取这些身份验证令牌。" 348 | 349 | #: gtk_llm_chat/wide_model_selector.py:147 350 | msgid "No Selection" 351 | msgstr "未选择" 352 | 353 | #: gtk_llm_chat/wide_model_selector.py:162 354 | msgid "No models or providers found." 355 | msgstr "未找到模型或提供者。" 356 | 357 | #: gtk_llm_chat/wide_model_selector.py:163 358 | msgid "Error" 359 | msgstr "错误" 360 | 361 | #: gtk_llm_chat/wide_model_selector.py:442 362 | msgid "Model information not available" 363 | msgstr "模型信息不可用" 364 | 365 | #: gtk_llm_chat/wide_model_selector.py:443 366 | msgid "Unable to retrieve model details" 367 | msgstr "无法检索模型详细信息" 368 | 369 | #: gtk_llm_chat/wide_model_selector.py:467 370 | msgid "Aliases" 371 | msgstr "别名" 372 | 373 | #: gtk_llm_chat/wide_model_selector.py:478 374 | msgid "API Key" 375 | msgstr "API 密钥" 376 | 377 | #: gtk_llm_chat/wide_model_selector.py:487 378 | msgid "Required • Set" 379 | msgstr "必需 • 已设置" 380 | 381 | #: gtk_llm_chat/wide_model_selector.py:493 382 | msgid "Required • Not set" 383 | msgstr "必需 • 未设置" 384 | 385 | #: gtk_llm_chat/wide_model_selector.py:499 386 | msgid "Not required" 387 | msgstr "非必需" 388 | 389 | #: gtk_llm_chat/wide_model_selector.py:510 390 | #: gtk_llm_chat/wide_model_selector.py:511 391 | msgid "Unknown" 392 | msgstr "未知" 393 | 394 | #: gtk_llm_chat/wide_model_selector.py:512 395 | msgid "Plugin" 396 | msgstr "插件" 397 | 398 | #: gtk_llm_chat/single_instance.py:26 gtk_llm_chat/single_instance.py:32 399 | msgid "Another instance is already running." 
400 | msgstr "另一个实例已在运行。" 401 | 402 | #: gtk_llm_chat/llm_client.py:234 403 | msgid "LLMClient: Ignoring invalid temperature:" 404 | msgstr "LLMClient:忽略无效温度:" 405 | 406 | #: gtk_llm_chat/llm_client.py:269 407 | msgid "LLMClient: Starting stream processing..." 408 | msgstr "LLMClient:开始流处理..." 409 | 410 | #: gtk_llm_chat/llm_client.py:272 411 | msgid "LLMClient: Stream processing cancelled externally." 412 | msgstr "LLMClient:流处理已在外部取消。" 413 | 414 | #: gtk_llm_chat/llm_client.py:278 415 | msgid "LLMClient: Stream finished normally." 416 | msgstr "LLMClient:流已正常完成。" 417 | 418 | #: gtk_llm_chat/model_selection.py:119 gtk_llm_chat/model_selection.py:125 419 | msgid "Local/Other" 420 | msgstr "本地/其他" 421 | 422 | #~ msgid "Settings" 423 | #~ msgstr "设置" 424 | 425 | #~ msgid "Error reading keys file" 426 | #~ msgstr "读取密钥文件出错" 427 | 428 | #~ msgid "Check File" 429 | #~ msgstr "检查文件" 430 | 431 | #~ msgid "Error accessing keys file" 432 | #~ msgstr "访问密钥文件出错" 433 | 434 | #~ msgid "Check Permissions" 435 | #~ msgstr "检查权限" 436 | 437 | #~ msgid "LLMClient: Cancel request received." 438 | #~ msgstr "LLMClient:收到取消请求。" 439 | 440 | #~ msgid "LLMClient: Terminating active stream thread." 441 | #~ msgstr "LLMClient:正在终止活动流线程。" 442 | 443 | #~ msgid "LLMClient: No active stream thread to cancel." 444 | #~ msgstr "LLMClient:没有要取消的活动流线程。" 445 | 446 | #~ msgid "LLMClient: Error - Conversación no disponible para cargar historial." 447 | #~ msgstr "LLMClient:错误 - 无法加载历史记录,对话不可用。" 448 | 449 | #, fuzzy 450 | #~ msgid "LLMClient: Historial cargado. Total de respuestas en conversación: " 451 | #~ msgstr "LLMClient:历史记录已加载。对话中的总回复数:" 452 | 453 | #~ msgid "Exiting..." 454 | #~ msgstr "正在退出..." 455 | 456 | #~ msgid "LLM Chat" 457 | #~ msgstr "LLM 聊天" 458 | 459 | #~ msgid "Error: conversation_id is required to add to history." 460 | #~ msgstr "错误:需要 conversation_id 才能添加到历史记录。" 461 | 462 | #~ msgid "Error: conversation_id is required to create the conversation." 
"""
resource_manager.py - Centralized resource manager for GTK LLM Chat

Resolves paths for images, icons and other resources consistently in
both development and frozen environments (PyInstaller / Flatpak).
"""

import os
import sys
from typing import Optional
import gi
gi.require_version('Gtk', '4.0')
gi.require_version('GdkPixbuf', '2.0')
gi.require_version('Gdk', '4.0')
from gi.repository import Gtk, GdkPixbuf, Gio, Gdk, GLib
from .debug_utils import debug_print


class ResourceManager:
    """Centralized resource manager for the application."""

    def __init__(self):
        # True when running from a frozen bundle (PyInstaller, Nuitka, ...).
        self._is_frozen = getattr(sys, 'frozen', False)
        # self._base_path is used for development and PyInstaller.
        # For Flatpak, the main resource base is /app (see _get_base_path()).
        if self._is_frozen and hasattr(sys, '_MEIPASS'):  # PyInstaller
            self._base_path = sys._MEIPASS
        elif not self._is_frozen:  # Development checkout: project root
            self._base_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        else:  # Frozen but not PyInstaller (could be Flatpak, Nuitka, etc.)
            self._base_path = os.path.dirname(sys.executable)

        # Guard so the icon theme search paths are only configured once.
        self._icon_theme_configured = False

    def _get_base_path(self) -> str:
        """Return the application's base resource path for this environment."""
        # Flatpak detection is done lazily here; every other case was
        # resolved once in __init__.
        is_flatpak_env = os.environ.get('FLATPAK_ID') or os.path.exists('/.flatpak-info')
        if is_flatpak_env:
            return '/app'
        return self._base_path

    def get_image_path(self, relative_path: str) -> Optional[str]:
        """
        Resolve the full path of an image.

        Args:
            relative_path: Path relative to the project base.

        Returns:
            Absolute path to the image file, or None if it does not exist.
        """
        # Base path appropriate for the environment (e.g. /app under Flatpak).
        current_search_base = self._get_base_path()

        # If the path is already absolute (e.g. /app/share/...), use it as-is.
        if os.path.isabs(relative_path) and os.path.exists(relative_path):
            return relative_path

        possible_paths = []

        # Direct path from the current base.
        possible_paths.append(os.path.join(current_search_base, relative_path))

        if self._is_frozen and not (os.environ.get('FLATPAK_ID') or os.path.exists('/.flatpak-info')):  # PyInstaller
            # PyInstaller-specific fallback: bare icon filenames may live under
            # the bundled hicolor 48x48 apps directory.
            if not os.path.isabs(relative_path) and not relative_path.startswith("gtk_llm_chat/"):
                possible_paths.append(os.path.join(current_search_base, "gtk_llm_chat", "hicolor", "48x48", "apps", relative_path))

        for path in possible_paths:
            if os.path.exists(path):
                return path

        debug_print(f"Warning: Resource not found: {relative_path}")
        debug_print(f"Searched in: {possible_paths} (current_search_base: {current_search_base})")
        return None

    def get_icon_pixbuf(self, icon_path: str, size: int = 64) -> Optional[GdkPixbuf.Pixbuf]:
        """
        Load an icon as a GdkPixbuf at the requested size.

        Args:
            icon_path: Path to the icon, relative to the resource base.
            size: Icon size in pixels (scaled preserving aspect ratio).

        Returns:
            A GdkPixbuf.Pixbuf, or None if the icon cannot be loaded.
        """
        full_path = self.get_image_path(icon_path)
        if not full_path:
            return None

        try:
            return GdkPixbuf.Pixbuf.new_from_file_at_scale(
                full_path, size, size, True
            )
        except Exception as e:
            debug_print(f"Error loading icon {full_path}: {e}")
            return None

    def setup_icon_theme(self):
        """Configure the GTK icon theme to include the bundled custom icons."""
        # Idempotent: only the first successful call does any work.
        if self._icon_theme_configured:
            return

        if not Gtk.is_initialized():
            debug_print("[FAIL] GTK not initialized, skipping icon theme setup")
            return

        # Ensure this only runs on the GTK main thread; reschedule otherwise.
        try:
            if not GLib.main_context_default().is_owner():
                debug_print("[WARN] setup_icon_theme called from non-main thread, scheduling for main thread")
                # idle_add callback returns None, so it runs exactly once.
                GLib.idle_add(self.setup_icon_theme)
                return
        except Exception as e:
            debug_print(f"[WARN] Could not check main thread context: {e}")

        try:
            display = Gdk.Display.get_default()
            if not display:
                debug_print("[FAIL] No default display available")
                return

            icon_theme = Gtk.IconTheme.get_for_display(display)

            is_flatpak_env = os.environ.get('FLATPAK_ID') or os.path.exists('/.flatpak-info')

            if is_flatpak_env:
                # Under Flatpak the icons live in /app/share/icons and should
                # already be found via XDG_DATA_DIRS and the icon cache;
                # adding the path explicitly is a safety measure.
                flatpak_icon_share_dir = "/app/share/icons"
                if os.path.exists(flatpak_icon_share_dir):
                    icon_theme.add_search_path(flatpak_icon_share_dir)
                    debug_print(f"[OK] Added Flatpak icon search path: {flatpak_icon_share_dir}")
                else:
                    debug_print(f"[WARN] Flatpak icon share dir not found: {flatpak_icon_share_dir}")

            elif self._is_frozen:  # PyInstaller (not Flatpak)
                # self._base_path is sys._MEIPASS for PyInstaller; icons are
                # bundled under gtk_llm_chat/hicolor inside the archive.
                pyinstaller_icon_hicolor_path = os.path.join(self._base_path, "gtk_llm_chat", "hicolor")
                if os.path.exists(pyinstaller_icon_hicolor_path):
                    icon_theme.add_search_path(pyinstaller_icon_hicolor_path)
                    debug_print(f"[OK] Added PyInstaller hicolor search path: {pyinstaller_icon_hicolor_path}")
                else:
                    debug_print(f"[WARN] PyInstaller hicolor path not found: {pyinstaller_icon_hicolor_path}")
                # Also honour a full custom theme structure if one is bundled.
                custom_theme_dir = os.path.join(self._base_path, "gtk_llm_chat", "my_custom_theme")
                if os.path.exists(os.path.join(custom_theme_dir, "index.theme")):
                    icon_theme.add_search_path(custom_theme_dir)
                    debug_print(f"[OK] Added custom theme path for PyInstaller: {custom_theme_dir}")

            else:  # Development
                # self._base_path is the project root in a dev checkout.
                dev_icon_hicolor_path = os.path.join(self._base_path, "gtk_llm_chat", "hicolor")
                if os.path.exists(dev_icon_hicolor_path):
                    icon_theme.add_search_path(dev_icon_hicolor_path)
                    debug_print(f"[OK] Added development hicolor search path: {dev_icon_hicolor_path}")
                else:
                    debug_print(f"[WARN] Development hicolor path not found: {dev_icon_hicolor_path}")

            self._icon_theme_configured = True
            debug_print("[OK] Icon theme configured successfully")

        except Exception as e:
            debug_print(f"[FAIL] Error configuring icon theme: {e}")

    def create_image_widget(self, image_path: str, size: int = -1) -> Gtk.Image:
        """
        Create a Gtk.Image widget from an image path.

        Args:
            image_path: Path to the image, relative to the resource base.
            size: Icon size in pixels (-1 keeps the original size).

        Returns:
            A Gtk.Image widget ("image-missing" fallback if unresolvable).
        """
        full_path = self.get_image_path(image_path)

        if full_path and os.path.exists(full_path):
            if size > 0:
                pixbuf = self.get_icon_pixbuf(image_path, size)
                if pixbuf:
                    image = Gtk.Image.new_from_pixbuf(pixbuf)
                else:
                    image = Gtk.Image.new_from_icon_name("image-missing")
            else:
                image = Gtk.Image.new_from_file(full_path)
        else:
            # Fall back to a stock system icon.
            image = Gtk.Image.new_from_icon_name("image-missing")
            debug_print(f"Using fallback icon for: {image_path}")

        return image

    def set_widget_icon_name(self, widget, icon_name: str, fallback: str = "image-missing"):
        """
        Safely set the icon of a widget that supports ``set_icon_name``.

        If the icon is missing from the theme, the fallback is used and a
        warning is logged.
        """
        self.setup_icon_theme()
        display = Gdk.Display.get_default()
        icon_theme = Gtk.IconTheme.get_for_display(display) if display else None
        if icon_theme and icon_theme.has_icon(icon_name):
            widget.set_icon_name(icon_name)
        else:
            debug_print(f"[WARN] Icono '{icon_name}' no encontrado, usando fallback '{fallback}'")
            widget.set_icon_name(fallback)

    def create_icon_widget(self, icon_name: str, size: int = 48) -> Gtk.Image:
        """
        Create a Gtk.Image widget from an icon name.

        Args:
            icon_name: Icon name (e.g. "org.fuentelibre.gtk_llm_Chat")
            size: Icon size in pixels.

        Returns:
            A Gtk.Image widget; file-based fallbacks are tried before
            degrading to the stock "image-missing" icon.
        """
        self.setup_icon_theme()
        display = Gdk.Display.get_default()
        icon_theme = Gtk.IconTheme.get_for_display(display) if display else None
        if icon_theme and icon_theme.has_icon(icon_name):
            image = Gtk.Image.new_from_icon_name(icon_name)
            image.set_pixel_size(size)
            return image
        # Icon not in the theme: try known on-disk locations.
        debug_print(f"[WARN] Icono '{icon_name}' no encontrado en el tema GTK. Intentando rutas alternativas...")
        current_search_base = self._get_base_path()
        is_flatpak_env = os.environ.get('FLATPAK_ID') or os.path.exists('/.flatpak-info')
        fallback_paths_to_try = []
        if icon_name.endswith('-symbolic'):
            if is_flatpak_env:
                fallback_paths_to_try.append(f"share/icons/hicolor/symbolic/apps/{icon_name}.svg")
            else:
                fallback_paths_to_try.append(f"gtk_llm_chat/hicolor/symbolic/apps/{icon_name}.svg")
        else:
            if is_flatpak_env:
                fallback_paths_to_try.extend([
                    f"share/icons/hicolor/256x256/apps/{icon_name}.png",
                    f"share/icons/hicolor/scalable/apps/{icon_name}.svg",
                    f"share/icons/hicolor/48x48/apps/{icon_name}.png",
                ])
            else:
                fallback_paths_to_try.extend([
                    f"gtk_llm_chat/hicolor/256x256/apps/{icon_name}.png",
                    f"gtk_llm_chat/hicolor/scalable/apps/{icon_name}.svg",
                    f"gtk_llm_chat/hicolor/48x48/apps/{icon_name}.png",
                ])
        for fallback_path_str in fallback_paths_to_try:
            resolved_path = self.get_image_path(fallback_path_str)
            if resolved_path:
                debug_print(f"[OK] Fallback para '{icon_name}' encontrado: {resolved_path}")
                return self.create_image_widget(resolved_path, size)
        debug_print(f"[FAIL] Icono '{icon_name}' no encontrado en ningún lado. Usando 'image-missing'.")
        image = Gtk.Image.new_from_icon_name("image-missing")
        image.set_pixel_size(size)
        return image

    def debug_resources(self):
        """Log debug information about resource locations."""
        # Consistently use debug_print (the original mixed raw print() here,
        # which bypassed the debug flag).
        debug_print("=== RESOURCE MANAGER DEBUG ===")
        debug_print(f"Frozen: {self._is_frozen}")
        debug_print(f"Base path: {self._base_path}")

        if hasattr(sys, '_MEIPASS'):
            debug_print(f"_MEIPASS: {sys._MEIPASS}")

        # Probe a couple of well-known resources.
        test_resources = [
            "gtk_llm_chat/hicolor/48x48/apps/org.fuentelibre.gtk_llm_Chat.png",
            "gtk_llm_chat/hicolor",
        ]

        for resource in test_resources:
            path = self.get_image_path(resource)
            exists = path and os.path.exists(path)
            debug_print(f"Resource {resource}: {'[OK]' if exists else '[FAIL]'} ({path})")

        debug_print("=== END RESOURCE DEBUG ===")


# Global resource manager instance shared by the application.
resource_manager = ResourceManager()
self.italic_tag = Gtk.TextTag(name="italic") 26 | self.italic_tag.set_property("style", Pango.Style.ITALIC) 27 | self.tag_table.add(self.italic_tag) 28 | 29 | self.strike_tag = Gtk.TextTag(name="strike") 30 | self.strike_tag.set_property("strikethrough", True) 31 | self.tag_table.add(self.strike_tag) 32 | 33 | self.hr_tag = Gtk.TextTag(name="hr_line") 34 | self.hr_tag.set_property("foreground", "#666666") 35 | self.hr_tag.set_property("scale", 0.3) 36 | self.hr_tag.set_property("rise", -500) 37 | self.hr_tag.set_property("justification", Gtk.Justification.CENTER) 38 | self.tag_table.add(self.hr_tag) 39 | 40 | self.heading_tags = { 41 | '1': Gtk.TextTag(name="h1"), 42 | '2': Gtk.TextTag(name="h2"), 43 | '3': Gtk.TextTag(name="h3"), 44 | '4': Gtk.TextTag(name="h4"), 45 | '5': Gtk.TextTag(name="h5"), 46 | } 47 | self.heading_tags['1'].set_property("weight", Pango.Weight.BOLD) 48 | self.heading_tags['1'].set_property("size", 24 * Pango.SCALE) 49 | self.heading_tags['2'].set_property("weight", Pango.Weight.BOLD) 50 | self.heading_tags['2'].set_property("size", 20 * Pango.SCALE) 51 | self.heading_tags['3'].set_property("weight", Pango.Weight.BOLD) 52 | self.heading_tags['3'].set_property("size", 16 * Pango.SCALE) 53 | self.heading_tags['4'].set_property("weight", Pango.Weight.BOLD) 54 | self.heading_tags['4'].set_property("size", 12 * Pango.SCALE) 55 | self.heading_tags['5'].set_property("weight", Pango.Weight.BOLD) 56 | self.heading_tags['5'].set_property("size", 10 * Pango.SCALE) 57 | for tag in self.heading_tags.values(): 58 | self.tag_table.add(tag) 59 | 60 | self.code_tag = Gtk.TextTag(name="code") 61 | self.code_tag.set_property("family", "monospace") 62 | self.code_tag.set_property("background", "gray") 63 | self.tag_table.add(self.code_tag) 64 | 65 | self.code_inline_tag = Gtk.TextTag(name="code_inline") 66 | self.code_inline_tag.set_property("family", "monospace") 67 | self.code_inline_tag.set_property("background", "#444444") 68 | 
self.tag_table.add(self.code_inline_tag) 69 | 70 | self.thinking_tag = Gtk.TextTag(name="thinking") 71 | self.thinking_tag.set_property("style", Pango.Style.ITALIC) 72 | self.thinking_tag.set_property("scale", 0.8) 73 | self.thinking_tag.set_property("left-margin", 20) 74 | self.thinking_tag.set_property("right-margin", 20) 75 | self.tag_table.add(self.thinking_tag) 76 | 77 | self.blockquote_tag = Gtk.TextTag(name="blockquote") 78 | self.blockquote_tag.set_property("left-margin", 30) 79 | self.blockquote_tag.set_property("style", Pango.Style.ITALIC) 80 | self.blockquote_tag.set_property("background", "gray") 81 | self.tag_table.add(self.blockquote_tag) 82 | 83 | self.list_tags = { 84 | 1: Gtk.TextTag(name="list_1"), 85 | 2: Gtk.TextTag(name="list_2"), 86 | 3: Gtk.TextTag(name="list_3"), 87 | } 88 | self.list_tags[1].set_property("left-margin", 30) 89 | self.list_tags[2].set_property("left-margin", 50) 90 | self.list_tags[3].set_property("left-margin", 70) 91 | for tag in self.list_tags.values(): 92 | self.tag_table.add(tag) 93 | 94 | self.in_list_item = False 95 | self.in_ordered_list = False 96 | self.current_tags = [] 97 | self.list_level = 0 98 | 99 | def set_markdown(self, text): 100 | return self.render_markdown(text) 101 | 102 | def process_thinking_tags(self, text): 103 | """ 104 | Procesa las etiquetas o en el texto. 105 | Devuelve una lista de fragmentos alternando texto normal y pensamiento. 106 | Cada fragmento es una tupla (texto, es_pensamiento). 
107 | """ 108 | fragments = [] 109 | think_pattern = re.compile(r'(.*?)', re.DOTALL) 110 | thinking_pattern = re.compile(r'(.*?)', re.DOTALL) 111 | 112 | all_matches = [] 113 | for pattern in [think_pattern, thinking_pattern]: 114 | for match in pattern.finditer(text): 115 | all_matches.append( 116 | (match.start(), match.end(), match.group(1))) 117 | 118 | all_matches.sort(key=lambda x: x[0]) 119 | 120 | last_end = 0 121 | for start, end, content in all_matches: 122 | if start > last_end: 123 | fragments.append((text[last_end:start], False)) 124 | fragments.append((content, True)) 125 | last_end = end 126 | 127 | if last_end < len(text): 128 | fragments.append((text[last_end:], False)) 129 | 130 | return fragments 131 | 132 | def render_markdown(self, text): 133 | self.buffer.set_text("", 0) 134 | fragments = self.process_thinking_tags(text) 135 | 136 | for fragment_text, is_thinking in fragments: 137 | if is_thinking: 138 | self.insert_thinking(fragment_text) 139 | else: 140 | self.render_markdown_fragment(fragment_text) 141 | 142 | def render_markdown_fragment(self, text): 143 | tokens = self.md.parse(text) 144 | self.apply_pango_format(tokens) 145 | 146 | def apply_pango_format(self, tokens): 147 | for token in tokens: 148 | if token.type == 'strong_open': 149 | self.apply_tag(self.bold_tag) 150 | elif token.type == 'strong_close': 151 | self.remove_tag(self.bold_tag) 152 | elif token.type == 'em_open': 153 | self.apply_tag(self.italic_tag) 154 | elif token.type == 'em_close': 155 | self.remove_tag(self.italic_tag) 156 | elif token.type == 's_open': 157 | self.apply_tag(self.strike_tag) 158 | elif token.type == 's_close': 159 | self.remove_tag(self.strike_tag) 160 | 161 | elif token.type == 'text': 162 | self.insert_text(token.content) 163 | elif token.type == 'paragraph_open': 164 | pass 165 | elif token.type == 'paragraph_close': 166 | self.insert_text("\n\n") 167 | 168 | elif token.type == 'heading_open': 169 | level = token.tag[1] 170 | if level in 
self.heading_tags: 171 | self.apply_tag(self.heading_tags[level]) 172 | elif token.type == 'heading_close': 173 | level = token.tag[1] 174 | self.remove_tag(self.heading_tags[level]) 175 | self.insert_text("\n\n") 176 | elif token.type == 'fence': 177 | self.apply_tag(self.code_tag) 178 | self.insert_text(token.content) 179 | self.remove_tag(self.code_tag) 180 | self.insert_text("\n") 181 | elif token.type == 'inline': 182 | for child in token.children: 183 | if child.type == 'text': 184 | self.insert_text(child.content) 185 | elif child.type == 'em_open': 186 | self.apply_tag(self.italic_tag) 187 | elif child.type == 'em_close': 188 | self.remove_tag(self.italic_tag) 189 | elif child.type == 'strong_open': 190 | self.apply_tag(self.bold_tag) 191 | elif child.type == 'strong_close': 192 | self.remove_tag(self.bold_tag) 193 | elif child.type == 'code_inline': 194 | self.apply_tag(self.code_inline_tag) 195 | self.insert_text(child.content) 196 | self.remove_tag(self.code_inline_tag) 197 | # Manejar tachado en elementos inline 198 | elif child.type == 's_open': 199 | self.apply_tag(self.strike_tag) 200 | elif child.type == 's_close': 201 | self.remove_tag(self.strike_tag) 202 | elif token.type == 'blockquote_open': 203 | self.insert_text("\n") 204 | self.apply_tag(self.blockquote_tag) 205 | elif token.type == 'blockquote_close': 206 | self.remove_tag(self.blockquote_tag) 207 | self.insert_text("\n") 208 | elif token.type == 'bullet_list_open': 209 | self.list_level += 1 210 | self.apply_tag(self.list_tags[min(self.list_level, 3)]) 211 | elif token.type == 'bullet_list_close': 212 | self.list_level -= 1 213 | current_level = min(self.list_level + 1, 3) 214 | self.remove_tag(self.list_tags[current_level]) 215 | elif token.type == 'ordered_list_open': 216 | self.list_level += 1 217 | self.in_ordered_list = True 218 | self.apply_tag(self.list_tags[min(self.list_level, 3)]) 219 | elif token.type == 'ordered_list_close': 220 | self.list_level -= 1 221 | self.in_ordered_list 
= False 222 | current_level = min(self.list_level + 1, 3) 223 | self.remove_tag(self.list_tags[current_level]) 224 | elif token.type == 'list_item_open': 225 | self.in_list_item = True 226 | if self.in_ordered_list: 227 | item_number = token.info 228 | self.insert_text(f"{item_number}. ") 229 | else: 230 | if self.list_level == 1: 231 | self.insert_text("• ") 232 | elif self.list_level == 2: 233 | self.insert_text("◦ ") 234 | else: 235 | self.insert_text("▪ ") 236 | elif token.type == 'list_item_close': 237 | self.in_list_item = False 238 | elif token.type == 'hr': 239 | self.insert_text("\n") 240 | self.apply_tag(self.hr_tag) 241 | self.insert_text("-" * 35) 242 | self.remove_tag(self.hr_tag) 243 | self.insert_text("\n\n") 244 | elif token.type == 'html_block': 245 | pass 246 | elif token.type == 'code_block': 247 | self.insert_text("\n") 248 | self.insert_text(token.content) 249 | self.insert_text("\n") 250 | else: 251 | print("Unknown markdown token:", token.type, flush=True) 252 | 253 | 254 | def insert_text(self, text): 255 | buf = self.buffer 256 | buf.create_mark("insert_start", buf.get_end_iter(), left_gravity=True) 257 | buf.insert(buf.get_end_iter(), text, -1) 258 | start = buf.get_iter_at_mark(buf.get_mark("insert_start")) 259 | end = start.copy() 260 | end.forward_chars(len(text)) 261 | for tag in self.current_tags: 262 | buf.apply_tag(tag, start, end) 263 | buf.delete_mark(buf.get_mark("insert_start")) 264 | 265 | def insert_thinking(self, text): 266 | buf = self.buffer 267 | buf.create_mark("think_start", buf.get_end_iter(), left_gravity=True) 268 | buf.insert(buf.get_end_iter(), text, -1) 269 | start = buf.get_iter_at_mark(buf.get_mark("think_start")) 270 | end = start.copy() 271 | end.forward_chars(len(text)) 272 | buf.apply_tag(self.thinking_tag, start, end) 273 | buf.delete_mark(buf.get_mark("think_start")) 274 | buf.insert(buf.get_end_iter(), "\n", -1) 275 | 276 | def apply_tag(self, tag): 277 | if tag not in self.current_tags: 278 | 
self.current_tags.append(tag) 279 | start, end = self.buffer.get_bounds() 280 | if not start.equal(end): 281 | self.buffer.apply_tag(tag, end, self.buffer.get_end_iter()) 282 | 283 | def remove_tag(self, tag): 284 | if tag in self.current_tags: 285 | self.current_tags.remove(tag) 286 | start, end = self.buffer.get_bounds() 287 | self.buffer.remove_tag(tag, end, self.buffer.get_end_iter()) 288 | 289 | 290 | if __name__ == "__main__": 291 | app = Gtk.Application(application_id='org.fuentelibre.MarkdownDemo') 292 | 293 | def on_activate(app): 294 | win = Gtk.ApplicationWindow(application=app) 295 | win.set_title("Markdown TextView") 296 | win.set_default_size(400, 300) 297 | 298 | markdown_text = """# Título 1\n## Título 2\n### Título 3\nEste es un 299 | **texto en negrita** y _cursiva_. 300 | \n```\n" 301 | Este es un bloque de código.\n" 302 | var x = 10;\n" 303 | ```\n" 304 | \nLista de ejemplo:\n" 305 | * Elemento 1\n * Subelemento 1.1\n * Subelemento 1.2\n* Elemento 2 306 | * Elemento 3\n" 307 | \nLista numerada:\n" 308 | 1. Primer elemento\n" 309 | 2. Segundo elemento\n" 310 | 1. Subelemento 2.1\n" 311 | \nTexto con `código en línea` y emoji 😊\n" 312 | hola `amigo` 😊\n""" 313 | 314 | markdown_view = MarkdownView() 315 | scrolled_window = Gtk.ScrolledWindow() 316 | scrolled_window.set_child(markdown_view) 317 | win.set_child(scrolled_window) 318 | 319 | markdown_view.render_markdown(markdown_text) 320 | win.present() 321 | 322 | app.connect('activate', on_activate) 323 | app.run() 324 | -------------------------------------------------------------------------------- /po/pt/LC_MESSAGES/gtk-llm-chat.po: -------------------------------------------------------------------------------- 1 | # Portuguese translations for gtk-llm-chat package. 2 | # Copyright (C) 2025 THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the gtk-llm-chat package. 4 | # Sebastian Silva , 2025. 
5 | # 6 | #, fuzzy 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: gtk-llm-chat 0.1\n" 10 | "Report-Msgid-Bugs-To: your@email.com\n" 11 | "POT-Creation-Date: 2025-06-04 18:35-0500\n" 12 | "PO-Revision-Date: 2025-06-04 18:50-0500\n" 13 | "Last-Translator: Sebastian Silva \n" 14 | "Language-Team: Portuguese \n" 15 | "Language: pt\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: gtk_llm_chat/chat_window.py:135 22 | msgid "Model Settings" 23 | msgstr "Configurações do modelo" 24 | 25 | #: gtk_llm_chat/chat_window.py:141 26 | msgid "Rename" 27 | msgstr "Renomear" 28 | 29 | #: gtk_llm_chat/chat_window.py:211 30 | msgid "Send" 31 | msgstr "Enviar" 32 | 33 | #: gtk_llm_chat/chat_sidebar.py:65 gtk_llm_chat/chat_sidebar.py:278 34 | #: gtk_llm_chat/wide_model_selector.py:454 35 | msgid "Model" 36 | msgstr "Modelo" 37 | 38 | #: gtk_llm_chat/chat_sidebar.py:69 39 | msgid "Change Model" 40 | msgstr "Alterar modelo" 41 | 42 | #: gtk_llm_chat/chat_sidebar.py:70 gtk_llm_chat/chat_sidebar.py:312 43 | #: gtk_llm_chat/chat_sidebar.py:432 gtk_llm_chat/wide_model_selector.py:516 44 | msgid "Provider" 45 | msgstr "Provedor" 46 | 47 | #: gtk_llm_chat/chat_sidebar.py:79 48 | msgid "Set as Default Model" 49 | msgstr "Definir como modelo padrão" 50 | 51 | #: gtk_llm_chat/chat_sidebar.py:88 gtk_llm_chat/chat_sidebar.py:138 52 | msgid "Model Parameters" 53 | msgstr "Parâmetros do modelo" 54 | 55 | #: gtk_llm_chat/chat_sidebar.py:97 56 | #, fuzzy 57 | msgid "Conversation" 58 | msgstr "Conversa" 59 | 60 | #: gtk_llm_chat/chat_sidebar.py:100 gtk_llm_chat/chat_application.py:417 61 | msgid "Delete Conversation" 62 | msgstr "Apagar conversa" 63 | 64 | #: gtk_llm_chat/chat_sidebar.py:110 65 | msgid "Information" 66 | msgstr "Informação" 67 | 68 | #: gtk_llm_chat/chat_sidebar.py:112 69 | msgid "About" 70 | msgstr "Sobre" 71 | 72 | #: 
gtk_llm_chat/chat_sidebar.py:121 73 | msgid "Actions" 74 | msgstr "Ações" 75 | 76 | #: gtk_llm_chat/chat_sidebar.py:145 77 | msgid "Temperature" 78 | msgstr "Temperatura" 79 | 80 | #: gtk_llm_chat/chat_sidebar.py:158 81 | msgid "System Prompt" 82 | msgstr "Prompt do sistema" 83 | 84 | #: gtk_llm_chat/chat_sidebar.py:165 85 | msgid "Parameters" 86 | msgstr "Parâmetros" 87 | 88 | #: gtk_llm_chat/chat_sidebar.py:241 gtk_llm_chat/chat_sidebar.py:308 89 | #: gtk_llm_chat/chat_sidebar.py:310 gtk_llm_chat/chat_sidebar.py:432 90 | #: gtk_llm_chat/model_selection.py:129 91 | msgid "Unknown Provider" 92 | msgstr "Provedor desconhecido" 93 | 94 | #: gtk_llm_chat/chat_sidebar.py:247 95 | msgid "Set Default Model" 96 | msgstr "Definir modelo padrão" 97 | 98 | #: gtk_llm_chat/chat_sidebar.py:248 99 | msgid "Do you want to set" 100 | msgstr "Deseja definir" 101 | 102 | #: gtk_llm_chat/chat_sidebar.py:248 103 | msgid "from" 104 | msgstr "de" 105 | 106 | #: gtk_llm_chat/chat_sidebar.py:248 107 | msgid "as the default model for new conversations?" 108 | msgstr "como modelo padrão para novas conversas?" 
109 | 110 | #: gtk_llm_chat/chat_sidebar.py:251 gtk_llm_chat/chat_sidebar.py:330 111 | #: gtk_llm_chat/model_selector.py:291 gtk_llm_chat/chat_application.py:420 112 | #: gtk_llm_chat/wide_model_selector.py:310 113 | msgid "Cancel" 114 | msgstr "Cancelar" 115 | 116 | #: gtk_llm_chat/chat_sidebar.py:252 117 | msgid "Set as Default" 118 | msgstr "Definir como padrão" 119 | 120 | #: gtk_llm_chat/chat_sidebar.py:278 121 | msgid "set as default" 122 | msgstr "definido como padrão" 123 | 124 | #: gtk_llm_chat/chat_sidebar.py:327 125 | msgid "Set System Prompt" 126 | msgstr "Definir prompt do sistema" 127 | 128 | #: gtk_llm_chat/chat_sidebar.py:328 129 | msgid "Enter the system prompt for the AI model:" 130 | msgstr "Digite o prompt do sistema para o modelo de IA:" 131 | 132 | #: gtk_llm_chat/chat_sidebar.py:331 133 | msgid "Set" 134 | msgstr "Definir" 135 | 136 | #: gtk_llm_chat/chat_sidebar.py:381 137 | msgid "Current" 138 | msgstr "Atual" 139 | 140 | #: gtk_llm_chat/chat_sidebar.py:383 141 | msgid "Not set" 142 | msgstr "Não definido" 143 | 144 | #: gtk_llm_chat/chat_sidebar.py:413 145 | msgid "This is the current default model" 146 | msgstr "Este é o modelo padrão atual" 147 | 148 | #: gtk_llm_chat/chat_sidebar.py:417 149 | msgid "Set as default model" 150 | msgstr "Definir como modelo padrão" 151 | 152 | #: gtk_llm_chat/model_selector.py:46 153 | msgid "Providers" 154 | msgstr "Provedores" 155 | 156 | #: gtk_llm_chat/model_selector.py:50 157 | msgid "Models" 158 | msgstr "Modelos" 159 | 160 | #: gtk_llm_chat/model_selector.py:68 161 | msgid "Select Provider" 162 | msgstr "Selecionar provedor" 163 | 164 | #: gtk_llm_chat/model_selector.py:95 165 | msgid "Select Model" 166 | msgstr "Selecionar modelo" 167 | 168 | #: gtk_llm_chat/model_selector.py:124 169 | msgid "No models found" 170 | msgstr "Nenhum modelo encontrado" 171 | 172 | #: gtk_llm_chat/model_selector.py:145 173 | msgid "models" 174 | msgstr "Modelos" 175 | 176 | #: gtk_llm_chat/model_selector.py:150 177 | 
msgid "API key required" 178 | msgstr "Chave API obrigatória" 179 | 180 | #: gtk_llm_chat/model_selector.py:152 181 | msgid "No models" 182 | msgstr "Nenhum modelo encontrado" 183 | 184 | #: gtk_llm_chat/model_selector.py:210 gtk_llm_chat/model_selector.py:335 185 | msgid "API Key is configured" 186 | msgstr "Chave API configurada" 187 | 188 | #: gtk_llm_chat/model_selector.py:211 gtk_llm_chat/model_selector.py:336 189 | msgid "Change Key" 190 | msgstr "Alterar chave" 191 | 192 | #: gtk_llm_chat/model_selector.py:216 gtk_llm_chat/model_selector.py:340 193 | msgid "API Key Required" 194 | msgstr "Chave API obrigatória" 195 | 196 | #: gtk_llm_chat/model_selector.py:217 gtk_llm_chat/model_selector.py:292 197 | #: gtk_llm_chat/model_selector.py:341 gtk_llm_chat/wide_model_selector.py:311 198 | msgid "Set Key" 199 | msgstr "Definir chave" 200 | 201 | #: gtk_llm_chat/model_selector.py:235 202 | msgid "No models available" 203 | msgstr "Nenhum modelo disponível" 204 | 205 | #: gtk_llm_chat/model_selector.py:236 206 | msgid "Configure an API key to access models from this provider" 207 | msgstr "Configure uma chave API para aceder a modelos deste provedor" 208 | 209 | #: gtk_llm_chat/model_selector.py:241 gtk_llm_chat/wide_model_selector.py:251 210 | msgid "No models found for this provider" 211 | msgstr "Nenhum modelo encontrado para este provedor" 212 | 213 | #: gtk_llm_chat/model_selector.py:288 gtk_llm_chat/wide_model_selector.py:307 214 | msgid "Enter API Key" 215 | msgstr "Digite a chave API" 216 | 217 | #: gtk_llm_chat/model_selector.py:289 gtk_llm_chat/wide_model_selector.py:308 218 | msgid "Enter the API key for" 219 | msgstr "Digite a chave API para" 220 | 221 | #: gtk_llm_chat/model_selector.py:298 gtk_llm_chat/wide_model_selector.py:317 222 | msgid "Paste your API key here" 223 | msgstr "Cole sua chave API aqui" 224 | 225 | #: gtk_llm_chat/model_selector.py:356 226 | msgid "Model Selector Test" 227 | msgstr "Teste do Seletor de Modelo" 228 | 229 | #: 
gtk_llm_chat/tray_applet.py:103 gtk_llm_chat/tray_applet.py:239 230 | #: gtk_llm_chat/welcome.py:59 gtk_llm_chat/llm_client.py:22 231 | msgid "New Conversation" 232 | msgstr "Nova Conversa" 233 | 234 | #: gtk_llm_chat/tray_applet.py:107 gtk_llm_chat/tray_applet.py:241 235 | msgid "Quit" 236 | msgstr "Sair" 237 | 238 | #: gtk_llm_chat/tray_applet.py:210 gtk_llm_chat/chat_application.py:68 239 | msgid "" 240 | "\n" 241 | "Closing application..." 242 | msgstr "" 243 | "\n" 244 | "A fechar a aplicação..." 245 | 246 | #: gtk_llm_chat/tray_applet.py:225 247 | msgid "LLM Conversations" 248 | msgstr "Conversas LLM" 249 | 250 | #: gtk_llm_chat/welcome.py:20 251 | msgid "Tray applet" 252 | msgstr "Applet da bandeja" 253 | 254 | #: gtk_llm_chat/welcome.py:20 255 | msgid "Default Model" 256 | msgstr "Modelo Padrão" 257 | 258 | #: gtk_llm_chat/welcome.py:53 259 | msgid "Next" 260 | msgstr "Próximo" 261 | 262 | #: gtk_llm_chat/welcome.py:117 263 | msgid "Own the conversation." 264 | msgstr "Controle a conversa." 265 | 266 | #: gtk_llm_chat/welcome.py:118 267 | msgid "Use any model you want. Your conversations are stored locally." 268 | msgstr "Use qualquer modelo que desejar. Suas conversas são armazenadas localmente." 269 | 270 | #: gtk_llm_chat/welcome.py:119 271 | msgid "This wizard will guide you through the initial setup" 272 | msgstr "Este assistente irá guiá-lo através da configuração inicial" 273 | 274 | #: gtk_llm_chat/welcome.py:125 275 | msgid "Start" 276 | msgstr "Iniciar" 277 | 278 | #: gtk_llm_chat/welcome.py:157 279 | msgid "Access conversations from the convenience of your system tray" 280 | msgstr "Aceda a conversas a partir da conveniência da sua bandeja do sistema" 281 | 282 | #: gtk_llm_chat/welcome.py:163 283 | msgid "Would you like to start the applet with your session?" 284 | msgstr "Gostaria de iniciar o applet com a sua sessão?" 
285 | 286 | #: gtk_llm_chat/welcome.py:173 287 | msgid "Yes, with my session" 288 | msgstr "Sim, com a minha sessão" 289 | 290 | #: gtk_llm_chat/welcome.py:174 291 | msgid "No, only when I start the app" 292 | msgstr "Não, apenas quando eu iniciar a aplicação" 293 | 294 | #: gtk_llm_chat/welcome.py:209 295 | msgid "Loading model selection..." 296 | msgstr "A carregar seleção de modelo..." 297 | 298 | #: gtk_llm_chat/welcome.py:219 299 | msgid "Ready to start!" 300 | msgstr "Pronto para começar!" 301 | 302 | #: gtk_llm_chat/welcome.py:429 303 | #, fuzzy 304 | msgid "Set API Key" 305 | msgstr "Definir chave" 306 | 307 | #: gtk_llm_chat/welcome.py:429 308 | #, fuzzy 309 | msgid "Change API Key" 310 | msgstr "Alterar chave" 311 | 312 | #: gtk_llm_chat/chat_application.py:193 313 | msgid "Error: _version.py not found" 314 | msgstr "Erro: _version.py não encontrado" 315 | 316 | #: gtk_llm_chat/chat_application.py:418 317 | msgid "Are you sure you want to delete the conversation?" 318 | msgstr "Tem a certeza que quer apagar a conversa?" 319 | 320 | #: gtk_llm_chat/chat_application.py:421 321 | msgid "Delete" 322 | msgstr "Apagar" 323 | 324 | #: gtk_llm_chat/chat_application.py:443 325 | msgid "Gtk LLM Chat" 326 | msgstr "Gtk LLM Chat" 327 | 328 | #: gtk_llm_chat/chat_application.py:446 329 | msgid "A frontend for LLM" 330 | msgstr "Um frontend para LLM" 331 | 332 | #: gtk_llm_chat/wide_model_selector.py:92 333 | msgid "" 334 | "Please select a provider from the list on the left.\n" 335 | "Then, choose a model from the list that appears here." 336 | msgstr "Por favor, selecione um provedor da lista à esquerda.\nDepois, escolha um modelo da lista que aparece aqui." 
337 | 338 | #: gtk_llm_chat/wide_model_selector.py:127 339 | msgid "Most AI models require an API key" 340 | msgstr "A maioria dos modelos de IA requer uma chave API" 341 | 342 | #: gtk_llm_chat/wide_model_selector.py:134 343 | msgid "" 344 | "You'll need to register with each provider to obtain these authentication " 345 | "tokens." 346 | msgstr "Precisará de se registar em cada provedor para obter estes tokens de autenticação." 347 | 348 | #: gtk_llm_chat/wide_model_selector.py:147 349 | msgid "No Selection" 350 | msgstr "Nenhuma seleção" 351 | 352 | #: gtk_llm_chat/wide_model_selector.py:162 353 | msgid "No models or providers found." 354 | msgstr "Nenhum modelo ou provedor encontrado." 355 | 356 | #: gtk_llm_chat/wide_model_selector.py:163 357 | msgid "Error" 358 | msgstr "Erro" 359 | 360 | #: gtk_llm_chat/wide_model_selector.py:442 361 | msgid "Model information not available" 362 | msgstr "Informação do modelo não disponível" 363 | 364 | #: gtk_llm_chat/wide_model_selector.py:443 365 | msgid "Unable to retrieve model details" 366 | msgstr "Não foi possível obter os detalhes do modelo" 367 | 368 | #: gtk_llm_chat/wide_model_selector.py:467 369 | msgid "Aliases" 370 | msgstr "Aliases" 371 | 372 | #: gtk_llm_chat/wide_model_selector.py:478 373 | msgid "API Key" 374 | msgstr "Chave API" 375 | 376 | #: gtk_llm_chat/wide_model_selector.py:487 377 | msgid "Required • Set" 378 | msgstr "Obrigatório • Definido" 379 | 380 | #: gtk_llm_chat/wide_model_selector.py:493 381 | msgid "Required • Not set" 382 | msgstr "Obrigatório • Não definido" 383 | 384 | #: gtk_llm_chat/wide_model_selector.py:499 385 | msgid "Not required" 386 | msgstr "Não obrigatório" 387 | 388 | #: gtk_llm_chat/wide_model_selector.py:510 389 | #: gtk_llm_chat/wide_model_selector.py:511 390 | #, fuzzy 391 | msgid "Unknown" 392 | msgstr "Provedor desconhecido" 393 | 394 | #: gtk_llm_chat/wide_model_selector.py:512 395 | msgid "Plugin" 396 | msgstr "Plugin" 397 | 398 | #: gtk_llm_chat/single_instance.py:26
gtk_llm_chat/single_instance.py:32 399 | msgid "Another instance is already running." 400 | msgstr "Outra instância já está em execução." 401 | 402 | #: gtk_llm_chat/llm_client.py:234 403 | msgid "LLMClient: Ignoring invalid temperature:" 404 | msgstr "LLMClient: A ignorar temperatura inválida:" 405 | 406 | #: gtk_llm_chat/llm_client.py:269 407 | msgid "LLMClient: Starting stream processing..." 408 | msgstr "LLMClient: A iniciar o processamento de stream..." 409 | 410 | #: gtk_llm_chat/llm_client.py:272 411 | msgid "LLMClient: Stream processing cancelled externally." 412 | msgstr "LLMClient: Processamento de stream cancelado externamente." 413 | 414 | #: gtk_llm_chat/llm_client.py:278 415 | msgid "LLMClient: Stream finished normally." 416 | msgstr "LLMClient: Stream terminado normalmente." 417 | 418 | #: gtk_llm_chat/model_selection.py:119 gtk_llm_chat/model_selection.py:125 419 | msgid "Local/Other" 420 | msgstr "Local/Outro" 421 | 422 | #~ msgid "Settings" 423 | #~ msgstr "Configurações" 424 | 425 | #~ msgid "Error reading keys file" 426 | #~ msgstr "Erro ao ler o arquivo de chaves" 427 | 428 | #~ msgid "Check File" 429 | #~ msgstr "Verificar arquivo" 430 | 431 | #~ msgid "Error accessing keys file" 432 | #~ msgstr "Erro ao acessar o arquivo de chaves" 433 | 434 | #~ msgid "Check Permissions" 435 | #~ msgstr "Verificar permissões" 436 | 437 | #~ msgid "LLMClient: Cancel request received." 438 | #~ msgstr "LLMClient: Pedido de cancelamento recebido." 439 | 440 | #~ msgid "LLMClient: Terminating active stream thread." 441 | #~ msgstr "LLMClient: A terminar a thread de stream ativa." 442 | 443 | #~ msgid "LLMClient: No active stream thread to cancel." 444 | #~ msgstr "LLMClient: Nenhuma thread de stream ativa para cancelar." 445 | 446 | #~ msgid "LLMClient: Error - Conversación no disponible para cargar historial." 447 | #~ msgstr "LLMClient: Erro - Conversa não disponível para carregar histórico." 448 | 449 | #, fuzzy 450 | #~ msgid "LLMClient: Historial cargado. 
Total de respuestas en conversación: " 451 | #~ msgstr "LLMClient: Histórico carregado. Total de respostas na conversa: " 452 | 453 | #~ msgid "Exiting..." 454 | #~ msgstr "Saindo..." 455 | 456 | #~ msgid "LLM Chat" 457 | #~ msgstr "LLM Chat" 458 | 459 | #~ msgid "Error: conversation_id is required to add to history." 460 | #~ msgstr "Erro: o conversation_id é obrigatório para adicionar ao histórico." 461 | 462 | #~ msgid "Error: conversation_id is required to create the conversation." 463 | #~ msgstr "Erro: o conversation_id é obrigatório para criar a conversa." 464 | 465 | #~ msgid "" 466 | #~ "LLMClient: Error - Attempting to load history with model initialization " 467 | #~ "error." 468 | #~ msgstr "" 469 | #~ "LLMClient: Erro - A tentar carregar o histórico com erro de inicialização " 470 | #~ "do modelo." 471 | 472 | #~ msgid "" 473 | #~ "LLMClient: Error - Attempting to load history without initialized " 474 | #~ "conversation." 475 | #~ msgstr "" 476 | #~ "LLMClient: Erro - A tentar carregar o histórico sem conversa inicializada." 477 | 478 | #~ msgid "" 479 | #~ "LLMClient: Warning - Assistant response without previous user prompt in " 480 | #~ "history." 481 | #~ msgstr "" 482 | #~ "LLMClient: Aviso - Resposta do assistente sem pedido anterior do " 483 | #~ "utilizador no histórico." 484 | -------------------------------------------------------------------------------- /po/de/LC_MESSAGES/gtk-llm-chat.po: -------------------------------------------------------------------------------- 1 | # German translations for gtk-llm-chat package. 2 | # Copyright (C) 2025 THE PACKAGE'S COPYRIGHT HOLDER 3 | # This file is distributed under the same license as the gtk-llm-chat package. 4 | # Sebastian Silva , 2025. 
5 | # 6 | #, fuzzy 7 | msgid "" 8 | msgstr "" 9 | "Project-Id-Version: gtk-llm-chat 0.1\n" 10 | "Report-Msgid-Bugs-To: your@email.com\n" 11 | "POT-Creation-Date: 2025-06-04 18:35-0500\n" 12 | "PO-Revision-Date: 2025-06-04 18:45-0500\n" 13 | "Last-Translator: Sebastian Silva \n" 14 | "Language-Team: German \n" 15 | "Language: de\n" 16 | "MIME-Version: 1.0\n" 17 | "Content-Type: text/plain; charset=UTF-8\n" 18 | "Content-Transfer-Encoding: 8bit\n" 19 | "Plural-Forms: nplurals=2; plural=(n != 1);\n" 20 | 21 | #: gtk_llm_chat/chat_window.py:135 22 | msgid "Model Settings" 23 | msgstr "Modelleinstellungen" 24 | 25 | #: gtk_llm_chat/chat_window.py:141 26 | msgid "Rename" 27 | msgstr "Umbenennen" 28 | 29 | #: gtk_llm_chat/chat_window.py:211 30 | msgid "Send" 31 | msgstr "Senden" 32 | 33 | #: gtk_llm_chat/chat_sidebar.py:65 gtk_llm_chat/chat_sidebar.py:278 34 | #: gtk_llm_chat/wide_model_selector.py:454 35 | msgid "Model" 36 | msgstr "Modell" 37 | 38 | #: gtk_llm_chat/chat_sidebar.py:69 39 | msgid "Change Model" 40 | msgstr "Modell wechseln" 41 | 42 | #: gtk_llm_chat/chat_sidebar.py:70 gtk_llm_chat/chat_sidebar.py:312 43 | #: gtk_llm_chat/chat_sidebar.py:432 gtk_llm_chat/wide_model_selector.py:516 44 | msgid "Provider" 45 | msgstr "Anbieter" 46 | 47 | #: gtk_llm_chat/chat_sidebar.py:79 48 | msgid "Set as Default Model" 49 | msgstr "Als Standardmodell festlegen" 50 | 51 | #: gtk_llm_chat/chat_sidebar.py:88 gtk_llm_chat/chat_sidebar.py:138 52 | msgid "Model Parameters" 53 | msgstr "Modellparameter" 54 | 55 | #: gtk_llm_chat/chat_sidebar.py:97 56 | #, fuzzy 57 | msgid "Conversation" 58 | msgstr "Unterhaltung" 59 | 60 | #: gtk_llm_chat/chat_sidebar.py:100 gtk_llm_chat/chat_application.py:417 61 | msgid "Delete Conversation" 62 | msgstr "Konversation löschen" 63 | 64 | #: gtk_llm_chat/chat_sidebar.py:110 65 | msgid "Information" 66 | msgstr "Informationen" 67 | 68 | #: gtk_llm_chat/chat_sidebar.py:112 69 | msgid "About" 70 | msgstr "Über" 71 | 72 | #: 
gtk_llm_chat/chat_sidebar.py:121 73 | msgid "Actions" 74 | msgstr "Aktionen" 75 | 76 | #: gtk_llm_chat/chat_sidebar.py:145 77 | msgid "Temperature" 78 | msgstr "Temperatur" 79 | 80 | #: gtk_llm_chat/chat_sidebar.py:158 81 | msgid "System Prompt" 82 | msgstr "System-Prompt" 83 | 84 | #: gtk_llm_chat/chat_sidebar.py:165 85 | msgid "Parameters" 86 | msgstr "Parameter" 87 | 88 | #: gtk_llm_chat/chat_sidebar.py:241 gtk_llm_chat/chat_sidebar.py:308 89 | #: gtk_llm_chat/chat_sidebar.py:310 gtk_llm_chat/chat_sidebar.py:432 90 | #: gtk_llm_chat/model_selection.py:129 91 | msgid "Unknown Provider" 92 | msgstr "Unbekannter Anbieter" 93 | 94 | #: gtk_llm_chat/chat_sidebar.py:247 95 | msgid "Set Default Model" 96 | msgstr "Standardmodell festlegen" 97 | 98 | #: gtk_llm_chat/chat_sidebar.py:248 99 | msgid "Do you want to set" 100 | msgstr "Möchten Sie festlegen" 101 | 102 | #: gtk_llm_chat/chat_sidebar.py:248 103 | msgid "from" 104 | msgstr "von" 105 | 106 | #: gtk_llm_chat/chat_sidebar.py:248 107 | msgid "as the default model for new conversations?" 108 | msgstr "als Standardmodell für neue Unterhaltungen?" 
109 | 110 | #: gtk_llm_chat/chat_sidebar.py:251 gtk_llm_chat/chat_sidebar.py:330 111 | #: gtk_llm_chat/model_selector.py:291 gtk_llm_chat/chat_application.py:420 112 | #: gtk_llm_chat/wide_model_selector.py:310 113 | msgid "Cancel" 114 | msgstr "Abbrechen" 115 | 116 | #: gtk_llm_chat/chat_sidebar.py:252 117 | msgid "Set as Default" 118 | msgstr "Als Standard festlegen" 119 | 120 | #: gtk_llm_chat/chat_sidebar.py:278 121 | msgid "set as default" 122 | msgstr "als Standard festgelegt" 123 | 124 | #: gtk_llm_chat/chat_sidebar.py:327 125 | msgid "Set System Prompt" 126 | msgstr "System-Prompt festlegen" 127 | 128 | #: gtk_llm_chat/chat_sidebar.py:328 129 | msgid "Enter the system prompt for the AI model:" 130 | msgstr "Geben Sie den System-Prompt für das KI-Modell ein:" 131 | 132 | #: gtk_llm_chat/chat_sidebar.py:331 133 | msgid "Set" 134 | msgstr "Festlegen" 135 | 136 | #: gtk_llm_chat/chat_sidebar.py:381 137 | msgid "Current" 138 | msgstr "Aktuell" 139 | 140 | #: gtk_llm_chat/chat_sidebar.py:383 141 | msgid "Not set" 142 | msgstr "Nicht festgelegt" 143 | 144 | #: gtk_llm_chat/chat_sidebar.py:413 145 | msgid "This is the current default model" 146 | msgstr "Dies ist das aktuelle Standardmodell" 147 | 148 | #: gtk_llm_chat/chat_sidebar.py:417 149 | msgid "Set as default model" 150 | msgstr "Als Standardmodell festlegen" 151 | 152 | #: gtk_llm_chat/model_selector.py:46 153 | msgid "Providers" 154 | msgstr "Anbieter" 155 | 156 | #: gtk_llm_chat/model_selector.py:50 157 | msgid "Models" 158 | msgstr "Modelle" 159 | 160 | #: gtk_llm_chat/model_selector.py:68 161 | msgid "Select Provider" 162 | msgstr "Anbieter auswählen" 163 | 164 | #: gtk_llm_chat/model_selector.py:95 165 | msgid "Select Model" 166 | msgstr "Modell auswählen" 167 | 168 | #: gtk_llm_chat/model_selector.py:124 169 | msgid "No models found" 170 | msgstr "Keine Modelle gefunden" 171 | 172 | #: gtk_llm_chat/model_selector.py:145 173 | msgid "models" 174 | msgstr "Modelle" 175 | 176 | #: 
gtk_llm_chat/model_selector.py:150 177 | msgid "API key required" 178 | msgstr "API-Schlüssel erforderlich" 179 | 180 | #: gtk_llm_chat/model_selector.py:152 181 | msgid "No models" 182 | msgstr "Keine Modelle gefunden" 183 | 184 | #: gtk_llm_chat/model_selector.py:210 gtk_llm_chat/model_selector.py:335 185 | msgid "API Key is configured" 186 | msgstr "API-Schlüssel ist konfiguriert" 187 | 188 | #: gtk_llm_chat/model_selector.py:211 gtk_llm_chat/model_selector.py:336 189 | msgid "Change Key" 190 | msgstr "Schlüssel ändern" 191 | 192 | #: gtk_llm_chat/model_selector.py:216 gtk_llm_chat/model_selector.py:340 193 | msgid "API Key Required" 194 | msgstr "API-Schlüssel erforderlich" 195 | 196 | #: gtk_llm_chat/model_selector.py:217 gtk_llm_chat/model_selector.py:292 197 | #: gtk_llm_chat/model_selector.py:341 gtk_llm_chat/wide_model_selector.py:311 198 | msgid "Set Key" 199 | msgstr "Schlüssel festlegen" 200 | 201 | #: gtk_llm_chat/model_selector.py:235 202 | msgid "No models available" 203 | msgstr "Keine Modelle verfügbar" 204 | 205 | #: gtk_llm_chat/model_selector.py:236 206 | msgid "Configure an API key to access models from this provider" 207 | msgstr "Konfigurieren Sie einen API-Schlüssel, um auf Modelle dieses Anbieters zuzugreifen" 208 | 209 | #: gtk_llm_chat/model_selector.py:241 gtk_llm_chat/wide_model_selector.py:251 210 | msgid "No models found for this provider" 211 | msgstr "Für diesen Anbieter wurden keine Modelle gefunden" 212 | 213 | #: gtk_llm_chat/model_selector.py:288 gtk_llm_chat/wide_model_selector.py:307 214 | msgid "Enter API Key" 215 | msgstr "API-Schlüssel eingeben" 216 | 217 | #: gtk_llm_chat/model_selector.py:289 gtk_llm_chat/wide_model_selector.py:308 218 | msgid "Enter the API key for" 219 | msgstr "API-Schlüssel eingeben für" 220 | 221 | #: gtk_llm_chat/model_selector.py:298 gtk_llm_chat/wide_model_selector.py:317 222 | msgid "Paste your API key here" 223 | msgstr "Fügen Sie hier Ihren API-Schlüssel ein" 224 | 225 | #: 
gtk_llm_chat/model_selector.py:356 226 | msgid "Model Selector Test" 227 | msgstr "Modellauswahl-Test" 228 | 229 | #: gtk_llm_chat/tray_applet.py:103 gtk_llm_chat/tray_applet.py:239 230 | #: gtk_llm_chat/welcome.py:59 gtk_llm_chat/llm_client.py:22 231 | msgid "New Conversation" 232 | msgstr "Neue Konversation" 233 | 234 | #: gtk_llm_chat/tray_applet.py:107 gtk_llm_chat/tray_applet.py:241 235 | msgid "Quit" 236 | msgstr "Beenden" 237 | 238 | #: gtk_llm_chat/tray_applet.py:210 gtk_llm_chat/chat_application.py:68 239 | msgid "" 240 | "\n" 241 | "Closing application..." 242 | msgstr "" 243 | "\n" 244 | "Anwendung wird geschlossen..." 245 | 246 | #: gtk_llm_chat/tray_applet.py:225 247 | msgid "LLM Conversations" 248 | msgstr "LLM-Unterhaltungen" 249 | 250 | #: gtk_llm_chat/welcome.py:20 251 | msgid "Tray applet" 252 | msgstr "Tray-Applet" 253 | 254 | #: gtk_llm_chat/welcome.py:20 255 | msgid "Default Model" 256 | msgstr "Standardmodell" 257 | 258 | #: gtk_llm_chat/welcome.py:53 259 | msgid "Next" 260 | msgstr "Weiter" 261 | 262 | #: gtk_llm_chat/welcome.py:117 263 | msgid "Own the conversation." 264 | msgstr "Behalten Sie die Kontrolle über die Unterhaltung." 265 | 266 | #: gtk_llm_chat/welcome.py:118 267 | msgid "Use any model you want. Your conversations are stored locally." 268 | msgstr "Verwenden Sie jedes gewünschte Modell. Ihre Unterhaltungen werden lokal gespeichert." 269 | 270 | #: gtk_llm_chat/welcome.py:119 271 | msgid "This wizard will guide you through the initial setup" 272 | msgstr "Dieser Assistent führt Sie durch die Ersteinrichtung" 273 | 274 | #: gtk_llm_chat/welcome.py:125 275 | msgid "Start" 276 | msgstr "Start" 277 | 278 | #: gtk_llm_chat/welcome.py:157 279 | msgid "Access conversations from the convenience of your system tray" 280 | msgstr "Greifen Sie bequem über Ihr System-Tray auf Unterhaltungen zu" 281 | 282 | #: gtk_llm_chat/welcome.py:163 283 | msgid "Would you like to start the applet with your session?" 
284 | msgstr "Möchten Sie das Applet mit Ihrer Sitzung starten?" 285 | 286 | #: gtk_llm_chat/welcome.py:173 287 | msgid "Yes, with my session" 288 | msgstr "Ja, mit meiner Sitzung" 289 | 290 | #: gtk_llm_chat/welcome.py:174 291 | msgid "No, only when I start the app" 292 | msgstr "Nein, nur wenn ich die App starte" 293 | 294 | #: gtk_llm_chat/welcome.py:209 295 | msgid "Loading model selection..." 296 | msgstr "Modellauswahl wird geladen..." 297 | 298 | #: gtk_llm_chat/welcome.py:219 299 | msgid "Ready to start!" 300 | msgstr "Bereit zum Start!" 301 | 302 | #: gtk_llm_chat/welcome.py:429 303 | #, fuzzy 304 | msgid "Set API Key" 305 | msgstr "Schlüssel festlegen" 306 | 307 | #: gtk_llm_chat/welcome.py:429 308 | #, fuzzy 309 | msgid "Change API Key" 310 | msgstr "Schlüssel ändern" 311 | 312 | #: gtk_llm_chat/chat_application.py:193 313 | msgid "Error: _version.py not found" 314 | msgstr "Fehler: _version.py nicht gefunden" 315 | 316 | #: gtk_llm_chat/chat_application.py:418 317 | msgid "Are you sure you want to delete the conversation?" 318 | msgstr "Sind Sie sicher, dass Sie die Konversation löschen möchten?" 319 | 320 | #: gtk_llm_chat/chat_application.py:421 321 | msgid "Delete" 322 | msgstr "Löschen" 323 | 324 | #: gtk_llm_chat/chat_application.py:443 325 | msgid "Gtk LLM Chat" 326 | msgstr "Gtk LLM Chat" 327 | 328 | #: gtk_llm_chat/chat_application.py:446 329 | msgid "A frontend for LLM" 330 | msgstr "Ein Frontend für LLM" 331 | 332 | #: gtk_llm_chat/wide_model_selector.py:92 333 | msgid "" 334 | "Please select a provider from the list on the left.\n" 335 | "Then, choose a model from the list that appears here." 336 | msgstr "Bitte wählen Sie einen Anbieter aus der Liste links aus.\nDann wählen Sie ein Modell aus der hier erscheinenden Liste." 
337 | 338 | #: gtk_llm_chat/wide_model_selector.py:127 339 | msgid "Most AI models require an API key" 340 | msgstr "Die meisten KI-Modelle erfordern einen API-Schlüssel" 341 | 342 | #: gtk_llm_chat/wide_model_selector.py:134 343 | msgid "" 344 | "You'll need to register with each provider to obtain these authentication " 345 | "tokens." 346 | msgstr "Sie müssen sich bei jedem Anbieter registrieren, um diese Authentifizierungstoken zu erhalten." 347 | 348 | #: gtk_llm_chat/wide_model_selector.py:147 349 | msgid "No Selection" 350 | msgstr "Keine Auswahl" 351 | 352 | #: gtk_llm_chat/wide_model_selector.py:162 353 | msgid "No models or providers found." 354 | msgstr "Keine Modelle oder Anbieter gefunden." 355 | 356 | #: gtk_llm_chat/wide_model_selector.py:163 357 | msgid "Error" 358 | msgstr "Fehler" 359 | 360 | #: gtk_llm_chat/wide_model_selector.py:442 361 | msgid "Model information not available" 362 | msgstr "Modellinformationen nicht verfügbar" 363 | 364 | #: gtk_llm_chat/wide_model_selector.py:443 365 | msgid "Unable to retrieve model details" 366 | msgstr "Modelldetails konnten nicht abgerufen werden" 367 | 368 | #: gtk_llm_chat/wide_model_selector.py:467 369 | msgid "Aliases" 370 | msgstr "Aliase" 371 | 372 | #: gtk_llm_chat/wide_model_selector.py:478 373 | msgid "API Key" 374 | msgstr "API-Schlüssel" 375 | 376 | #: gtk_llm_chat/wide_model_selector.py:487 377 | msgid "Required • Set" 378 | msgstr "Erforderlich • Festgelegt" 379 | 380 | #: gtk_llm_chat/wide_model_selector.py:493 381 | msgid "Required • Not set" 382 | msgstr "Erforderlich • Nicht festgelegt" 383 | 384 | #: gtk_llm_chat/wide_model_selector.py:499 385 | msgid "Not required" 386 | msgstr "Nicht erforderlich" 387 | 388 | #: gtk_llm_chat/wide_model_selector.py:510 389 | #: gtk_llm_chat/wide_model_selector.py:511 390 | #, fuzzy 391 | msgid "Unknown" 392 | msgstr "Unbekannt" 393 | 394 | #: gtk_llm_chat/wide_model_selector.py:512 395 | msgid "Plugin" 396 | msgstr "Plugin" 397 | 398 | #: 
gtk_llm_chat/single_instance.py:26 gtk_llm_chat/single_instance.py:32 399 | msgid "Another instance is already running." 400 | msgstr "Eine andere Instanz läuft bereits." 401 | 402 | #: gtk_llm_chat/llm_client.py:234 403 | msgid "LLMClient: Ignoring invalid temperature:" 404 | msgstr "LLMClient: Ignoriere ungültige Temperatur:" 405 | 406 | #: gtk_llm_chat/llm_client.py:269 407 | msgid "LLMClient: Starting stream processing..." 408 | msgstr "LLMClient: Starte Stream-Verarbeitung..." 409 | 410 | #: gtk_llm_chat/llm_client.py:272 411 | msgid "LLMClient: Stream processing cancelled externally." 412 | msgstr "LLMClient: Stream-Verarbeitung wurde extern abgebrochen." 413 | 414 | #: gtk_llm_chat/llm_client.py:278 415 | msgid "LLMClient: Stream finished normally." 416 | msgstr "LLMClient: Stream wurde normal beendet." 417 | 418 | #: gtk_llm_chat/model_selection.py:119 gtk_llm_chat/model_selection.py:125 419 | msgid "Local/Other" 420 | msgstr "Lokal/Andere" 421 | 422 | #~ msgid "Settings" 423 | #~ msgstr "Einstellungen" 424 | 425 | #~ msgid "Error reading keys file" 426 | #~ msgstr "Fehler beim Lesen der Schlüsseldatei" 427 | 428 | #~ msgid "Check File" 429 | #~ msgstr "Datei prüfen" 430 | 431 | #~ msgid "Error accessing keys file" 432 | #~ msgstr "Fehler beim Zugriff auf die Schlüsseldatei" 433 | 434 | #~ msgid "Check Permissions" 435 | #~ msgstr "Berechtigungen prüfen" 436 | 437 | #~ msgid "LLMClient: Cancel request received." 438 | #~ msgstr "LLMClient: Abbruchanfrage empfangen." 439 | 440 | #~ msgid "LLMClient: Terminating active stream thread." 441 | #~ msgstr "LLMClient: Beende aktiven Stream-Thread." 442 | 443 | #~ msgid "LLMClient: No active stream thread to cancel." 444 | #~ msgstr "LLMClient: Kein aktiver Stream-Thread zum Abbrechen." 445 | 446 | #~ msgid "LLMClient: Error - Conversación no disponible para cargar historial." 447 | #~ msgstr "" 448 | #~ "LLMClient: Fehler - Konversation nicht verfügbar, um Verlauf zu laden." 
449 | 450 | #~ msgid "LLMClient: Historial cargado. Total de respuestas en conversación: " 451 | #~ msgstr "" 452 | #~ "LLMClient: Verlauf geladen. Anzahl der Antworten in der Konversation: " 453 | 454 | #~ msgid "Exiting..." 455 | #~ msgstr "Beende..." 456 | 457 | #~ msgid "LLM Chat" 458 | #~ msgstr "LLM Chat" 459 | 460 | #~ msgid "Error: conversation_id is required to add to history." 461 | #~ msgstr "" 462 | #~ "Fehler: conversation_id ist erforderlich, um der Historie hinzuzufügen." 463 | 464 | #~ msgid "Error: conversation_id is required to create the conversation." 465 | #~ msgstr "" 466 | #~ "Fehler: conversation_id ist erforderlich, um die Konversation zu " 467 | #~ "erstellen." 468 | 469 | #~ msgid "" 470 | #~ "LLMClient: Error - Attempting to load history with model initialization " 471 | #~ "error." 472 | #~ msgstr "" 473 | #~ "LLMClient: Fehler - Versuch, den Verlauf mit der Modellinitialisierung zu " 474 | #~ "laden." 475 | 476 | #~ msgid "" 477 | #~ "LLMClient: Error - Attempting to load history without initialized " 478 | #~ "conversation." 479 | #~ msgstr "" 480 | #~ "LLMClient: Fehler - Versuch, den Verlauf ohne initialisierte Konversation " 481 | #~ "zu laden." 482 | 483 | #~ msgid "" 484 | #~ "LLMClient: Warning - Assistant response without previous user prompt in " 485 | #~ "history." 486 | #~ msgstr "" 487 | #~ "LLMClient: Warnung - Antwort des Assistenten ohne vorherige " 488 | #~ "Benutzeraufforderung im Verlauf." 489 | --------------------------------------------------------------------------------