diff --git a/.config/emacs/init.el b/.config/emacs/init.el index 3309767..5a3e8cb 100644 --- a/.config/emacs/init.el +++ b/.config/emacs/init.el @@ -307,12 +307,11 @@ (defun list-ollama-models () "Query the local Ollama server for the list of installed models." - (let* ((tags-buffer (url-retrieve-synchronously "http://ollama:11434/api/tags")) - (raw-response (with-current-buffer tags-buffer (buffer-string))) - (tags-payload (nth 1 (split-string raw-response "\n\n"))) - (models (gethash "models" (json-parse-string tags-payload))) - (model-names (mapcar (lambda (model) (gethash "name" model)) models))) - model-names)) + (condition-case error-var + (mapcar #'intern (mapcar #'car (mapcar #'split-string (cdr (process-lines "podllama" "list"))))) + (error + (message "Failed to list local models: %s" error-var) + ()))) (defun enrich-ollama-models (available library) "Enrich the available models with metadata from the library of known models." @@ -331,12 +330,11 @@ ;; keep-sorted end :custom ;; keep-sorted start - (gptel--system-message "You are a sassy, sharp-tongued personal assistant. 
I need you to assist me in crafting responses to questions, dripping with acerbic wit and sarcasm.") (gptel-backend (gptel-get-backend "Ollama")) (gptel-default-mode 'org-mode) (gptel-expert-commands t) (gptel-highlight-methods '(face margin)) - (gptel-model 'hf.co/unsloth/Devstral-Small-2507-GGUF:latest) + (gptel-model 'llama3.2:latest) ;; keep-sorted end :preface (load "local-models.el") @@ -345,7 +343,6 @@ :key 'gptel-api-key-from-auth-source ) (gptel-make-ollama "Ollama" - :host "ollama:11434" :stream t :models (enrich-ollama-models (list-ollama-models) gptel--local-models) ) diff --git a/.config/emacs/site-lisp/local-models.el b/.config/emacs/site-lisp/local-models.el index 93635a9..cddf6e8 100644 --- a/.config/emacs/site-lisp/local-models.el +++ b/.config/emacs/site-lisp/local-models.el @@ -1,5 +1,3 @@ -;; -*- lexical-binding: t; -*- - (defconst gptel--local-models '( ;; keep-sorted start @@ -20,11 +18,6 @@ :context-window 32 :cutoff-date "2023-10" ) - ( - hf.co/unsloth/Devstral-Small-2507-GGUF:latest - :description "Agentic LLM for software engineering tasks" - :context-window 128 - ) ( llama3.2:latest :description "Instruction-tuned model optimized for multilingual dialogue" diff --git a/.config/environment.d/ollama.conf b/.config/environment.d/ollama.conf deleted file mode 100644 index 6105efa..0000000 --- a/.config/environment.d/ollama.conf +++ /dev/null @@ -1,3 +0,0 @@ -# keep-sorted start -HOSTALIASES=~/.config/hosts -# keep-sorted end diff --git a/.config/hosts b/.config/hosts deleted file mode 100644 index 4aa6d0b..0000000 --- a/.config/hosts +++ /dev/null @@ -1 +0,0 @@ -ollama localhost diff --git a/.local/bin/vaulter b/.local/bin/vaulter index 876832f..f12dd73 100755 --- a/.local/bin/vaulter +++ b/.local/bin/vaulter @@ -8,9 +8,8 @@ podman --transient-store run --rm -ti \ --env XDG_RUNTIME_DIR=/tmp \ --hostname localhost \ --mount type=bind,source="${XDG_RUNTIME_DIR}"/"${WAYLAND_DISPLAY}",target=/tmp/"${WAYLAND_DISPLAY}" \ --mount 
type=bind,source="$(systemd-path user-configuration)"/emacs/site-lisp/local-models.el,target=/root/.config/emacs/site-lisp/local-models.el \ --mount type=bind,readonly=true,source="$(systemd-path user)"/.keys/vaults,target=/root/.age/key \ --mount type=bind,readonly=true,source="$(systemd-path user)"/.keys/vaults.pub,target=/root/.age/key.pub \ --mount type=bind,source="$(systemd-path user)"/Vaults,target=/root/Vaults \ - --network systemd-ollama \ + --network none \ vaulter:latest