diff --git a/.config/emacs/init.el b/.config/emacs/init.el index 5a3e8cb..3309767 100644 --- a/.config/emacs/init.el +++ b/.config/emacs/init.el @@ -307,11 +307,12 @@ (defun list-ollama-models () "Query the local Ollama server for the list of installed models." - (condition-case error-var - (mapcar #'intern (mapcar #'car (mapcar #'split-string (cdr (process-lines "podllama" "list"))))) - (error - (message "Failed to list local models: %s" error-var) - ()))) + (let* ((tags-buffer (url-retrieve-synchronously "http://ollama:11434/api/tags")) + (raw-response (with-current-buffer tags-buffer (buffer-string))) + (tags-payload (nth 1 (split-string raw-response "\n\n"))) + (models (gethash "models" (json-parse-string tags-payload))) + (model-names (mapcar (lambda (model) (gethash "name" model)) models))) + model-names)) (defun enrich-ollama-models (available library) "Enrich the available models with metadata from the library of known models." @@ -330,11 +331,12 @@ ;; keep-sorted end :custom ;; keep-sorted start + (gptel--system-message "You are a sassy, sharp-tongued personal assistant. 
I need you to assist me in crafting responses to questions, dripping with acerbic wit and sarcasm.") (gptel-backend (gptel-get-backend "Ollama")) (gptel-default-mode 'org-mode) (gptel-expert-commands t) (gptel-highlight-methods '(face margin)) - (gptel-model 'llama3.2:latest) + (gptel-model 'hf.co/unsloth/Devstral-Small-2507-GGUF:latest) ;; keep-sorted end :preface (load "local-models.el") @@ -343,6 +345,7 @@ :key 'gptel-api-key-from-auth-source ) (gptel-make-ollama "Ollama" + :host "ollama:11434" :stream t :models (enrich-ollama-models (list-ollama-models) gptel--local-models) ) diff --git a/.config/emacs/site-lisp/local-models.el b/.config/emacs/site-lisp/local-models.el index cddf6e8..93635a9 100644 --- a/.config/emacs/site-lisp/local-models.el +++ b/.config/emacs/site-lisp/local-models.el @@ -1,3 +1,5 @@ +;; -*- lexical-binding: t; -*- + (defconst gptel--local-models '( ;; keep-sorted start @@ -18,6 +20,11 @@ :context-window 32 :cutoff-date "2023-10" ) + ( + hf.co/unsloth/Devstral-Small-2507-GGUF:latest + :description "Agentic LLM for software engineering tasks" + :context-window 128 + ) ( llama3.2:latest :description "Instruction-tuned model optimized for multilingual dialogue" diff --git a/.config/environment.d/ollama.conf b/.config/environment.d/ollama.conf new file mode 100644 index 0000000..6105efa --- /dev/null +++ b/.config/environment.d/ollama.conf @@ -0,0 +1,3 @@ +# keep-sorted start +HOSTALIASES=~/.config/hosts +# keep-sorted end diff --git a/.config/hosts b/.config/hosts new file mode 100644 index 0000000..4aa6d0b --- /dev/null +++ b/.config/hosts @@ -0,0 +1 @@ +ollama localhost diff --git a/.local/bin/vaulter b/.local/bin/vaulter index f12dd73..876832f 100755 --- a/.local/bin/vaulter +++ b/.local/bin/vaulter @@ -8,8 +8,9 @@ podman --transient-store run --rm -ti \ --env XDG_RUNTIME_DIR=/tmp \ --hostname localhost \ --mount type=bind,source="${XDG_RUNTIME_DIR}"/"${WAYLAND_DISPLAY}",target=/tmp/"${WAYLAND_DISPLAY}" \ + --mount 
type=bind,source="$(systemd-path user-configuration)"/emacs/site-lisp/local-models.el,target=/root/.config/emacs/site-lisp/local-models.el \ --mount type=bind,readonly=true,source="$(systemd-path user)"/.keys/vaults,target=/root/.age/key \ --mount type=bind,readonly=true,source="$(systemd-path user)"/.keys/vaults.pub,target=/root/.age/key.pub \ --mount type=bind,source="$(systemd-path user)"/Vaults,target=/root/Vaults \ - --network none \ + --network systemd-ollama \ vaulter:latest