diff --git a/.config/emacs/init.el b/.config/emacs/init.el
index ac7a094..5a3e8cb 100644
--- a/.config/emacs/init.el
+++ b/.config/emacs/init.el
@@ -306,13 +306,20 @@
 )
 
 (defun list-ollama-models ()
-  "Query the local Ollama server for the list of installed models"
+  "Query the local Ollama server for the list of installed models."
   (condition-case error-var
       (mapcar #'intern
              (mapcar #'car
                      (mapcar #'split-string (cdr (process-lines "podllama" "list")))))
    (error (message "Failed to list local models: %s" error-var) ())))
 
+(defun enrich-ollama-models (available library)
+  "Enrich the AVAILABLE models with metadata from the LIBRARY of known models."
+  (mapcar
+   (lambda (model)
+     (seq-find (lambda (x) (eq (car x) model)) library model))
+   available))
+
 (use-package gptel
   :hook
   ;; keep-sorted start
@@ -330,13 +337,14 @@
   (gptel-model 'llama3.2:latest)
   ;; keep-sorted end
   :preface
+  (load "local-models.el")
   (gptel-make-anthropic "Claude"
     :stream t
     :key 'gptel-api-key-from-auth-source
     )
   (gptel-make-ollama "Ollama"
     :stream t
-    :models (list-ollama-models)
+    :models (enrich-ollama-models (list-ollama-models) gptel--local-models)
     )
   )
 
diff --git a/.config/emacs/site-lisp/local-models.el b/.config/emacs/site-lisp/local-models.el
new file mode 100644
index 0000000..cddf6e8
--- /dev/null
+++ b/.config/emacs/site-lisp/local-models.el
@@ -0,0 +1,37 @@
+(defconst gptel--local-models
+  '(
+    ;; keep-sorted start
+    (
+     hf.co/Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2-GGUF:latest
+     :description "Uncensored model based on Llama-3.1-8b-Instruct"
+     :context-window 128
+     :cutoff-date "2023-12"
+     )
+    (
+     hf.co/TheBloke/MythoMax-L2-13B-GGUF:latest
+     :description "Proficient at both roleplaying and storywriting"
+     :context-window 32
+     )
+    (
+     hf.co/bartowski/cognitivecomputations_Dolphin-Mistral-24B-Venice-Edition-GGUF:latest
+     :description "Uncensored version of Mistral 24B"
+     :context-window 32
+     :cutoff-date "2023-10"
+     )
+    (
+     llama3.2:latest
+     :description "Instruction-tuned model optimized for multilingual dialogue"
+     :context-window 128
+     :cutoff-date "2023-12"
+     )
+    (
+     mollysama/rwkv-7-g0a3:13.3b
+     :description "Pure RNN reasoning model, suitable for post-training and fine-tuning"
+     :context-window 1000
+     :cutoff-date "2023-10"
+     )
+    ;; keep-sorted end
+    )
+  "List of known local models and associated properties.
+Refer to https://gptel.org/manual.html#models for a description of supported properties."
+  )
diff --git a/.local/bin/upgrade b/.local/bin/upgrade
index 95784aa..5ce5ba5 100755
--- a/.local/bin/upgrade
+++ b/.local/bin/upgrade
@@ -17,10 +17,7 @@ cargo_update() {
 }
 
 pipx_update() {
-  for venv in $(pipx list --json | jq --raw-output ".venvs | keys[]")
-  do
-    pipx upgrade "${venv}"
-  done
+  pipx upgrade-all
 }
 
 git_sync_update() {
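
For illustration, a minimal sketch of what the new enrich-ollama-models hands to gptel-make-ollama; the model list passed in is hypothetical and some-unknown:latest is a made-up name, not part of the change:

;; Assuming llama3.2:latest is installed locally and some-unknown:latest is a
;; model that does not appear in gptel--local-models:
(enrich-ollama-models '(llama3.2:latest some-unknown:latest) gptel--local-models)
;; => ((llama3.2:latest
;;      :description "Instruction-tuned model optimized for multilingual dialogue"
;;      :context-window 128
;;      :cutoff-date "2023-12")
;;     some-unknown:latest)
;; Known models come back with their metadata attached in the (NAME . PLIST)
;; shape that gptel's :models argument accepts, while unknown models fall
;; through as bare symbols because MODEL is passed as the DEFAULT argument of
;; `seq-find'.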