Compare commits
9 commits: 157a03687d ... ed44c92fc9
| Author | SHA1 | Date |
|---|---|---|
| | ed44c92fc9 | |
| | d187c19d71 | |
| | 7998f20d52 | |
| | a019feb7cd | |
| | 131511a2f7 | |
| | 59583b296a | |
| | f533b3ef76 | |
| | 89d667542a | |
| | 7699aa4084 | |
3 changed files with 48 additions and 6 deletions
@@ -306,13 +306,20 @@
   )
 
 (defun list-ollama-models ()
-  "Query the local Ollama server for the list of installed models"
+  "Query the local Ollama server for the list of installed models."
   (condition-case error-var
       (mapcar #'intern (mapcar #'car (mapcar #'split-string (cdr (process-lines "podllama" "list")))))
     (error
      (message "Failed to list local models: %s" error-var)
      ())))
 
+(defun enrich-ollama-models (available library)
+  "Enrich the available models with metadata from the library of known models."
+  (mapcar
+   (lambda (model)
+     (seq-find (lambda (x) (eq (car x) model)) library model))
+   available))
+
 (use-package gptel
   :hook
   ;; keep-sorted start
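For reference, a minimal sketch of how the new enrich-ollama-models behaves (illustrative only, not part of the change set; the library below is a trimmed stand-in for the real table and mystery:latest is a made-up model name): a model found in the library is returned with its metadata attached, while an unknown model falls through as a bare symbol via seq-find's DEFAULT argument.

```elisp
;; Illustrative sketch: annotate known models, pass unknown ones through as-is.
(enrich-ollama-models
 '(llama3.2:latest mystery:latest)   ; what list-ollama-models might return
 '((llama3.2:latest                  ; one-entry stand-in for gptel--local-models
    :description "Instruction-tuned model"
    :context-window 128)))
;; => ((llama3.2:latest :description "Instruction-tuned model" :context-window 128)
;;     mystery:latest)
```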
@@ -330,13 +337,14 @@
   (gptel-model 'llama3.2:latest)
   ;; keep-sorted end
   :preface
+  (load "local-models.el")
   (gptel-make-anthropic "Claude"
     :stream t
     :key 'gptel-api-key-from-auth-source
     )
   (gptel-make-ollama "Ollama"
     :stream t
-    :models (list-ollama-models)
+    :models (enrich-ollama-models (list-ollama-models) gptel--local-models)
     )
   )
 
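Combined with the table added below, the :models change means gptel-make-ollama now receives a mix of annotated entries and bare symbols. A hedged sketch of the effective registration, assuming the local server reports llama3.2:latest plus a hypothetical custom:latest that is absent from the library:

```elisp
;; Roughly what the :models argument expands to after enrichment.
;; custom:latest is a hypothetical, unannotated model used only for illustration.
(gptel-make-ollama "Ollama"
  :stream t
  :models '((llama3.2:latest
             :description "Instruction-tuned model optimized for multilingual dialogue"
             :context-window 128
             :cutoff-date "2023-12")
            custom:latest))
```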
.config/emacs/site-lisp/local-models.el (new file, 37 lines)

@@ -0,0 +1,37 @@
+(defconst gptel--local-models
+  '(
+    ;; keep-sorted start
+    (
+     hf.co/Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2-GGUF:latest
+     :description "Uncensored model based on Llama-3.1-8b-Instruct"
+     :context-window 128
+     :cutoff-date "2023-12"
+     )
+    (
+     hf.co/TheBloke/MythoMax-L2-13B-GGUF:latest
+     :description "Proficient at both roleplaying and storywriting"
+     :context-window 32
+     )
+    (
+     hf.co/bartowski/cognitivecomputations_Dolphin-Mistral-24B-Venice-Edition-GGUF:latest
+     :description "Uncensored version of Mistral 24B"
+     :context-window 32
+     :cutoff-date "2023-10"
+     )
+    (
+     llama3.2:latest
+     :description "Instruction-tuned model optimized for multilingual dialogue"
+     :context-window 128
+     :cutoff-date "2023-12"
+     )
+    (
+     mollysama/rwkv-7-g0a3:13.3b
+     :description "Pure RNN reasoning model, suitable for post-training and fine-tuning"
+     :context-window 1000
+     :cutoff-date "2023-10"
+     )
+    ;; keep-sorted end
+    )
+  "List of known local models and associated properties.
+Refer to https://gptel.org/manual.html#models for a description of supported properties"
+  )
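Because each entry is a list headed by the model symbol, individual records can be read back with ordinary alist and plist accessors; a quick illustrative check (not part of the change):

```elisp
;; Look up one model's metadata and read a single property from its plist.
(plist-get (cdr (assq 'llama3.2:latest gptel--local-models)) :context-window)
;; => 128
```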
@@ -17,10 +17,7 @@ cargo_update() {
 }
 
 pipx_update() {
-  for venv in $(pipx list --json | jq --raw-output ".venvs | keys[]")
-  do
-    pipx upgrade "${venv}"
-  done
+  pipx upgrade-all
 }
 
 git_sync_update() {