Compare commits
ed44c92fc9...157a03687d
6 commits
| Author | SHA1 | Date |
|---|---|---|
| | 157a03687d | |
| | 9c4d290062 | |
| | 77f3b175ef | |
| | 23c8d41cbd | |
| | 8de98519ec | |
| | 73463a241f | |
3 changed files with 6 additions and 48 deletions
gptel configuration (Emacs Lisp):

```diff
@@ -306,20 +306,13 @@
 )
 
 (defun list-ollama-models ()
-  "Query the local Ollama server for the list of installed models."
+  "Query the local Ollama server for the list of installed models"
   (condition-case error-var
       (mapcar #'intern (mapcar #'car (mapcar #'split-string (cdr (process-lines "podllama" "list")))))
     (error
      (message "Failed to list local models: %s" error-var)
      ())))
 
-(defun enrich-ollama-models (available library)
-  "Enrich the available models with metadata from the library of known models."
-  (mapcar
-   (lambda (model)
-     (seq-find (lambda (x) (eq (car x) model)) library model))
-   available))
-
 (use-package gptel
   :hook
   ;; keep-sorted start
@@ -337,14 +330,13 @@
   (gptel-model 'llama3.2:latest)
   ;; keep-sorted end
   :preface
-  (load "local-models.el")
   (gptel-make-anthropic "Claude"
     :stream t
     :key 'gptel-api-key-from-auth-source
     )
   (gptel-make-ollama "Ollama"
     :stream t
-    :models (enrich-ollama-models (list-ollama-models) gptel--local-models)
+    :models (list-ollama-models)
     )
   )
 
```
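For context on the hunks above: the removed `enrich-ollama-models` looks up each symbol returned by `list-ollama-models` in a library of `(SYMBOL PROPERTY ...)` entries and falls back to the bare symbol when nothing matches. A minimal sketch of that behaviour, using a made-up library and model names rather than real `podllama list` output:

```elisp
;; Illustrative only: toy data standing in for `gptel--local-models',
;; and made-up model names.
(defconst my/toy-model-library
  '((llama3.2:latest :description "Example entry" :context-window 128)))

;; With the (now removed) `enrich-ollama-models', a model that has a
;; library entry is replaced by that entry; an unknown model passes
;; through as a bare symbol.
(enrich-ollama-models '(llama3.2:latest mystery-model:latest)
                      my/toy-model-library)
;; => ((llama3.2:latest :description "Example entry" :context-window 128)
;;     mystery-model:latest)
```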
local-models.el (deleted):

```diff
@@ -1,37 +0,0 @@
-(defconst gptel--local-models
-  '(
-    ;; keep-sorted start
-    (
-     hf.co/Orenguteng/Llama-3.1-8B-Lexi-Uncensored-V2-GGUF:latest
-     :description "Uncensored model based on Llama-3.1-8b-Instruct"
-     :context-window 128
-     :cutoff-date "2023-12"
-     )
-    (
-     hf.co/TheBloke/MythoMax-L2-13B-GGUF:latest
-     :description "Proficient at both roleplaying and storywriting"
-     :context-window 32
-     )
-    (
-     hf.co/bartowski/cognitivecomputations_Dolphin-Mistral-24B-Venice-Edition-GGUF:latest
-     :description "Uncensored version of Mistral 24B"
-     :context-window 32
-     :cutoff-date "2023-10"
-     )
-    (
-     llama3.2:latest
-     :description "Instruction-tuned model optimized for multilingual dialogue"
-     :context-window 128
-     :cutoff-date "2023-12"
-     )
-    (
-     mollysama/rwkv-7-g0a3:13.3b
-     :description "Pure RNN reasoning model, suitable for post-training and fine-tuning"
-     :context-window 1000
-     :cutoff-date "2023-10"
-     )
-    ;; keep-sorted end
-    )
-  "List of known local models and associated properties.
-Refer to https://gptel.org/manual.html#models for a description of supported properties"
-  )
```
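Each entry in the deleted `gptel--local-models` is a list whose head is the model symbol and whose tail is a property list (`:description`, `:context-window`, `:cutoff-date`), following the model properties described in the gptel manual linked from the docstring. As a sketch of that data shape only (assuming the constant is still loaded), a property can be read back with:

```elisp
;; Sketch: look up one entry of `gptel--local-models' and read a property.
(plist-get (cdr (assq 'llama3.2:latest gptel--local-models))
           :context-window)
;; => 128
```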
Package update script (shell):

```diff
@@ -17,7 +17,10 @@ cargo_update() {
 }
 
 pipx_update() {
-  pipx upgrade-all
+  for venv in $(pipx list --json | jq --raw-output ".venvs | keys[]")
+  do
+    pipx upgrade "${venv}"
+  done
 }
 
 git_sync_update() {
```