feat(ai): Add Gemini backend with Ollama fallback

Prioritize the Gemini backend if the GEMINI_KEY environment variable is present;
otherwise fall back to the local Ollama instance.
Author: kj
Date: 2025-10-02 14:27:45 -03:00
Parent: 9cce858cd5
Commit: 844e2965db
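The new :config branch keys off GEMINI_KEY being visible to the Emacs process itself, not just to an interactive shell. Below is a minimal sketch of how the variable could be provided from early init so that (getenv "GEMINI_KEY") is non-nil by the time the use-package block in the diff runs; the placeholder value and the idea of setting it from init.el are illustrative assumptions, not part of this commit.

;; Illustrative only: the key value is a placeholder, and setting it from
;; init.el is just one option (a login shell or desktop session exporting
;; GEMINI_KEY before Emacs starts works the same way).
(unless (getenv "GEMINI_KEY")
  (setenv "GEMINI_KEY" "<your-api-key>"))

;; This is the same check the gptel :config block performs below.
(getenv "GEMINI_KEY")  ; non-nil => the Gemini branch is taken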


@@ -12,17 +12,22 @@
 ;; LLM client (ollama, chatgpt, gemini, etc.)
 (use-package gptel
   :config
-  ;; (setq gptel-model 'gemma3:4b
-  ;;       gptel-backend (gptel-make-ollama "Ollama"
-  ;;                       :host "localhost:11434"
-  ;;                       :stream t
-  ;;                       :models '("mistral:latest"
-  ;;                                 "deepseek-r1:1.5b"
-  ;;                                 "deepcoder"
-  ;;                                 "dolphin-llama3:latest"
-  ;;                                 "gemma3:4b"
-  ;;                                 "llava:latest"))
-  gptel-default-mode 'org-mode
+  (if (getenv "GEMINI_KEY")
+      (setq gptel-model 'gemini-2.5-flash
+            gptel-backend (gptel-make-gemini "Gemini"
+                            :key (getenv "GEMINI_KEY")
+                            :stream t))
+    (setq gptel-model 'gemma3:4b
+          gptel-backend (gptel-make-ollama "Ollama"
+                          :host "localhost:11434"
+                          :stream t
+                          :models '("mistral:latest"
+                                    "deepseek-r1:1.5b"
+                                    "deepcoder"
+                                    "dolphin-llama3:latest"
+                                    "gemma3:4b"
+                                    "llava:latest"))))
+  (setq gptel-default-mode 'org-mode
   gptel-prompt-prefix-alist
   '((markdown-mode . "# User\n\n")
     (org-mode . "* User\n\n")
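For a quick sanity check after restarting Emacs, the two variables set in the hunk above can be inspected directly; this is just an evaluation sketch, not part of the commit.

;; Evaluate with M-: (eval-expression) after init has run.
(message "gptel model: %s, GEMINI_KEY %s"
         gptel-model
         (if (getenv "GEMINI_KEY") "present" "absent"))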