diff --git a/configs/init-ai.el b/configs/init-ai.el
index 03f64a2..525e3ae 100644
--- a/configs/init-ai.el
+++ b/configs/init-ai.el
@@ -12,7 +12,7 @@
 ;; Cliente LLM (ollama, chatgpt, gemini, etc.)
 (use-package gptel
   :config
-  (setq gptel-model 'gemma3n:e2b
+  (setq gptel-model 'gemma4:e2b
         gptel-backend (gptel-make-ollama "Ollama"
                         :host "localhost:11434"
                         :stream t
@@ -88,16 +88,22 @@
 ;;          ("S-<tab>" . 'copilot-accept-completion-by-word)))
 
 ;; El asistente más completo de todos: Tiene chat mpc, code completion, etc.
-(use-package eca
-  ;; :hook (prog-mode . eca-completion-mode)
-  :bind (("M-<tab>" . eca-complete)
-         :map eca-completion-map
-         ("C-<tab>" . eca-completion-accept)))
+;; (use-package eca
+;;   ;; :hook (prog-mode . eca-completion-mode)
+;;   :bind (("M-<tab>" . eca-complete)
+;;          :map eca-completion-map
+;;          ("C-<tab>" . eca-completion-accept)))
 
 (use-package gptel-magit
   :ensure t
   :hook (magit-mode . gptel-magit-install))
 
+(use-package gptel-autocomplete
+  :ensure (:host github :repo "JDNdeveloper/gptel-autocomplete")
+  :bind (("M-<tab>" . gptel-complete)
+         :map gptel-autocomplete-completion-map
+         ("C-<tab>" . gptel-accept-completion)))
+
 (use-package macher
   :ensure (:host github :repo "kmontag/macher")
   :custom