;;; init-ai.el --- Configuración de inteligencias artificiales -*- lexical-binding: t -*-

;; Author: kj <webmaster@outcontrol.net>
;; URL: https://git.kj2.me/kj/confi-emacs-actual

;;; Commentary:

;; Configuración para Inteligencia artificial en Emacs.

;;; Code:
;; Cliente LLM (ollama, chatgpt, gemini, etc.)
(use-package gptel
  :config
  ;; Default to a local Ollama backend with the locally available models.
  (setq gptel-model 'gemma3n:e2b
        gptel-backend (gptel-make-ollama "Ollama"
                        :host "localhost:11434"
                        :stream t
                        :models '("mistral:latest"
                                  "deepseek-r1:1.5b"
                                  "gemma3n:e2b"
                                  "gemma4:e2b"
                                  "cajina/gemma4_e2b-q2_k_xl:v01"
                                  "deepcoder"
                                  "glm-5.1:cloud"
                                  "gemma4:latest"
                                  "gemini-3-flash-preview:cloud")))
  ;; Prefer the hosted Gemini backend when an API key is present in the
  ;; environment; this overrides the Ollama defaults set above.
  (when (getenv "GEMINI_KEY")
    (setq gptel-model 'gemini-2.5-flash
          gptel-backend (gptel-make-gemini "Gemini"
                          :key (getenv "GEMINI_KEY")
                          :stream t)))
  ;; Chat buffers use Org mode; prompt/response headings per major mode.
  (setq gptel-default-mode 'org-mode
        gptel-prompt-prefix-alist
        '((markdown-mode . "# User\n\n")
          (org-mode . "* User\n\n")
          (text-mode . "# User\n\n"))
        gptel-response-prefix-alist
        '((markdown-mode . "# AI\n\n")
          (org-mode . "* AI\n\n")
          (text-mode . "# AI\n\n"))
        gptel-directives
        '((default . "You are a large language model living in Emacs and a helpful assistant. Respond concisely in Spanish.")))
  ;; Keep reasoning output disabled until this is resolved:
  ;; https://github.com/ragnard/gptel-magit/issues/8
  (setopt gptel-include-reasoning nil)

  (defun gptel-switch+model ()
    "Switch to gptel backend and model in a single completion prompt."
    (interactive)
    (let (choices)
      ;; Build one candidate per (backend, model) pair, labelled
      ;; "Backend → model" with faces for readability.
      (dolist (pair gptel--known-backends)
        (let* ((backend-name (car pair))
               (backend (cdr pair))
               (models (and (fboundp 'gptel-backend-models)
                            (gptel-backend-models backend))))
          (when models
            (dolist (model models)
              (push (cons (format "%s → %s"
                                  (propertize backend-name
                                              'face 'font-lock-keyword-face)
                                  (propertize (symbol-name model)
                                              'face 'font-lock-function-name-face))
                          (cons backend-name model))
                    choices)))))
      ;; `push' builds the list backwards; restore registration order.
      (setq choices (nreverse choices))
      (let* ((choice (completing-read "Model: " (mapcar #'car choices) nil t))
             (sel (cdr (assoc choice choices))))
        (setq gptel-backend (cdr (assoc (car sel) gptel--known-backends))
              gptel-model (cdr sel))
        (message "gptel set to %s:%s" (car sel) (cdr sel))))))
;; (use-package copilot
;;   :hook (prog-mode . copilot-mode)
;;   :bind (:map copilot-completion-map
;;          ("C-g" . 'copilot-clear-overlay)
;;          ("C-<return>" . 'copilot-accept-completion)
;;          ("S-<return>" . 'copilot-accept-completion-by-word)))

;; El asistente más completo de todos: Tiene chat, MCP, code completion, etc.
;; ECA client: trigger completion with M-RET, accept with C-RET in its
;; completion map.  The prog-mode hook is intentionally left disabled.
(use-package eca
  ;; :hook (prog-mode . eca-completion-mode)
  :bind (("M-<return>" . eca-complete)
         :map eca-completion-map
         ("C-<return>" . eca-completion-accept)))
;; gptel integration for Magit; installs itself whenever `magit-mode'
;; is entered.
(use-package gptel-magit
  :ensure t
  :hook (magit-mode . gptel-magit-install))
;; macher, installed directly from GitHub (elpaca/straight-style recipe);
;; use the Org-mode UI for its action buffers.
(use-package macher
  :ensure (:host github :repo "kmontag/macher")
  :custom
  (macher-action-buffer-ui 'org))
(provide 'init-ai)
;;; init-ai.el ends here