;;; init-ai.el --- Artificial intelligence configuration -*- lexical-binding: t -*-
;; Author: kj <webmaster@outcontrol.net>
;; URL: https://git.kj2.me/kj/confi-emacs-actual
;;; Commentary:
;; Configuration for artificial intelligence in Emacs, mainly for chat use.
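;; Chats are started with `M-x gptel' (which opens a dedicated chat buffer);
;; `gptel-send' sends the prompt at point from any buffer.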
;;; Code:
;; LLM client (Ollama, ChatGPT, Gemini, etc.)
(use-package gptel
  :config
  (setq ;; gptel-model 'gemma3:4b
        ;; gptel-backend (gptel-make-ollama "Ollama"
        ;;                 :host "localhost:11434"
        ;;                 :stream t
        ;;                 :models '("mistral:latest"
        ;;                           "deepseek-r1:1.5b"
        ;;                           "deepcoder"
        ;;                           "dolphin-llama3:latest"
        ;;                           "gemma3:4b"
        ;;                           "llava:latest"))
        gptel-default-mode 'org-mode
        gptel-prompt-prefix-alist
        '((markdown-mode . "# User\n\n")
          (org-mode . "* User\n\n")
          (text-mode . "# User\n\n"))
        gptel-response-prefix-alist
        '((markdown-mode . "# AI\n\n")
          (org-mode . "* AI\n\n")
          (text-mode . "# AI\n\n"))
        gptel-directives
        '((default . "You are a large language model living in Emacs and a helpful assistant. Respond concisely in Spanish.")))
  ;; Keep until this is resolved: https://github.com/ragnard/gptel-magit/issues/8
  (setopt gptel-include-reasoning nil))
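;; Sketch of pointing gptel back at the local Ollama backend commented out
;; above (assumes an Ollama server on localhost:11434 with the gemma3:4b
;; model pulled; kept commented so the current defaults are not overridden):
;;
;; (setq gptel-backend (gptel-make-ollama "Ollama"
;;                       :host "localhost:11434"
;;                       :stream t
;;                       :models '("gemma3:4b"))
;;       gptel-model 'gemma3:4b)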
;; GitHub Copilot completions, triggered manually with M-<return>.
(use-package copilot
  :bind (("M-<return>" . copilot-complete)
         :map copilot-completion-map
         ("C-g" . copilot-clear-overlay)
         ("C-<return>" . copilot-accept-completion)
         ("C-<tab>" . copilot-accept-completion-by-word)))
;; AI-generated commit messages in Magit, via gptel.
(use-package gptel-magit
  :ensure t
  :hook (magit-mode . gptel-magit-install))
(provide 'init-ai)
;;; init-ai.el ends here