;;; init-ai.el --- Artificial intelligence configuration -*- lexical-binding: t -*-

;; Author: kj <webmaster@outcontrol.net>
;; URL: https://git.kj2.me/kj/confi-emacs-actual

;;; Commentary:

;; Configuration for artificial intelligence in Emacs.

;;; Code:

;; LLM client (Ollama, ChatGPT, Gemini, etc.)
(use-package gptel
  :config
  ;; (setq gptel-model 'gemma3:4b
  ;;       gptel-backend (gptel-make-ollama "Ollama"
  ;;                       :host "localhost:11434"
  ;;                       :stream t
  ;;                       :models '("mistral:latest"
  ;;                                 "deepseek-r1:1.5b"
  ;;                                 "deepcoder"
  ;;                                 "dolphin-llama3:latest"
  ;;                                 "gemma3:4b"
  ;;                                 "llava:latest")))
  (setq gptel-default-mode 'org-mode
        gptel-prompt-prefix-alist
        '((markdown-mode . "# User\n\n")
          (org-mode . "* User\n\n")
          (text-mode . "# User\n\n"))
        gptel-response-prefix-alist
        '((markdown-mode . "# AI\n\n")
          (org-mode . "* AI\n\n")
          (text-mode . "# AI\n\n"))
        gptel-directives
        '((default . "You are a large language model living in Emacs and a helpful assistant. Respond concisely in Spanish.")))
  ;; Keep this until https://github.com/ragnard/gptel-magit/issues/8 is resolved.
  (setopt gptel-include-reasoning nil))
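
;; Usage note: with the settings above, `M-x gptel' opens a chat buffer in
;; Org mode, and `gptel-send' / `gptel-menu' send the buffer or region to the
;; configured backend.  A minimal sketch of a global entry point; the "C-c g"
;; key choice is an assumption, not part of this config:
;; (global-set-key (kbd "C-c g") #'gptel-menu)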

;; GitHub Copilot inline completions
(use-package copilot
  :bind (("M-<return>" . copilot-complete)
         :map copilot-completion-map
         ("C-g" . copilot-clear-overlay)
         ("C-<return>" . copilot-accept-completion)
         ("C-<tab>" . copilot-accept-completion-by-word)))

;; Generate commit messages with gptel from Magit
(use-package gptel-magit
  :ensure t
  :hook (magit-mode . gptel-magit-install))
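
;; Once installed via the hook above, gptel-magit adds a commit-message
;; generation action to Magit's commit flow; see the package README for the
;; exact binding.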

;; macher: project-aware LLM editing built on top of gptel
(use-package macher
  :ensure (:host github :repo "kmontag/macher")
  :custom
  (macher-action-buffer-ui 'org))
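
;; `macher-action-buffer-ui' controls how macher presents its action buffer
;; (presumably where proposed edits and patches are shown); 'org selects an
;; Org-based layout.
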
(provide 'init-ai)
;;; init-ai.el ends here