summary | refs | log | tree | commit | diff
diff options
context:
space:
mode:
author    bd <bdunahu@operationnull.com>  2025-10-12 22:52:28 -0400
committer bd <bdunahu@operationnull.com>  2025-10-12 22:54:09 -0400
commit    f1a4ebbbc3c590ee439ffa27b9286384ff940cb0 (patch)
tree      4d2baa15c08029f0c2edbb482fdfc486d6605988
parent    1f224a94dc70d415ec821acc6081fb99b1a2e230 (diff)
Add llama-cpp and gptel
-rw-r--r--  .config/emacs/init.el                                                  |  2
-rw-r--r--  .config/emacs/modules/bd--chat.el (renamed from .config/emacs/modules/bd--irc.el) | 54
-rw-r--r--  .config/guix/modules/services/emacs.scm                                |  4
3 files changed, 57 insertions, 3 deletions
diff --git a/.config/emacs/init.el b/.config/emacs/init.el
index b8d911b..c46441a 100644
--- a/.config/emacs/init.el
+++ b/.config/emacs/init.el
@@ -103,7 +103,7 @@
(require 'bd--project)
(require 'bd--files)
(require 'bd--dictionary)
-(require 'bd--irc)
+(require 'bd--chat)
(require 'bd--shells)
(require 'bd--minibuffer)
(require 'bd--buffer)
diff --git a/.config/emacs/modules/bd--irc.el b/.config/emacs/modules/bd--chat.el
index fe2d58f..a55f10a 100644
--- a/.config/emacs/modules/bd--irc.el
+++ b/.config/emacs/modules/bd--chat.el
@@ -58,6 +58,56 @@ channel, which is issuing the PART command."
:port 6697
:encryption tls))))
+(use-package gptel
+ :bind (("C-c g" . gptel-menu))
+ :config
+ (defvar bd/llama-cpp-buffer-name "*llama-cpp-proc*")
+ (defvar bd/llama-cpp-reasoning-buffer-name "*llama-cpp-reasoning*")
+ (defvar bd/llama-cpp-port "4568")
+ (defvar bd/llama-cpp-threads "8")
+ (defvar bd/llama-cpp-model-file "~/.config/guix/assets/Qwen3-4B.Q3_K_M.gguf")
+ (defvar bd/llama-cpp-reasoning-budget nil)
+ (defun bd/gptel-start-backend ()
+ (interactive)
+ (let ((process (get-buffer-process bd/llama-cpp-buffer-name)))
+ (if process
+ (message "llama-cpp process is already running!")
+ (progn
+ (start-process-shell-command
+ "llama-cpp" bd/llama-cpp-buffer-name
+ (concat "llama-server --reasoning-budget "
+ (if bd/llama-cpp-reasoning-budget "-1" "0")
+ " --port " bd/llama-cpp-port
+ " -t " bd/llama-cpp-threads
+ " -m " bd/llama-cpp-model-file)))
+ (unless (get-buffer bd/llama-cpp-reasoning-buffer-name)
+ (generate-new-buffer bd/llama-cpp-reasoning-buffer-name)))))
+ (defun bd/gptel-stop-backend ()
+ (interactive)
+ (let ((process (get-buffer-process bd/llama-cpp-buffer-name)))
+ (if process
+ (progn
+ (delete-process process)
+ (kill-buffer bd/llama-cpp-buffer-name)
+ (message "Killed %s." process))
+ (message "No llama-cpp process is running."))))
+ (defun bd/gptel-restart-backend ()
+ (interactive)
+ (bd/gptel-stop-backend)
+ (bd/gptel-start-backend))
+
+ (setopt gptel-model 'qwen-4b
+ gptel-backend (gptel-make-openai "llama-cpp"
+ :stream t
+ :protocol "http"
+ :host (concat "localhost:" bd/llama-cpp-port)
+ :models '(qwen-4b))
+ gptel-max-tokens 500
+ gptel-include-reasoning bd/llama-cpp-reasoning-buffer-name)
+
+ (gptel-make-preset 'default
+ :system "You are a wolf (furry) named Evka hired as a secretary to complete language-based tasks. First describe an action your character does, e.x.: *I tap my claws on the desk*. Finish by responding to the task as tersely as possible, in character."))
+
-(provide 'bd--irc)
-;;; bd--irc.el ends here
+(provide 'bd--chat)
+;;; bd--chat.el ends here
diff --git a/.config/guix/modules/services/emacs.scm b/.config/guix/modules/services/emacs.scm
index 9c42906..fa70880 100644
--- a/.config/guix/modules/services/emacs.scm
+++ b/.config/guix/modules/services/emacs.scm
@@ -21,6 +21,7 @@
cpp
gnupg
python-xyz
+ machine-learning
mail
lisp)
@@ -39,6 +40,7 @@
emacs-emms
emacs-exwm
emacs-f
+ emacs-gptel
emacs-guix
emacs-hydra
emacs-jeison
@@ -57,6 +59,8 @@
ccls ;; for eglot
python-lsp-server ;;
+
+ llama-cpp ;; for emacs-gptel
))