author    bd <bdunahu@operationnull.com>  2025-10-12 22:52:28 -0400
committer bd <bdunahu@operationnull.com>  2025-10-12 22:54:09 -0400
commit    f1a4ebbbc3c590ee439ffa27b9286384ff940cb0 (patch)
tree      4d2baa15c08029f0c2edbb482fdfc486d6605988 /.config/emacs/modules/bd--chat.el
parent    1f224a94dc70d415ec821acc6081fb99b1a2e230 (diff)
Add llama-cpp and gptel
Diffstat (limited to '.config/emacs/modules/bd--chat.el')
-rw-r--r--  .config/emacs/modules/bd--chat.el  113
1 file changed, 113 insertions, 0 deletions
diff --git a/.config/emacs/modules/bd--chat.el b/.config/emacs/modules/bd--chat.el
new file mode 100644
index 0000000..a55f10a
--- /dev/null
+++ b/.config/emacs/modules/bd--chat.el
@@ -0,0 +1,113 @@
+;;; -*- lexical-binding: t; -*-
+;;; Commentary:
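+
+;; Chat clients: IRC via rcirc through a ZNC bouncer, and LLM chat via
+;; gptel backed by a local llama.cpp (llama-server) process.
+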
+;;; Code:
+
+
+(require 'fill-column)
+(use-package rcirc
+  :bind (:map rcirc-mode-map
+         ("C-c j" . #'bd/rcirc-jump-net)
+         ("C-c q" . #'bd/rcirc-detach-buffer))
+  :hook
+  ((rcirc-mode . (lambda ()
+                   (setq-local fill-column-desired-width 80)
+                   (fill-column-mode)
+                   (rcirc-omit-mode))))
+  :config
+  (defvar bd/rcirc-networks '("libera" "furnet")
+    "IRC networks configured on the ZNC bouncer.")
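+  ;; Both commands below control ZNC by sending PRIVMSGs to its
+  ;; virtual *status user.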
+  (defun bd/rcirc-jump-net ()
+    "Prompt for an IRC network from `bd/rcirc-networks'.
+Then ask ZNC to jump the current connection to that network."
+    (interactive)
+    (let ((buffer (current-buffer)))
+      (when (and (buffer-local-value 'rcirc-server-buffer buffer)
+                 (eq (process-status (rcirc-buffer-process)) 'open))
+        (let ((target (completing-read "Jump to: " bd/rcirc-networks)))
+          (when (stringp target)
+            (rcirc-send-string (rcirc-buffer-process)
+                               "PRIVMSG" "*status" :
+                               (concat "JUMPNETWORK " target)))))))
+  (defun bd/rcirc-detach-buffer ()
+    "Detach the current rcirc channel through ZNC, then kill its buffer.
+This bypasses the default behavior for killing an active channel
+buffer, which is to issue the PART command."
+    (interactive)
+    (let ((buffer (current-buffer)))
+      (when (and (rcirc-buffer-process)
+                 (eq (process-status (rcirc-buffer-process)) 'open))
+        (with-rcirc-server-buffer
+          (setq rcirc-buffer-alist
+                (rassq-delete-all buffer rcirc-buffer-alist)))
+        (rcirc-update-short-buffer-names)
+        (when (rcirc-channel-p rcirc-target)
+          (rcirc-send-string (rcirc-buffer-process)
+                             "PRIVMSG" "*status" :
+                             (concat "DETACH " rcirc-target))))
+      (setq rcirc-target nil)
+      (kill-buffer buffer)))
+  (setopt rcirc-fill-column 80
+          rcirc-omit-threshold 10
+          rcirc-reconnect-delay 60
+          rcirc-omit-responses '("JOIN" "PART" "QUIT" "NICK" "AWAY")
+          rcirc-track-minor-mode t
+          rcirc-track-ignore-server-buffer-flag t
+          rcirc-server-alist
+          '(("operationnull.com"
+             :nick "Gondul"
+             :user-name "Gondul"
+             :port 6697
+             :encryption tls))))
+
+(use-package gptel
+  :bind (("C-c g" . gptel-menu))
+  :config
+  (defvar bd/llama-cpp-buffer-name "*llama-cpp-proc*"
+    "Buffer that receives output from the llama-server process.")
+  (defvar bd/llama-cpp-reasoning-buffer-name "*llama-cpp-reasoning*"
+    "Buffer that collects the model's reasoning output.")
+  (defvar bd/llama-cpp-port "4568"
+    "Port (as a string) on which the local llama-server listens.")
+  (defvar bd/llama-cpp-threads "8"
+    "Number of threads (as a string) passed to llama-server via -t.")
+  (defvar bd/llama-cpp-model-file "~/.config/guix/assets/Qwen3-4B.Q3_K_M.gguf"
+    "Path to the GGUF model file served by llama-server.")
+  (defvar bd/llama-cpp-reasoning-budget nil
+    "If non-nil, allow unlimited reasoning; otherwise disable it.")
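+  ;; The commands below manage a local llama-server (llama.cpp) process;
+  ;; the model path, port, and thread count above are machine-specific.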
+  (defun bd/gptel-start-backend ()
+    "Start a local llama-server process unless one is already running.
+Also create the reasoning buffer if it does not yet exist."
+    (interactive)
+    (let ((process (get-buffer-process bd/llama-cpp-buffer-name)))
+      (if process
+          (message "llama-cpp process is already running!")
+        (start-process-shell-command
+         "llama-cpp" bd/llama-cpp-buffer-name
+         (concat "llama-server --reasoning-budget "
+                 (if bd/llama-cpp-reasoning-budget "-1" "0")
+                 " --port " bd/llama-cpp-port
+                 " -t " bd/llama-cpp-threads
+                 " -m " bd/llama-cpp-model-file))
+        (unless (get-buffer bd/llama-cpp-reasoning-buffer-name)
+          (generate-new-buffer bd/llama-cpp-reasoning-buffer-name)))))
+  (defun bd/gptel-stop-backend ()
+    "Kill the running llama-server process and its output buffer."
+    (interactive)
+    (let ((process (get-buffer-process bd/llama-cpp-buffer-name)))
+      (if process
+          (progn
+            (delete-process process)
+            (kill-buffer bd/llama-cpp-buffer-name)
+            (message "Killed %s." process))
+        (message "No llama-cpp process is running."))))
+  (defun bd/gptel-restart-backend ()
+    "Stop any running llama-server process, then start a new one."
+    (interactive)
+    (bd/gptel-stop-backend)
+    (bd/gptel-start-backend))
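+  ;; Nothing starts the server automatically; run M-x bd/gptel-start-backend
+  ;; (or bd/gptel-restart-backend) before sending the first gptel request.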
+
+  (setopt gptel-model 'qwen-4b
+          gptel-backend (gptel-make-openai "llama-cpp"
+                          :stream t
+                          :protocol "http"
+                          :host (concat "localhost:" bd/llama-cpp-port)
+                          :models '(qwen-4b))
+          gptel-max-tokens 500
+          gptel-include-reasoning bd/llama-cpp-reasoning-buffer-name)
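+  ;; Giving gptel-include-reasoning a buffer name routes the model's
+  ;; reasoning text into that buffer instead of the chat.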
+
+  (gptel-make-preset 'default
+    :system "You are a wolf (furry) named Evka hired as a secretary to complete language-based tasks. First describe an action your character does, e.g. *I tap my claws on the desk*. Finish by responding to the task as tersely as possible, in character."))
+
+
+(provide 'bd--chat)
+;;; bd--chat.el ends here