diff --git a/ext/doom/config.el b/ext/doom/config.el
index 480a238..a3d0f4d 100644
--- a/ext/doom/config.el
+++ b/ext/doom/config.el
@@ -213,7 +213,7 @@
 (defvar categories-alist
   '(
     (:name "org" :modes (org-mode org-agenda-mode) :buffer-name () :type global)
-    (:name "gptel" :modes nil :buffer-name ("*llama-cpp*" "*gptel*" "*gptel-context*" "*gptel-query*") :type global)
+    (:name "gptel" :modes nil :buffer-name ("*llama-cpp*" "*gptel*" "*groq*" "*gptel-context*" "*gptel-query*") :type global)
     (:name "terminal" :modes (vterm-mode term-mode) :buffer-name () :type per-project)
     (:name "misc" :modes (+doom-dashboard-mode native-comp-limple-mode messages-buffer-mode) :buffer-name () :type global)
     (:name "misc" :modes (special-mode fundamental-mode) :buffer-name () :type per-project)
@@ -291,12 +291,29 @@
 (use-package! gptel
   :init
   :config
-  (setq! gptel-backend (gptel-make-openai "gptel"
+  (defun read-api-secret (secret-file)
+    (with-temp-buffer
+      (insert-file-contents (expand-file-name secret-file "~"))
+      (string-trim (buffer-substring-no-properties (point-min) (point-max)))))
+
+
+  (setq! gptel-backend (gptel-make-openai "groq"
+                         :host "api.groq.com"
+                         :endpoint "/openai/v1/chat/completions"
                          :stream t
-                         :protocol "http"
-                         :host "100.64.0.3:8080"
-                         :models '("default"))
-         gptel-model "default")
+                         :key (read-api-secret ".groq_api_key")
+                         :models '("llama3-70b-8192"
+                                   "mixtral-8x7b-32768"
+                                   "llama3-8b-8192"))
+         gptel-model "llama3-70b-8192"
+         )
+
+  (gptel-make-openai "llama-cpp"
+    :stream t
+    :protocol "http"
+    :host "100.64.0.3:8080"
+    :models '("llama-cpp-model"))
+
   (setq! gptel-expert-commands t)
   (setq! gptel-temperature 0.2)
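
Note, not part of the patch: the second gptel-make-openai call registers the local llama.cpp backend, but its return value is not bound anywhere, so selecting it from Lisp later means re-finding the backend object. Below is a minimal sketch of one way to keep a handle and switch to it with a command, assuming the same host and model strings as in the diff; the my/ names are hypothetical helpers, not existing gptel API.

;; Sketch only: bind the llama.cpp backend object (same call as in the
;; patch) so it can be selected programmatically later.
(defvar my/gptel-llama-cpp-backend
  (gptel-make-openai "llama-cpp"
    :stream t
    :protocol "http"
    :host "100.64.0.3:8080"          ; local llama.cpp server from the diff
    :models '("llama-cpp-model"))
  "Handle to the local llama.cpp gptel backend.")

(defun my/gptel-use-llama-cpp ()
  "Point gptel at the local llama.cpp server."
  (interactive)
  (setq gptel-backend my/gptel-llama-cpp-backend
        gptel-model "llama-cpp-model"))

With something like this in place, M-x my/gptel-use-llama-cpp flips gptel to the local server, while the "groq" backend set via setq! gptel-backend remains the startup default.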