gptel: Include more API parameters

gptel.el (gptel--system-message, gptel--system-message-alist,
gptel--model, gptel--temperature, gptel--max-tokens,
gptel--request-data): Add new buffer-local variables to hold API
parameters.  Generating the full request data plist is now done in a
separate function, `gptel--request-data'.
Karthik Chikmagalur 2023-03-08 19:20:00 -08:00
parent 172059060a
commit 65e6d73372
2 changed files with 24 additions and 12 deletions
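
To make the new knobs concrete, here is a minimal usage sketch (not part of the commit) of how the buffer-local parameters introduced below might be adjusted in a chat buffer; the variable names come from the diff, the values are arbitrary examples:

    ;; Illustration only; example values:
    (setq-local gptel--model "gpt-3.5-turbo"   ; model to query
                gptel--temperature 0.7         ; sampling temperature
                gptel--max-tokens 256)         ; cap on completion length

Because the variables are buffer-local, different chat buffers can carry different settings, and `gptel--request-data' (added below) picks them up when it builds the request.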

gptel-curl.el

@@ -45,11 +45,7 @@ PROMPTS is the data to send, TOKEN is a unique identifier."
          (list "--location" "--silent" "--compressed" "--disable"))
         (url "https://api.openai.com/v1/chat/completions")
         (data (encode-coding-string
-               (json-encode
-                `(:model "gpt-3.5-turbo"
-                  ;; :temperature 1.0
-                  ;; :top_p 1.0
-                  :messages [,@prompts]))
+               (json-encode (gptel--request-data prompts))
                'utf-8))
         (api-key
          (cond
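
For illustration (not in the commit): with the default parameter values and a single hypothetical user message, the payload built here would serialize roughly as:

    (json-encode (gptel--request-data '((:role "user" :content "Hello"))))
    ;; => "{\"model\":\"gpt-3.5-turbo\",\"messages\":[{\"role\":\"user\",\"content\":\"Hello\"}],\"temperature\":1.0}"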

gptel.el

@@ -90,6 +90,18 @@ When set to nil, it is inserted all at once.
 (defvar gptel-prompt-string "### ")
 (aio-defun gptel-send ()
+;; Model and interaction parameters
+(defvar-local gptel--system-message
+  "You are a large language model living in Emacs and a helpful assistant. Respond concisely.")
+(defvar gptel--system-message-alist
+  `((default . ,gptel--system-message)
+    (programming . "You are a large language model and a careful programmer. Respond only with code unless explicitly asked.")
+    (writing . "You are a large language model and a writing assistant. Respond concisely.")
+    (chat . "You are a large language model and a conversation partner. Respond concisely."))
+  "Prompt templates (directives).")
+(defvar-local gptel--max-tokens nil)
+(defvar-local gptel--model "gpt-3.5-turbo")
+(defvar-local gptel--temperature 1.0)
+(defvar-local gptel--num-messages-to-send nil)
 (defsubst gptel--numberize (val)
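
A usage sketch (assumed, not shown in the commit): one of the directives above can be selected per buffer with stock Emacs Lisp, for example:

    ;; Switch the current buffer to the "programming" directive:
    (setq-local gptel--system-message
                (alist-get 'programming gptel--system-message-alist))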
@@ -173,6 +185,16 @@ instead."
           gptel--system-message))
        prompts)))))
+(defun gptel--request-data (prompts)
+  "JSON encode PROMPTS for sending to ChatGPT."
+  (let ((prompts-plist
+         `(:model ,gptel--model
+           :messages [,@prompts])))
+    (when gptel--temperature
+      (plist-put prompts-plist :temperature (gptel--numberize gptel--temperature)))
+    (when gptel--max-tokens
+      (plist-put prompts-plist :max_tokens (gptel--numberize gptel--max-tokens)))
+    prompts-plist))
 (aio-defun gptel--get-response (prompts)
   "Fetch response for PROMPTS from ChatGPT.
@@ -189,13 +211,7 @@ Return the message received."
          `(("Content-Type" . "application/json")
            ("Authorization" . ,(concat "Bearer " api-key))))
         (url-request-data
-         (encode-coding-string
-          (json-encode
-           `(:model "gpt-3.5-turbo"
-             ;; :temperature 1.0
-             ;; :top_p 1.0
-             :messages [,@prompts]))
-          'utf-8)))
+         (encode-coding-string (json-encode (gptel--request-data prompts)) 'utf-8)))
    (pcase-let ((`(,_ . ,response-buffer)
                 (aio-await
                  (aio-url-retrieve "https://api.openai.com/v1/chat/completions"))))
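
A final illustrative note, not part of the diff: both the curl and url-retrieve transports now run the same plist through `json-encode' and then `encode-coding-string', so the request body goes out as raw UTF-8 bytes even when a prompt contains non-ASCII text, e.g.:

    (encode-coding-string (json-encode '(:content "héllo")) 'utf-8)
    ;; => a unibyte string, "{\"content\":\"h\303\251llo\"}", ready for the wire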