gptel: Include more API parameters
gptel.el (gptel--system-message, gptel--system-message-alist, gptel--model, gptel--temperature, gptel--max-tokens, gptel--request-data): Add new buffer-local variables to hold API parameters. Generating the full request data plist is now done in a separate function, `gptel--request-data'.
This commit is contained in:
parent
172059060a
commit
65e6d73372
2 changed files with 24 additions and 12 deletions
|
@@ -45,11 +45,7 @@ PROMPTS is the data to send, TOKEN is a unique identifier."
|
||||||
(list "--location" "--silent" "--compressed" "--disable"))
|
(list "--location" "--silent" "--compressed" "--disable"))
|
||||||
(url "https://api.openai.com/v1/chat/completions")
|
(url "https://api.openai.com/v1/chat/completions")
|
||||||
(data (encode-coding-string
|
(data (encode-coding-string
|
||||||
(json-encode
|
(json-encode (gptel--request-data prompts))
|
||||||
`(:model "gpt-3.5-turbo"
|
|
||||||
;; :temperature 1.0
|
|
||||||
;; :top_p 1.0
|
|
||||||
:messages [,@prompts]))
|
|
||||||
'utf-8))
|
'utf-8))
|
||||||
(api-key
|
(api-key
|
||||||
(cond
|
(cond
|
||||||
|
|
30
gptel.el
30
gptel.el
|
@@ -90,6 +90,18 @@ When set to nil, it is inserted all at once.
|
||||||
(defvar gptel-prompt-string "### ")
|
(defvar gptel-prompt-string "### ")
|
||||||
|
|
||||||
(aio-defun gptel-send ()
|
(aio-defun gptel-send ()
|
||||||
|
;; Model and interaction parameters
|
||||||
|
(defvar-local gptel--system-message
|
||||||
|
"You are a large language model living in Emacs and a helpful assistant. Respond concisely.")
|
||||||
|
(defvar gptel--system-message-alist
|
||||||
|
`((default . ,gptel--system-message)
|
||||||
|
(programming . "You are a large language model and a careful programmer. Respond only with code unless explicitly asked.")
|
||||||
|
(writing . "You are a large language model and a writing assistant. Respond concisely.")
|
||||||
|
(chat . "You are a large language model and a conversation partner. Respond concisely."))
|
||||||
|
"Prompt templates (directives).")
|
||||||
|
(defvar-local gptel--max-tokens nil)
|
||||||
|
(defvar-local gptel--model "gpt-3.5-turbo")
|
||||||
|
(defvar-local gptel--temperature 1.0)
|
||||||
(defvar-local gptel--num-messages-to-send nil)
|
(defvar-local gptel--num-messages-to-send nil)
|
||||||
|
|
||||||
(defsubst gptel--numberize (val)
|
(defsubst gptel--numberize (val)
|
||||||
|
@@ -173,6 +185,16 @@ instead."
|
||||||
gptel--system-message))
|
gptel--system-message))
|
||||||
prompts)))))
|
prompts)))))
|
||||||
|
|
||||||
|
(defun gptel--request-data (prompts)
|
||||||
|
"JSON encode PROMPTS for sending to ChatGPT."
|
||||||
|
(let ((prompts-plist
|
||||||
|
`(:model ,gptel--model
|
||||||
|
:messages [,@prompts])))
|
||||||
|
(when gptel--temperature
|
||||||
|
(plist-put prompts-plist :temperature (gptel--numberize gptel--temperature)))
|
||||||
|
(when gptel--max-tokens
|
||||||
|
(plist-put prompts-plist :max_tokens (gptel--numberize gptel--max-tokens)))
|
||||||
|
prompts-plist))
|
||||||
|
|
||||||
(aio-defun gptel--get-response (prompts)
|
(aio-defun gptel--get-response (prompts)
|
||||||
"Fetch response for PROMPTS from ChatGPT.
|
"Fetch response for PROMPTS from ChatGPT.
|
||||||
|
@@ -189,13 +211,7 @@ Return the message received."
|
||||||
`(("Content-Type" . "application/json")
|
`(("Content-Type" . "application/json")
|
||||||
("Authorization" . ,(concat "Bearer " api-key))))
|
("Authorization" . ,(concat "Bearer " api-key))))
|
||||||
(url-request-data
|
(url-request-data
|
||||||
(encode-coding-string
|
(encode-coding-string (json-encode (gptel--request-data prompts)) 'utf-8)))
|
||||||
(json-encode
|
|
||||||
`(:model "gpt-3.5-turbo"
|
|
||||||
;; :temperature 1.0
|
|
||||||
;; :top_p 1.0
|
|
||||||
:messages [,@prompts]))
|
|
||||||
'utf-8)))
|
|
||||||
(pcase-let ((`(,_ . ,response-buffer)
|
(pcase-let ((`(,_ . ,response-buffer)
|
||||||
(aio-await
|
(aio-await
|
||||||
(aio-url-retrieve "https://api.openai.com/v1/chat/completions"))))
|
(aio-url-retrieve "https://api.openai.com/v1/chat/completions"))))
|
||||||
|
|
Loading…
Add table
Reference in a new issue