;;; gptel-openai.el --- ChatGPT support for gptel  -*- lexical-binding: t; -*-

;; Copyright (C) 2023 Karthik Chikmagalur

;; Author: Karthik Chikmagalur <karthikchikmagalur@gmail.com>
;; Keywords:

;; This program is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.

;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with this program.  If not, see <https://www.gnu.org/licenses/>.

;;; Commentary:

;; This file adds support for the ChatGPT API to gptel.

;;; Code:
(require 'cl-generic)
(require 'map)

(defvar gptel-model)
(defvar gptel-stream)
(defvar gptel-use-curl)
(defvar gptel-backend)
(defvar gptel-temperature)
(defvar gptel-max-tokens)
(defvar gptel--system-message)
(defvar gptel--known-backends)
(defvar json-object-type)

(declare-function gptel--get-api-key "gptel")
(declare-function prop-match-value "text-property-search")
(declare-function text-property-search-backward "text-property-search")
(declare-function json-read "json")

;;; Common backend struct for LLM support
(cl-defstruct
    (gptel-backend (:constructor gptel--make-backend)
                   (:copier gptel--copy-backend))
  name host header protocol stream
  endpoint key models url)
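
;; Note (illustrative, not part of the original file): `cl-defstruct'
;; generates one accessor per slot, e.g. (gptel-backend-host backend) and
;; (gptel-backend-stream backend), along with the `gptel--make-backend'
;; constructor and `gptel--copy-backend' copier named above.  The
;; backend-specific structs below inherit these slots via :include.
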
;;; OpenAI (ChatGPT)
(cl-defstruct (gptel-openai (:constructor gptel--make-openai)
                            (:copier nil)
                            (:include gptel-backend)))

(cl-defmethod gptel-curl--parse-stream ((_backend gptel-openai) _info)
  (let* ((json-object-type 'plist)
         (content-strs))
    (condition-case nil
        (while (re-search-forward "^data:" nil t)
          (save-match-data
            (unless (looking-at " *\\[DONE\\]")
              (when-let* ((response (json-read))
                          (delta (map-nested-elt
                                  response '(:choices 0 :delta)))
                          (content (plist-get delta :content)))
                (push content content-strs)))))
      (error
       (goto-char (match-beginning 0))))
    (apply #'concat (nreverse content-strs))))
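
;; Illustrative sketch, not part of the original file: each streaming chunk
;; handled above arrives as a server-sent event whose JSON payload nests the
;; text fragment under (:choices 0 :delta :content), and the stream ends with
;; a "data: [DONE]" sentinel.  Evaluating the parser against one such line:
;;
;; (require 'json)
;; (with-temp-buffer
;;   (insert "data: {\"choices\":[{\"delta\":{\"content\":\"Hello\"}}]}\n")
;;   (goto-char (point-min))
;;   (gptel-curl--parse-stream (gptel--make-openai :name "test") nil))
;; => "Hello"
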
(cl-defmethod gptel--parse-response ((_backend gptel-openai) response _info)
  (map-nested-elt response '(:choices 0 :message :content)))

(cl-defmethod gptel--request-data ((_backend gptel-openai) prompts)
  "JSON encode PROMPTS for sending to ChatGPT."
  (let ((prompts-plist
         `(:model ,gptel-model
           :messages [,@prompts]
           :stream ,(or (and gptel-stream gptel-use-curl
                             (gptel-backend-stream gptel-backend))
                        :json-false))))
    (when gptel-temperature
      (plist-put prompts-plist :temperature gptel-temperature))
    (when gptel-max-tokens
      (plist-put prompts-plist :max_tokens gptel-max-tokens))
    prompts-plist))
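
;; Illustrative only, not part of the original file: once JSON-encoded, the
;; plist built above becomes a standard chat-completions request body.  With
;; `gptel-model' set to "gpt-3.5-turbo" and streaming off, for example:
;;
;; {"model": "gpt-3.5-turbo",
;;  "messages": [{"role": "system", "content": "..."},
;;               {"role": "user", "content": "..."}],
;;  "stream": false}
;;
;; :temperature and :max_tokens are included only when the corresponding
;; gptel variables are non-nil.
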
(cl-defmethod gptel--parse-buffer ((_backend gptel-openai) &optional max-entries)
  (let ((prompts) (prop))
    (while (and
            (or (not max-entries) (>= max-entries 0))
            (setq prop (text-property-search-backward
                        'gptel 'response
                        (when (get-char-property (max (point-min) (1- (point)))
                                                 'gptel)
                          t))))
      (push (list :role (if (prop-match-value prop) "assistant" "user")
                  :content
                  (string-trim
                   (buffer-substring-no-properties (prop-match-beginning prop)
                                                   (prop-match-end prop))
                   "[*# \t\n\r]+"))
            prompts)
      (and max-entries (cl-decf max-entries)))
    (cons (list :role "system"
                :content gptel--system-message)
          prompts)))
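
;; A sketch of the return value, not part of the original file: buffer text
;; carrying the `gptel' text property becomes "assistant" messages, everything
;; else becomes "user" messages, collected oldest-first, with an entry holding
;; `gptel--system-message' prepended, e.g.
;;
;; ((:role "system"    :content "...")
;;  (:role "user"      :content "...")
;;  (:role "assistant" :content "...")
;;  (:role "user"      :content "..."))
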
;;;###autoload
(cl-defun gptel-make-openai
    (name &key header models stream
          (key 'gptel-api-key)
          (host "api.openai.com")
          (protocol "https")
          (endpoint "/v1/chat/completions"))
  "Register a ChatGPT backend for gptel with NAME.

Keyword arguments:

HOST (optional) is the API host, typically \"api.openai.com\".

MODELS is a list of available model names.

STREAM is a boolean to toggle streaming responses, defaults to
false.

PROTOCOL (optional) specifies the protocol, https by default.

ENDPOINT (optional) is the API endpoint for completions, defaults to
\"/v1/chat/completions\".

HEADER (optional) is for additional headers to send with each
request.  It should be an alist or a function that returns an
alist, like:
((\"Content-Type\" . \"application/json\"))

KEY (optional) is a variable whose value is the API key, or a
function that returns the key."
  (let ((backend (gptel--make-openai
                  :name name
                  :host host
                  :header header
                  :key key
                  :models models
                  :protocol protocol
                  :endpoint endpoint
                  :stream stream
                  :url (if protocol
                           (concat protocol "://" host endpoint)
                         (concat host endpoint)))))
    (prog1 backend
      (setf (alist-get name gptel--known-backends
                       nil nil #'equal)
            backend))))
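
;; Usage sketch, not part of the original file (the backend name and model
;; list are placeholders; host, protocol and endpoint fall back to the
;; api.openai.com defaults above):
;;
;; (gptel-make-openai "ChatGPT"
;;   :stream t
;;   :models '("gpt-3.5-turbo" "gpt-4"))
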
;;; Azure
;;;###autoload
(cl-defun gptel-make-azure
    (name &key host
          (protocol "https")
          (header (lambda () `(("api-key" . ,(gptel--get-api-key)))))
          (key 'gptel-api-key)
          models stream endpoint)
  "Register an Azure backend for gptel with NAME.

Keyword arguments:

HOST is the API host.

MODELS is a list of available model names.

STREAM is a boolean to toggle streaming responses, defaults to
false.

PROTOCOL (optional) specifies the protocol, https by default.

ENDPOINT is the API endpoint for completions.

HEADER (optional) is for additional headers to send with each
request.  It should be an alist or a function that returns an
alist, like:
((\"Content-Type\" . \"application/json\"))

KEY (optional) is a variable whose value is the API key, or a
function that returns the key.

Example:
-------

(gptel-make-azure
 \"Azure-1\"
 :protocol \"https\"
 :host \"RESOURCE_NAME.openai.azure.com\"
 :endpoint
 \"/openai/deployments/DEPLOYMENT_NAME/completions?api-version=2023-05-15\"
 :stream t
 :models \\='(\"gpt-3.5-turbo\" \"gpt-4\"))"
  (let ((backend (gptel--make-openai
                  :name name
                  :host host
                  :header header
                  :key key
                  :models models
                  :protocol protocol
                  :endpoint endpoint
                  :stream stream
                  :url (if protocol
                           (concat protocol "://" host endpoint)
                         (concat host endpoint)))))
    (prog1 backend
      (setf (alist-get name gptel--known-backends
                       nil nil #'equal)
            backend))))

;;; GPT4All
;;;###autoload
(defalias 'gptel-make-gpt4all 'gptel-make-openai
  "Register a GPT4All backend for gptel with NAME.

Keyword arguments:

HOST is where GPT4All runs (with port), typically localhost:4891

MODELS is a list of available model names.

STREAM is a boolean to toggle streaming responses, defaults to
false.

PROTOCOL specifies the protocol, https by default.

ENDPOINT (optional) is the API endpoint for completions, defaults to
\"/api/v1/completions\"

HEADER (optional) is for additional headers to send with each
request.  It should be an alist or a function that returns an
alist, like:
((\"Content-Type\" . \"application/json\"))

KEY (optional) is a variable whose value is the API key, or a
function that returns the key.  This is typically not required for
local models like GPT4All.

Example:
-------

(gptel-make-gpt4all
 \"GPT4All\"
 :protocol \"http\"
 :host \"localhost:4891\"
 :models \\='(\"mistral-7b-openorca.Q4_0.gguf\"))")

(provide 'gptel-openai)
;;; gptel-openai.el ends here