[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[elpa] externals/llm 3988fecb53 11/71: Make separate function for event streaming, w/ client-side handlers
From: ELPA Syncer
Subject: [elpa] externals/llm 3988fecb53 11/71: Make separate function for event streaming, w/ client-side handlers
Date: Fri, 17 May 2024 00:58:43 -0400 (EDT)
branch: externals/llm
commit 3988fecb534c77624b2ba421f717f51a95a6e5a7
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>
Make separate function for event streaming, w/ client-side handlers
---
llm-openai.el | 54 +++++++++++++++++++++++++++++-------------------------
llm-request-plz.el | 49 +++++++++++++++++++++++++++++++++++++++++++++++++
2 files changed, 78 insertions(+), 25 deletions(-)
diff --git a/llm-openai.el b/llm-openai.el
index 00c2765dc1..1a3d89b7a6 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -296,31 +296,35 @@ RESPONSE can be nil if the response is complete."
response-callback error-callback)
(llm-openai--check-key provider)
(let ((buf (current-buffer)))
- (llm-request-plz-async (llm-openai--url provider "chat/completions")
- :headers (llm-openai--headers provider)
- :data (llm-openai--chat-request (llm-openai-chat-model provider) prompt t)
- :on-error (lambda (_ data)
- (let ((errdata (cdr
- (assoc 'error
- (json-read-from-string data)))))
- (llm-request-callback-in-buffer
- buf error-callback 'error
- (format "Problem calling Open AI: %s message: %s"
- (cdr (assoc 'type errdata))
- (cdr (assoc 'message errdata))))))
- :on-partial (lambda (data)
- (when (not (equal data "[DONE]"))
- (when-let ((response (llm-openai--get-partial-chat-response
- (json-read-from-string data))))
- (when (stringp response)
- (llm-request-callback-in-buffer buf partial-callback response)))))
- :on-success (lambda (_)
- (llm-request-callback-in-buffer
- buf
- response-callback
- (llm-openai--process-and-return
- provider prompt nil
- error-callback))))))
+ (llm-request-plz-event-stream
+ (llm-openai--url provider "chat/completions")
+ :headers (llm-openai--headers provider)
+ :data (llm-openai--chat-request (llm-openai-chat-model provider) prompt t)
+ :event-stream-handlers
+ `(("message" . ,(lambda (data)
+ (when (not (equal data "[DONE]"))
+ (when-let ((response (llm-openai--get-partial-chat-response
+ (json-read-from-string data))))
+ (when (stringp response)
+ (llm-request-callback-in-buffer buf partial-callback response))))))
+ ("error" . ,(lambda (data)
+ (llm-request-callback-in-buffer
+ buf error-callback 'error data))))
+ :on-error (lambda (_ data)
+ (let ((errdata
+ (cdr (assoc 'error (json-read-from-string data)))))
+ (llm-request-callback-in-buffer
+ buf error-callback 'error
+ (format "Problem calling Open AI: %s message: %s"
+ (cdr (assoc 'type errdata))
+ (cdr (assoc 'message errdata))))))
+ :on-success (lambda (_)
+ (llm-request-callback-in-buffer
+ buf
+ response-callback
+ (llm-openai--process-and-return
+ provider prompt nil
+ error-callback))))))
(cl-defmethod llm-name ((_ llm-openai))
"Open AI")
diff --git a/llm-request-plz.el b/llm-request-plz.el
index d7bd3813b2..67285eb1b0 100644
--- a/llm-request-plz.el
+++ b/llm-request-plz.el
@@ -146,6 +146,55 @@ the buffer is turned into JSON and passed to ON-SUCCESS."
(funcall on-error error)))
:timeout (or timeout llm-request-plz-timeout)))
+(cl-defun llm-request-plz-event-stream (url &key headers data on-error on-success
+ event-stream-handlers timeout)
+ "Make a request to URL.
+Nothing will be returned.
+
+HEADERS will be added in the Authorization header, in addition to
+standard json header. This is optional.
+
+DATA will be jsonified and sent as the request body.
+This is required.
+
+ON-SUCCESS will be called with the response body as a json
+object. This is optional in the case that ON-SUCCESS-DATA is set,
+and required otherwise.
+
+EVENT-STREAM-HANDLERS are an alist of event names to functions
+that handle the event's corresponding data, which will be called
+with the new event data as a string.
+
+ON-ERROR will be called with the error code and a response-body.
+This is required.
+"
+ (plz-media-type-request
+ 'post url
+ :as `(media-types
+ ,(cons
+ (cons "text/event-stream"
+ (plz-media-type:text/event-stream
+ ;; Convert so that each event handler gets the body, not the
+ ;; `plz-response' itself.
+ :events (mapcar
+ (lambda (cons)
+ (cons (car cons)
+ (lambda (_ resp) (funcall (cdr cons) (plz-event-source-event-data resp)))))
+ event-stream-handlers)))
+ plz-media-types))
+ :body (when data
+ (encode-coding-string (json-encode data) 'utf-8))
+ :headers (append headers
+ '(("Accept-encoding" . "identity")
+ ("Content-Type" . "application/json")))
+ :then (lambda (response)
+ (when on-success
+ (funcall on-success (plz-response-body response))))
+ :else (lambda (error)
+ (when on-error
+ (funcall on-error error)))
+ :timeout (or timeout llm-request-plz-timeout)))
+
;; This is a useful method for getting out of the request buffer when it's time
;; to make callbacks.
(defun llm-request-plz-callback-in-buffer (buf f &rest args)
- [elpa] externals/llm 1f3b018dcb 03/71: Merge pull request #26 from r0man/plz, (continued)
- [elpa] externals/llm 1f3b018dcb 03/71: Merge pull request #26 from r0man/plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 829bedabe6 04/71: Support for vertex embeddings, adding callback compatibility, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 2ac956a060 05/71: Add support for the application/x-ndjson media type, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 63f2b8ffbc 10/71: Merge branch 'main' into plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm a9cd296cd8 02/71: Add llm-request-plz.el, ELPA Syncer, 2024/05/17
- [elpa] externals/llm f33475eeae 01/71: Add plz and media type and event source extensions, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 41d0889bcb 06/71: Make sync use of plz return data instead of object, converted Gemini, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 84678edfae 07/71: Merge pull request #28 from r0man/plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 9541d34656 08/71: Enable streaming in Open AI with plz modifications, ELPA Syncer, 2024/05/17
- [elpa] externals/llm c9ab8664ce 09/71: Support function streaming with Open AI & plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 3988fecb53 11/71: Make separate function for event streaming, w/ client-side handlers, ELPA Syncer <=
- [elpa] externals/llm 9ce3d9e003 13/71: Port claude to llm-request-plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm f9213b981c 14/71: Use the plz request module for everything in Claude, ELPA Syncer, 2024/05/17
- [elpa] externals/llm cdbb41528c 18/71: Fix issue advancing the process buffer, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 3441784ae2 19/71: Fix error handling., ELPA Syncer, 2024/05/17
- [elpa] externals/llm 6f9c604e58 20/71: Decode body and chunks using a coding system, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 62495de57f 21/71: Don't decode error response twice, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 03afb1d795 23/71: Use symbols for event types & improve timeout handling, ELPA Syncer, 2024/05/17
- [elpa] externals/llm e07c84b81b 24/71: Improve Curl/HTTP error handling, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 069bd193ef 26/71: Adding Gemini & Vertex streaming using plz, ELPA Syncer, 2024/05/17
- [elpa] externals/llm 99814b533d 28/71: Changes to get at least some of Gemini / Vertex working, ELPA Syncer, 2024/05/17