[elpa] externals/llm a16338f31a 08/10: Add streaming to llm-fake
From: ELPA Syncer
Subject: [elpa] externals/llm a16338f31a 08/10: Add streaming to llm-fake
Date: Sun, 1 Oct 2023 18:58:35 -0400 (EDT)
branch: externals/llm
commit a16338f31a44ea6154ed0260d299a9b0ba66ab89
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>
Add streaming to llm-fake
---
llm-fake.el | 20 ++++++++++++++++++++
1 file changed, 20 insertions(+)
diff --git a/llm-fake.el b/llm-fake.el
index 9ffdcdd936..5b3901bd38 100644
--- a/llm-fake.el
+++ b/llm-fake.el
@@ -66,6 +66,26 @@ message cons. If nil, the response will be a simple vector."
(_ (error "Incorrect type found in `chat-action-func': %s"
(type-of result)))))
"Sample response from `llm-chat-async'"))
+(cl-defmethod llm-chat-streaming ((provider llm-fake) prompt partial-callback response-callback _error-callback)
+  (when (llm-fake-output-to-buffer provider)
+    (with-current-buffer (get-buffer-create (llm-fake-output-to-buffer provider))
+      (goto-char (point-max))
+      (insert "\nCall to llm-chat-streaming\n" (llm-chat-prompt-to-text prompt) "\n")))
+  (let ((text "Sample response from `llm-chat-streaming'"))
+    (when (llm-fake-chat-action-func provider)
+      (let* ((f (llm-fake-chat-action-func provider))
+             (result (funcall f)))
+        (pcase (type-of result)
+          ('string (setq text result))
+          ('cons (signal (car result) (cdr result)))
+          (_ (error "Incorrect type found in `chat-action-func': %s" (type-of result))))))
+    (let ((accum ""))
+      (mapc (lambda (word)
+              (setq accum (concat accum word " "))
+              (funcall partial-callback accum))
+            (string-split text))
+      (funcall response-callback text))))
+
(cl-defmethod llm-embedding ((provider llm-fake) string)
  (when (llm-fake-output-to-buffer provider)
    (with-current-buffer (get-buffer-create (llm-fake-output-to-buffer provider))
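For context, here is a minimal usage sketch (not part of this commit) showing how the new streaming method can be exercised against the fake provider. It assumes the make-llm-fake struct constructor implied by the accessors above and the llm-make-simple-chat-prompt helper from llm.el:

;; Minimal sketch: exercising llm-chat-streaming on the fake provider.
(require 'llm)
(require 'llm-fake)

(let ((provider (make-llm-fake
                 ;; Optional: override the default sample text.
                 :chat-action-func (lambda () "A fake streamed reply")
                 ;; Optional: log each call to this buffer.
                 :output-to-buffer "*llm-fake-log*")))
  (llm-chat-streaming provider
                      (llm-make-simple-chat-prompt "Hello")
                      ;; Partial callback: receives the accumulated text, one word at a time.
                      (lambda (partial) (message "partial: %s" partial))
                      ;; Response callback: receives the complete text at the end.
                      (lambda (full) (message "final: %s" full))
                      ;; Error callback: ignored by llm-fake's streaming method, which
                      ;; signals errors directly instead.
                      #'ignore))

Because the fake provider runs its callbacks synchronously, the partial messages and the final message all appear immediately; a real provider would invoke them as data arrives over the network.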