emacs-elpa-diffs
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

[elpa] externals/llm 9d215d7ba2 58/71: Enable handling errors on streaming, and add requires


From: ELPA Syncer
Subject: [elpa] externals/llm 9d215d7ba2 58/71: Enable handling errors on streaming, and add requires
Date: Fri, 17 May 2024 00:58:49 -0400 (EDT)

branch: externals/llm
commit 9d215d7ba23de0243a114b7990d42cf4b21c84dc
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>

    Enable handling errors on streaming, and add requires
---
 llm-claude.el         | 36 ++++++++++++++++++++----------------
 llm-ollama.el         |  3 ++-
 llm-openai.el         | 19 ++++++++++---------
 llm-provider-utils.el | 32 ++++++++++++++++++++++----------
 llm-vertex.el         |  2 +-
 5 files changed, 55 insertions(+), 37 deletions(-)

diff --git a/llm-claude.el b/llm-claude.el
index 96f653d0e9..ffd42ed9d4 100644
--- a/llm-claude.el
+++ b/llm-claude.el
@@ -28,6 +28,7 @@
 (require 'llm)
 (require 'llm-request)
 (require 'llm-provider-utils)
+(require 'plz-event-source)
 (require 'rx)
 
 ;; Models defined at https://docs.anthropic.com/claude/docs/models-overview
@@ -66,23 +67,26 @@
         (assoc-default 'text content)
       (format "Unsupported non-text response: %s" content))))
 
-(cl-defmethod llm-provider-streaming-media-handler ((_ llm-claude) msg-receiver _)
+(cl-defmethod llm-provider-streaming-media-handler ((_ llm-claude)
+                                                    msg-receiver _ err-receiver)
   (cons 'text/event-stream
-       (plz-event-source:text/event-stream
-        :events `((message_start . ignore)
-                  (content_block_start . ignore)
-                  (ping . ignore)
-                  (message_stop . ignore)
-                  (content_block_stop . ignore)
-                  (content_block_delta
-                   .
-                   ,(lambda (_ event)
-                      (let* ((data (plz-event-source-event-data event))
-                            (json (json-parse-string data :object-type 'alist))
-                             (delta (assoc-default 'delta json))
-                             (type (assoc-default 'type delta)))
-                        (when (equal type "text_delta")
-                          (funcall msg-receiver (assoc-default 'text delta))))))))))
+           (plz-event-source:text/event-stream
+         :events `((message_start . ignore)
+                   (content_block_start . ignore)
+                   (ping . ignore)
+                   (message_stop . ignore)
+                   (content_block_stop . ignore)
+                   (error . ,(lambda (_ event)
+                               (funcall err-receiver (plz-event-source-event-data event))))
+                   (content_block_delta
+                    .
+                    ,(lambda (_ event)
+                       (let* ((data (plz-event-source-event-data event))
+                                         (json (json-parse-string data :object-type 'alist))
+                              (delta (assoc-default 'delta json))
+                              (type (assoc-default 'type delta)))
+                         (when (equal type "text_delta")
+                           (funcall msg-receiver (assoc-default 'text delta))))))))))
 
 (cl-defmethod llm-provider-headers ((provider llm-claude))
   `(("x-api-key" . ,(llm-claude-key provider))
diff --git a/llm-ollama.el b/llm-ollama.el
index ba409eb040..a7c2dae7fa 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -30,6 +30,7 @@
 (require 'llm-request-plz)
 (require 'llm-provider-utils)
 (require 'json)
+(require 'plz-media-type)
 
 (defgroup llm-ollama nil
   "LLM implementation for Ollama."
@@ -116,7 +117,7 @@ PROVIDER is the llm-ollama provider."
     (when options (push `("options" . ,options) request-alist))
     request-alist))
 
-(cl-defmethod llm-provider-streaming-media-handler ((_ llm-ollama) msg-receiver _)
+(cl-defmethod llm-provider-streaming-media-handler ((_ llm-ollama) msg-receiver _ _)
   (cons 'application/x-ndjson
         (plz-media-type:application/x-ndjson
          :handler (lambda (data)
diff --git a/llm-openai.el b/llm-openai.el
index 107e010356..f281eb9c16 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -30,6 +30,7 @@
 (require 'llm-request-plz)
 (require 'llm-provider-utils)
 (require 'json)
+(require 'plz-event-source)
 
 (defgroup llm-openai nil
   "LLM implementation for Open AI."
@@ -201,17 +202,17 @@ RESPONSE can be nil if the response is complete."
                                 (assoc-default 'tool_calls delta))))
       content-or-call)))
 
-(cl-defmethod llm-provider-streaming-media-handler ((_ llm-openai) msg-receiver fc-receiver)
+(cl-defmethod llm-provider-streaming-media-handler ((_ llm-openai) msg-receiver fc-receiver _)
   (cons 'text/event-stream
-       (plz-event-source:text/event-stream
+           (plz-event-source:text/event-stream
          :events `((message
-                   .
-                  ,(lambda (_ event)
-                     (let ((data (plz-event-source-event-data event)))
-                       (unless (equal data "[DONE]")
-                         (when-let ((response (llm-openai--get-partial-chat-response
-                                               (json-read-from-string data))))
-                            (funcall (if (stringp response) msg-receiver fc-receiver) response))))))))))
+                    .
+                           ,(lambda (_ event)
+                              (let ((data (plz-event-source-event-data event)))
+                                    (unless (equal data "[DONE]")
+                                      (when-let ((response (llm-openai--get-partial-chat-response
+                                                                        (json-read-from-string data))))
+                             (funcall (if (stringp response) msg-receiver fc-receiver) response))))))))))
 
 (cl-defmethod llm-provider-collect-streaming-function-data ((_ llm-openai) data)
   (let ((cvec (make-vector (length (car data)) nil)))
diff --git a/llm-provider-utils.el b/llm-provider-utils.el
index 2e6e82e9d6..eddb98a97e 100644
--- a/llm-provider-utils.el
+++ b/llm-provider-utils.el
@@ -119,14 +119,19 @@ FUNC-RESULTS is a list of function results, if any.")
   "By default, the standard provider appends to the prompt."
   (llm-provider-utils-append-to-prompt prompt result func-results))
 
-(cl-defgeneric llm-provider-streaming-media-handler (provider msg-receiver fc-receiver)
-  "Return a function that will handle streaming media for PROVIDER.
+(cl-defgeneric llm-provider-streaming-media-handler (provider msg-receiver fc-receiver err-receiver)
+  "Define how to handle streaming media for the PROVIDER.
 
-This should be a cons of the media type as a symbol, and a plist
-of the particular data the media type needs to process the
-streaming media.")
+This should return a cons of the media type and an instance that
+handle objects of that type.
 
-(cl-defmethod llm-provider-streaming-media-handler ((_ llm-standard-chat-provider) msg-receiver fc-receiver)
+The handlers defined can call MSG-RECEIVER when they receive part
+of a text message for the client (a chat response).  If they
+receive a function call, they should call FC-RECEIVER with the
+function call.  If they receive an error, they should call
+ERR-RECEIVER with the error message.")
+
+(cl-defmethod llm-provider-streaming-media-handler ((_ llm-standard-chat-provider) _ _ _)
   "By default, the standard provider has no streaming media handler."
   nil)
 
@@ -255,7 +260,11 @@ return a list of `llm-chat-function-call' structs.")
                       (when partial-callback
                                    (llm-provider-utils-callback-in-buffer
                                     buf partial-callback current-text))))
-                  (lambda (fc-new) (push fc-new fc)))
+                  (lambda (fc-new) (push fc-new fc))
+                  (lambda (err)
+                    (llm-provider-utils-callback-in-buffer
+                     buf error-callback 'error
+                     err)))
      :on-success
      (lambda (data)
        (llm-provider-utils-callback-in-buffer
@@ -271,8 +280,8 @@ return a list of `llm-chat-function-call' structs.")
                   (if (stringp data)
                       data
                     (or (llm-provider-chat-extract-error
-                         provider data))
-                    "Unknown error"))))))
+                         provider data)
+                        "Unknown error")))))))
 
 (defun llm-provider-utils-get-system-prompt (prompt &optional example-prelude)
   "From PROMPT, turn the context and examples into a string.
@@ -470,7 +479,10 @@ be either FUNCALLS or TEXT."
       ;; If we have function calls, execute them and return the results, and
       ;; it talso takes care of updating the prompt.
       (llm-provider-utils-execute-function-calls provider prompt funcalls)
-    (llm-provider-append-to-prompt provider prompt text)
+    ;; We probably shouldn't be called if text is nil, but if we do,
+    ;; we shouldn't add something invalid to the prompt.
+    (when text
+      (llm-provider-append-to-prompt provider prompt text))
     text))
 
 (defun llm-provider-utils-populate-function-results (provider prompt func result)
diff --git a/llm-vertex.el b/llm-vertex.el
index 63828c6983..1764f24b32 100644
--- a/llm-vertex.el
+++ b/llm-vertex.el
@@ -229,7 +229,7 @@ nothing to add, in which case it is nil."
            calls)))
 
 (cl-defmethod llm-provider-streaming-media-handler ((provider llm-google)
-                                                    msg-receiver fc-receiver)
+                                                    msg-receiver fc-receiver _)
   (cons 'application/json
         (plz-media-type:application/json-array
          :handler



reply via email to

[Prev in Thread] Current Thread [Next in Thread]