From: ELPA Syncer
Subject: [elpa] externals/llm 152a6ed44a: Fix Vertex and Open AI's parallel call tool use (#78)
Date: Fri, 6 Sep 2024 00:58:45 -0400 (EDT)

branch: externals/llm
commit 152a6ed44ae43641e35a0d2ce2cf9f9d35f23999
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: GitHub <noreply@github.com>

    Fix Vertex and Open AI's parallel call tool use (#78)
    
    Change how we store the results in the prompt to support parallel calls.
    
    This is an alternate fix for https://github.com/ahyatt/llm/issues/71, so it
    undoes https://github.com/ahyatt/llm/pull/73 and makes a fix along the lines
    of what Paul Nelson speculated would be needed.
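    As a sketch only (not part of this commit), the new representation stores a
    list of result structs on a single `function' interaction, one per parallel
    tool call, instead of a single result.  Using the struct names visible in
    this diff, with hypothetical call ids and result values:
    
        ;; Sketch, not from the commit: two parallel tool results held by one
        ;; interaction via the new `function-call-results' slot.
        (require 'llm)  ; struct definitions
        (make-llm-chat-prompt-interaction
         :role 'function
         :function-call-results
         (list (make-llm-chat-prompt-function-call-result
                :call-id "toolu_1" :function-name "capital_of_country" :result "Paris")
               (make-llm-chat-prompt-function-call-result
                :call-id "toolu_2" :function-name "capital_of_country" :result "Rome")))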
---
 NEWS.org                |   1 +
 llm-claude.el           | 107 ++++++++----------------------------------------
 llm-integration-test.el |  47 +++++++++++++++++----
 llm-openai.el           |  49 +++++++++++++---------
 llm-provider-utils.el   |  83 +++++++++++++++++++++----------------
 llm-vertex.el           |  19 +++++----
 llm.el                  |   8 ++--
 7 files changed, 149 insertions(+), 165 deletions(-)

diff --git a/NEWS.org b/NEWS.org
index 42955c58b4..f72ee35b80 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,5 +1,6 @@
 * Version 0.17.4
 - Fix problem with Open AI's =llm-chat-token-limit=.
+- Fix Open AI and Gemini's parallel function calling.
 * Version 0.17.3
 - More fixes with Claude and Ollama function calling conversation, thanks to Paul Nelson.
 - Make =llm-chat-streaming-to-point= more efficient, just inserting new text, thanks to Paul Nelson.
diff --git a/llm-claude.el b/llm-claude.el
index c4c6cc19c1..15c2720e10 100644
--- a/llm-claude.el
+++ b/llm-claude.el
@@ -50,102 +50,29 @@
     ("input_schema" . ,(llm-provider-utils-openai-arguments
                         (llm-function-call-args call)))))
 
-(defun llm-claude--postprocess-messages (messages)
-  "Post-process MESSAGES, as in `llm-provider-chat-request'.
-
-Currently this flattens consecutive user tool results, for reasons
-explained in the final couple sentences of URL
-`https://docs.anthropic.com/en/docs/build-with-claude/\
-tool-use#multiple-tool-example'
-
-Example input:
-
- (((\"role\" . user) (\"content\" . \"Compute 2+3 and 4+5.\"))
-  ((\"role\" . assistant)
-   (\"content\"
-    ((type . \"tool_use\") (id . \"toolu_017epyv32yDx5zwhNPVprRrQ\")
-     (name . \"add\") (input (a . 2) (b . 3)))
-    ((type . \"tool_use\") (id . \"toolu_01EPALH8MdwuWVxzX8ErNKWM\")
-     (name . \"add\") (input (a . 4) (b . 5)))))
-  ((\"role\" . user)
-   (\"content\"
-    ((\"type\" . \"tool_result\")
-     (\"tool_use_id\" . \"toolu_017epyv32yDx5zwhNPVprRrQ\")
-     (\"content\" . \"5\"))))
-  ((\"role\" . user)
-   (\"content\"
-    ((\"type\" . \"tool_result\")
-     (\"tool_use_id\" . \"toolu_01EPALH8MdwuWVxzX8ErNKWM\")
-     (\"content\" . \"9\")))))
-
-Example output:
-
- (((\"role\" . user) (\"content\" . \"Compute 2+3 and 4+5.\"))
-  ((\"role\" . assistant)
-   (\"content\"
-    ((type . \"tool_use\") (id . \"toolu_017epyv32yDx5zwhNPVprRrQ\")
-     (name . \"add\") (input (a . 2) (b . 3)))
-    ((type . \"tool_use\") (id . \"toolu_01EPALH8MdwuWVxzX8ErNKWM\")
-     (name . \"add\") (input (a . 4) (b . 5)))))
-  ((\"role\" . \"user\")
-   (\"content\"
-    ((\"type\" . \"tool_result\")
-     (\"tool_use_id\" . \"toolu_017epyv32yDx5zwhNPVprRrQ\")
-     (\"content\" . \"5\"))
-    ((\"type\" . \"tool_result\")
-     (\"tool_use_id\" . \"toolu_01EPALH8MdwuWVxzX8ErNKWM\")
-     (\"content\" . \"9\")))))"
-  (let ((result '())
-        (tool-results '()))
-    (dolist (message messages)
-      (let* ((role (alist-get "role" message nil nil #'equal))
-             (content (alist-get "content" message nil nil #'equal))
-             (is-tool-result
-              (and (equal role 'user)
-                   (listp content)
-                   (let ((type
-                          (alist-get "type" (car content) nil nil #'equal)))
-                     (equal type "tool_result")))))
-        (cond
-         (is-tool-result
-          (setq tool-results (append tool-results content)))
-
-         ;; End of segment of user tool-results
-         ((and (null is-tool-result) tool-results)
-          (push `(("role" . "user") ("content" . ,tool-results)) result)
-          (setq tool-results '())
-          (push message result))
-
-         (t
-          (push message result)))))
-
-    (when tool-results
-      (push `(("role" . "user") ("content" . ,tool-results)) result))
-
-    (nreverse result)))
-
 (cl-defmethod llm-provider-chat-request ((provider llm-claude) prompt stream)
   (let ((request `(("model" . ,(llm-claude-chat-model provider))
                    ("stream" . ,(if stream t :json-false))
                    ;; Claude requires max_tokens
                    ("max_tokens" . ,(or (llm-chat-prompt-max-tokens prompt) 4096))
                    ("messages" .
-                    ,(llm-claude--postprocess-messages
-                      (mapcar (lambda (interaction)
-                                `(("role" . ,(pcase (llm-chat-prompt-interaction-role interaction)
-                                               ('function 'user)
-                                               ('assistant 'assistant)
-                                               ('user 'user)))
-                                  ("content" .
-                                   ,(if (llm-chat-prompt-interaction-function-call-result interaction)
-                                        `((("type" . "tool_result")
-                                           ("tool_use_id" .
-                                            ,(llm-chat-prompt-function-call-result-call-id
-                                              (llm-chat-prompt-interaction-function-call-result interaction)))
-                                           ("content" .
-                                            ,(llm-chat-prompt-interaction-content interaction))))
-                                      (llm-chat-prompt-interaction-content interaction)))))
-                              (llm-chat-prompt-interactions prompt))))))
+                    ,(mapcar (lambda (interaction)
+                               `(("role" . ,(pcase (llm-chat-prompt-interaction-role interaction)
+                                              ('function 'user)
+                                              ('assistant 'assistant)
+                                              ('user 'user)))
+                                 ("content" .
+                                  ,(if (llm-chat-prompt-interaction-function-call-results interaction)
+                                       (mapcar (lambda (result)
+                                                 `(("type" . "tool_result")
+                                                   ("tool_use_id" .
+                                                    ,(llm-chat-prompt-function-call-result-call-id
+                                                      result))
+                                                   ("content" .
+                                                    ,(llm-chat-prompt-function-call-result-result result))))
+                                               (llm-chat-prompt-interaction-function-call-results interaction))
+                                     (llm-chat-prompt-interaction-content interaction)))))
+                             (llm-chat-prompt-interactions prompt)))))
         (system (llm-provider-utils-get-system-prompt prompt)))
     (when (llm-chat-prompt-functions prompt)
       (push `("tools" . ,(mapcar (lambda (f) (llm-claude--tool-call f))
diff --git a/llm-integration-test.el b/llm-integration-test.el
index 6a02cccaf8..cac5b981e8 100644
--- a/llm-integration-test.el
+++ b/llm-integration-test.el
@@ -68,6 +68,25 @@
   '(("capital_of_country" . "France"))
   "The correct answer to the function call prompt.")
 
+(defun llm-integration-test-fc-multiple-prompt ()
+  (llm-make-chat-prompt
+   "What is the capital of France, and also what is the capital of Italy?"
+   :functions
+   (list (make-llm-function-call
+          :function (lambda (f) f)
+          :name "capital_of_country"
+          :description "Get the capital of a country."
+          :args (list (make-llm-function-arg
+                       :name "country"
+                       :description "The country whose capital to look up."
+                       :type 'string
+                       :required t))))))
+
+(defconst llm-integration-test-fc-multiple-answer
+  '(("capital_of_country" . "France")
+    ("capital_of_country" . "Italy"))
+  "The correct answer to the function call prompt.")
+
 (defun llm-integration-test-providers ()
   "Return a list of providers to test."
   (let ((providers))
@@ -87,7 +106,8 @@
       (require 'llm-ollama)
       ;; This variable is a list of models to test.
       (dolist (model (split-string (getenv "OLLAMA_CHAT_MODELS") ", "))
-        (push (make-llm-ollama :chat-model model) providers)))))
+        (push (make-llm-ollama :chat-model model) providers)))
+    providers))
 
 (defmacro llm-def-integration-test (name arglist &rest body)
   "Define an integration test."
@@ -145,11 +165,24 @@
       (should (equal streamed-result llm-integration-test-chat-answer)))))
 
 (llm-def-integration-test llm-function-call (provider)
-  (should (equal
-           (llm-chat provider (llm-integration-test-fc-prompt))
-           llm-integration-test-fc-answer))
-  ;; Test that we can send the function back to the provider without error.
-  (llm-chat provider (llm-integration-test-fc-prompt)))
-
+  (let ((prompt (llm-integration-test-fc-prompt)))
+    (should (equal
+             (llm-chat provider prompt)
+             llm-integration-test-fc-answer))
+    ;; Test that we can send the function back to the provider without error.
+    (llm-chat provider prompt)))
+
+(llm-def-integration-test llm-function-call-multiple (provider)
+  (let ((prompt (llm-integration-test-fc-multiple-prompt)))
+    ;; Sending back multiple answers often doesn't happen, so we can't reliably
+    ;; check for this yet.
+    (llm-chat provider prompt)
+    ;; Test that we can send the function back to the provider without error.
+    (llm-chat provider prompt)))
+
+(llm-def-integration-test llm-count-tokens (provider)
+  (let ((result (llm-count-tokens provider "What is the capital of France?")))
+    (should (integerp result))
+    (should (> result 0))))
 
 (provide 'llm-integration-test)
diff --git a/llm-openai.el b/llm-openai.el
index 5e50d7896e..8081392757 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -144,19 +144,23 @@ STREAMING if non-nil, turn on response streaming."
     (llm-provider-utils-combine-to-system-prompt prompt llm-openai-example-prelude)
     (when streaming (push `("stream" . ,t) request-alist))
     (push `("messages" .
-            ,(mapcar (lambda (p)
-                       (append
-                        `(("role" . ,(llm-chat-prompt-interaction-role p))
-                          ("content" . ,(let ((content
-                                               (llm-chat-prompt-interaction-content p)))
-                                          (if (stringp content) content
-                                            (json-encode content)))))
-                        (when-let ((fc (llm-chat-prompt-interaction-function-call-result p)))
+            ,(mapcan (lambda (i)
+                       (if (llm-chat-prompt-interaction-function-call-results i)
+                           (mapcar (lambda (fc)
+                                     (append
+                                      (when (llm-chat-prompt-function-call-result-call-id fc)
+                                        `(("tool_call_id" .
+                                           ,(llm-chat-prompt-function-call-result-call-id fc))))
+                                      `(("role" . "tool")
+                                        ("name" . ,(llm-chat-prompt-function-call-result-function-name fc))
+                                        ("content" . ,(llm-chat-prompt-function-call-result-result fc)))))
+                                   (llm-chat-prompt-interaction-function-call-results i))
+                         (list
                           (append
-                           (when (llm-chat-prompt-function-call-result-call-id fc)
-                             `(("tool_call_id" .
-                                ,(llm-chat-prompt-function-call-result-call-id fc))))
-                           `(("name" . ,(llm-chat-prompt-function-call-result-function-name fc)))))))
+                           `(("role" . ,(llm-chat-prompt-interaction-role i)))
+                           (when-let ((content (llm-chat-prompt-interaction-content i)))
+                             (if (stringp content) `(("content" . ,content))
+                               (llm-openai-function-call-to-response content)))))))
                      (llm-chat-prompt-interactions prompt)))
           request-alist)
     (push `("model" . ,(or (llm-openai-chat-model provider) "gpt-4o")) request-alist)
@@ -185,15 +189,20 @@ STREAMING if non-nil, turn on response streaming."
                          (assoc-default 'message
                                        (aref (assoc-default 'choices response) 0)))))
 
+(defun llm-openai-function-call-to-response (fcs)
+  "Convert back from the generic representation to the Open AI.
+FCS is a list of `make-llm-provider-utils-function-call'"
+  `(("tool_calls" .
+     ,(mapcar (lambda (fc)
+                `(("id" . ,(llm-provider-utils-function-call-id fc))
+                  ("type" . "function")
+                  ("function" .
+                   (("arguments" . ,(json-encode (llm-provider-utils-function-call-args fc)))
+                    ("name" . ,(llm-provider-utils-function-call-name fc))))))
+              fcs))))
+
 (cl-defmethod llm-provider-populate-function-calls ((_ llm-openai) prompt calls)
-  (llm-provider-utils-append-to-prompt
-   prompt
-   (mapcar (lambda (call)
-             `((id . ,(llm-provider-utils-function-call-id call))
-               (function (name . ,(llm-provider-utils-function-call-name call))
-                         (arguments . ,(json-encode
-                                        (llm-provider-utils-function-call-args call))))))
-           calls)))
+  (llm-provider-utils-append-to-prompt prompt calls))
 
 (defun llm-openai--get-partial-chat-response (response)
   "Return the text in the partial chat response from RESPONSE.
diff --git a/llm-provider-utils.el b/llm-provider-utils.el
index 91c853510a..dd63f25550 100644
--- a/llm-provider-utils.el
+++ b/llm-provider-utils.el
@@ -512,6 +512,9 @@ This returns a JSON object (a list that can be converted to JSON)."
 
 OUTPUT can be a string or a structure in the case of function calls.
 
+FUNC-RESULTS is a list of function results resulting from the LLM
+output, if any.
+
 ROLE will be `assistant' by default, but can be passed in for other roles."
   (setf (llm-chat-prompt-interactions prompt)
         (append (llm-chat-prompt-interactions prompt)
@@ -523,7 +526,7 @@ ROLE will be `assistant' by default, but can be passed in for other roles."
                        :content (if (listp output)
                                     output
                                   (format "%s" output))
-                       :function-call-result func-results)))))
+                       :function-call-results func-results)))))
 
 (cl-defstruct llm-provider-utils-function-call
   "A struct to hold information about a function call.
@@ -547,25 +550,34 @@ be either FUNCALLS or TEXT."
   (if-let ((funcalls funcalls))
       ;; If we have function calls, execute them and return the results, and
       ;; it talso takes care of updating the prompt.
-      (llm-provider-utils-execute-function-calls provider prompt funcalls)
+      (let ((results-alist
+             (llm-provider-utils-execute-function-calls provider prompt funcalls)))
+        (llm-provider-utils-populate-function-results
+         provider prompt results-alist)
+        (mapcar #'cdr results-alist))
     ;; We probably shouldn't be called if text is nil, but if we do,
     ;; we shouldn't add something invalid to the prompt.
     (when text
       (llm-provider-append-to-prompt provider prompt text))
     text))
 
-(defun llm-provider-utils-populate-function-results (provider prompt func result)
-  "Append the RESULT of FUNC to PROMPT.
+(defun llm-provider-utils-populate-function-results (provider prompt results-alist)
+  "Append the results in RESULTS-ALIST to the prompt.
+
+PROMPT is the prompt to populate into.
 
-FUNC is a `llm-provider-utils-function-call' struct.
+RESULTS-ALIST is a list of cons of function
+calls (`llm-provider-utils-function-call' structs) and their
+results.
 
 PROVIDER is the struct that configures the user of the LLM."
   (llm-provider-append-to-prompt
-   provider prompt result
-   (make-llm-chat-prompt-function-call-result
-    :call-id (llm-provider-utils-function-call-id func)
-    :function-name (llm-provider-utils-function-call-name func)
-    :result result)))
+   provider prompt nil
+   (mapcar (lambda (c) (make-llm-chat-prompt-function-call-result
+                        :call-id (llm-provider-utils-function-call-id (car c))
+                        :function-name (llm-provider-utils-function-call-name (car c))
+                        :result (cddr c)))
+           results-alist)))
 
 (defun llm-provider-utils-execute-function-calls (provider prompt funcalls)
   "Execute FUNCALLS, a list of `llm-provider-utils-function-calls'.
@@ -581,31 +593,32 @@ function call, the result.
 This returns the response suitable for output to the client; a
 cons of functions called and their output."
   (llm-provider-populate-function-calls provider prompt funcalls)
-  (cl-loop for func in funcalls collect
-           (let* ((name (llm-provider-utils-function-call-name func))
-                  (arguments (llm-provider-utils-function-call-args func))
-                  (function (seq-find
-                             (lambda (f) (equal name (llm-function-call-name f)))
-                             (llm-chat-prompt-functions prompt))))
-             (cons name
-                   (let* ((args (cl-loop for arg in (llm-function-call-args function)
-                                         collect (cdr (seq-find (lambda (a)
-                                                                  (eq (intern
-                                                                       (llm-function-arg-name arg))
-                                                                      (car a)))
-                                                                arguments))))
-                          (result (apply (llm-function-call-function function) args)))
-                     (llm-provider-utils-populate-function-results
-                      provider prompt func result)
-                     (llm--log
-                      'api-funcall
-                      :provider provider
-                      :msg (format "%s --> %s"
-                                   (format "%S"
-                                           (cons (llm-function-call-name function)
-                                                 args))
-                                   (format "%s" result)))
-                     result)))))
+  (cl-loop
+   for func in funcalls collect
+   (cons
+    func
+    (let* ((name (llm-provider-utils-function-call-name func))
+           (arguments (llm-provider-utils-function-call-args func))
+           (function (seq-find
+                      (lambda (f) (equal name (llm-function-call-name f)))
+                      (llm-chat-prompt-functions prompt))))
+      (cons name
+            (let* ((args (cl-loop for arg in (llm-function-call-args function)
+                                  collect (cdr (seq-find (lambda (a)
+                                                           (eq (intern
+                                                                (llm-function-arg-name arg))
+                                                               (car a)))
+                                                         arguments))))
+                   (result (apply (llm-function-call-function function) args)))
+              (llm--log
+               'api-funcall
+               :provider provider
+               :msg (format "%s --> %s"
+                            (format "%S"
+                                    (cons (llm-function-call-name function)
+                                          args))
+                            (format "%s" result)))
+              result))))))
 
 
 ;; This is a useful method for getting out of the request buffer when it's time
diff --git a/llm-vertex.el b/llm-vertex.el
index 33d38723d9..239fc5476d 100644
--- a/llm-vertex.el
+++ b/llm-vertex.el
@@ -193,14 +193,16 @@ the key must be regenerated every hour."
                                             interaction))))
                              (if (eq 'function
                                      (llm-chat-prompt-interaction-role interaction))
-                                 (let ((fc (llm-chat-prompt-interaction-function-call-result interaction)))
-                                   `(((functionResponse
-                                       .
-                                       ((name . ,(llm-chat-prompt-function-call-result-function-name fc))
-                                        (response
-                                         .
-                                         ((name . ,(llm-chat-prompt-function-call-result-function-name fc))
-                                          (content . ,(llm-chat-prompt-function-call-result-result fc)))))))))
+                                 (mapcar (lambda (fc)
+                                           `(((functionResponse
+                                               .
+                                               ((name . ,(llm-chat-prompt-function-call-result-function-name fc))
+                                                (response
+                                                 .
+                                                 ((name . ,(llm-chat-prompt-function-call-result-function-name fc))
+                                                  (content . ,(llm-chat-prompt-function-call-result-result fc)))))))))
+                                         (llm-chat-prompt-interaction-function-call-results interaction))
+
                                (llm-chat-prompt-interaction-content interaction))))))
                (llm-chat-prompt-interactions prompt))))
    (when (llm-chat-prompt-functions prompt)
@@ -233,7 +235,6 @@ nothing to add, in which case it is nil."
 (cl-defmethod llm-provider-populate-function-calls ((_ llm-vertex) prompt calls)
   (llm-provider-utils-append-to-prompt
    prompt
-   ;; For Vertex there is just going to be one call
    (mapcar (lambda (fc)
              `((functionCall
                 .
diff --git a/llm.el b/llm.el
index da4bcaacd7..465ccaae26 100644
--- a/llm.el
+++ b/llm.el
@@ -76,11 +76,11 @@ Use of this directly is deprecated, instead use `llm-make-chat-prompt'."
   "This defines a single interaction given as part of a chat prompt.
 ROLE can a symbol, of either `user', `assistant', or `function'.
 
-FUNCTION-CALL-RESULTS is a struct of type
+FUNCTION-CALL-RESULTS is a list of structs of type
 `llm-chat-prompt-function-call-results', which is only populated
-if `role' is `function'.  It stores the results of just one
-function call."
-  role content function-call-result)
+if `role' is `function'.  It stores the results of the function
+calls."
+  role content  function-call-results)
 
 (cl-defstruct llm-chat-prompt-function-call-result
   "This defines the result from a function call.


