
From: ELPA Syncer
Subject: [elpa] externals/llm 50ad3cbe4c 2/9: Fix issue with never correctly storing or applying the context
Date: Thu, 26 Oct 2023 00:58:43 -0400 (EDT)

branch: externals/llm
commit 50ad3cbe4c7bb7e9252ce87024068d7eae53ee8e
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>

    Fix issue with never correctly storing or applying the context
    
    There were two issues: when we set llm-chat-prompt-interactions, we set
    it to the context value itself, a vector, not a list containing that
    vector. Additionally, we did not check for the existence of the context
    correctly: (not (type-of ...)) is always nil, since type-of always
    returns a symbol.
    
    This is an alternate fix to https://github.com/ahyatt/llm/pull/5.
---
 llm-ollama.el | 7 ++++---
 1 file changed, 4 insertions(+), 3 deletions(-)

diff --git a/llm-ollama.el b/llm-ollama.el
index d1a7c4b39d..73de3d5b76 100644
--- a/llm-ollama.el
+++ b/llm-ollama.el
@@ -113,7 +113,8 @@ STREAMING if non-nil, turn on response streaming."
                                             (car (last (llm-chat-prompt-interactions prompt)))))))
     ;; If the first item isn't an interaction, then it's a conversation which
     ;; we'll set as the chat context.
-    (when (not (type-of (car (llm-chat-prompt-interactions prompt))))
+    (when (not (eq (type-of (car (llm-chat-prompt-interactions prompt)))
+                   'llm-chat-prompt-interaction))
       (push `("context" . ,(car (llm-chat-prompt-interactions prompt))) request-alist))
     (push `("prompt" . ,(string-trim text-prompt)) request-alist)
     (push `("model" . ,(llm-ollama-chat-model provider)) request-alist)
@@ -170,7 +171,7 @@ STREAMING if non-nil, turn on response streaming."
                   ;; ollama is run on a user's machine, and it can take a while.
                    :timeout llm-ollama-chat-timeout)))
       (setf (llm-chat-prompt-interactions prompt)
-            (assoc-default 'context (llm-ollama--get-final-response output)))
+               (list (assoc-default 'context (llm-ollama--get-final-response output))))
       (llm-ollama--get-partial-chat-response output))))
 
 (cl-defmethod llm-chat-async ((provider llm-ollama) prompt response-callback error-callback)
@@ -181,7 +182,7 @@ STREAMING if non-nil, turn on response streaming."
       :data (llm-ollama--chat-request provider prompt)
       :on-success-raw (lambda (response)
                         (setf (llm-chat-prompt-interactions prompt)
-                              (assoc-default 'context (llm-ollama--get-final-response response)))
+                              (list (assoc-default 'context (llm-ollama--get-final-response response))))
                        (funcall response-callback (llm-ollama--get-partial-chat-response response)))
       :on-partial (lambda (data)
                    (when-let ((response (llm-ollama--get-partial-chat-response data)))
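
For illustration, a minimal sketch (not part of the commit) of the two
failure modes described in the commit message, assuming the interaction
struct's prompt slot as seen in the diff above; the vector [1 2 3] stands
in for an Ollama context value:

    ;; `type-of' always returns a non-nil symbol, so the old predicate
    ;; could never be true and a stored context was never applied:
    (not (type-of [1 2 3]))                                  ; => nil

    ;; The fixed predicate matches only real interaction structs:
    (eq (type-of [1 2 3]) 'llm-chat-prompt-interaction)      ; => nil
    (eq (type-of (make-llm-chat-prompt-interaction :prompt "hi"))
        'llm-chat-prompt-interaction)                        ; => t

    ;; Storing the raw context vector broke later list operations such
    ;; as `car' and `last'; wrapping it in a list keeps the slot a list:
    (car [1 2 3])            ; error: (wrong-type-argument listp [1 2 3])
    (car (list [1 2 3]))     ; => [1 2 3]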


