[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]
[elpa] externals/llm 4e9be8183d 07/34: Merge branch 'async'
From: Andrew Hyatt
Subject: [elpa] externals/llm 4e9be8183d 07/34: Merge branch 'async'
Date: Sat, 16 Sep 2023 01:32:47 -0400 (EDT)
branch: externals/llm
commit 4e9be8183d11e7bf652328769e6be2ad3d46d1a3
Merge: 3919b77383 16ee85fd11
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>
Merge branch 'async'
---
llm-openai.el | 36 +++++++++++++++++++--------------
llm-tester.el | 65 +++++++++++++++++++++++++++++++----------------------------
llm.el | 37 ++++++++++++++++++++++++++++++++--
3 files changed, 90 insertions(+), 48 deletions(-)
diff --git a/llm-openai.el b/llm-openai.el
index 4e91f9c52d..3bc8a06f17 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -50,28 +50,34 @@ EMBEDDING-MODEL is the model to use for embeddings. If
unset, it
will use a reasonable default."
key chat-model embedding-model)
-(cl-defmethod llm-embedding ((provider llm-openai) string)
+(cl-defmethod llm-embedding-async ((provider llm-openai) string
vector-callback error-callback)
(unless (llm-openai-key provider)
(error "To call Open AI API, provide the ekg-embedding-api-key"))
- (let ((resp (request "https://api.openai.com/v1/embeddings"
+ (request "https://api.openai.com/v1/embeddings"
:type "POST"
:headers `(("Authorization" . ,(format "Bearer %s"
ekg-embedding-api-key))
("Content-Type" . "application/json"))
:data (json-encode `(("input" . ,string) ("model" . ,(or
(llm-openai-embedding-model provider) "text-embedding-ada-002"))))
:parser 'json-read
+ :success (cl-function (lambda (&key data &allow-other-keys)
+ (funcall vector-callback
+ (cdr (assoc 'embedding (aref
(cdr (assoc 'data data)) 0))))))
:error (cl-function (lambda (&key error-thrown data
&allow-other-keys)
- (error (format "Problem calling Open AI:
%s, type: %s message: %s"
+ (funcall error-callback 'error
+ (format "Problem calling Open
AI: %s, type: %s message: %s"
(cdr error-thrown)
(assoc-default 'type
(cdar data))
- (assoc-default 'message
(cdar data))))))
- :timeout 2
- :sync t)))
- (cdr (assoc 'embedding (aref (cdr (assoc 'data (request-response-data
resp))) 0)))))
+ (assoc-default 'message
(cdar data))))))))
-(defun llm-openai--chat-response (prompt &optional return-json-spec)
+(defun llm-openai--chat-response (prompt response-callback error-callback
&optional return-json-spec)
"Main method to send a PROMPT as a chat prompt to Open AI.
RETURN-JSON-SPEC, if specified, is a JSON spec to return from the
-Open AI API."
+Open AI API.
+
+RESPONSE-CALLBACK is a function to call with the LLM response.
+
+ERROR-CALLBACK is called if there is an error, with the error
+signal and message."
(unless (llm-openai-key provider)
(error "To call Open AI API, the key must have been set"))
(let (request-alist system-prompt)
@@ -116,14 +122,14 @@ Open AI API."
:data (json-encode request-alist)
:parser 'json-read
:error (cl-function (lambda (&key error-thrown data
&allow-other-keys)
- (error (format "Problem calling Open
AI: %s, type: %s message: %s"
- (cdr error-thrown)
- (assoc-default 'type
(cdar data))
- (assoc-default 'message
(cdar data))))))
- :sync t)))
+ (funcall error-callback
+ (format "Problem calling Open
AI: %s, type: %s message: %s"
+ (cdr error-thrown)
+ (assoc-default 'type
(cdar data))
+ (assoc-default
'message (cdar data)))))))))
(let ((result (cdr (assoc 'content (cdr (assoc 'message (aref (cdr
(assoc 'choices (request-response-data resp))) 0))))))
(func-result (cdr (assoc 'arguments (cdr (assoc 'function_call
(cdr (assoc 'message (aref (cdr (assoc 'choices (request-response-data resp)))
0)))))))))
- (or func-result result)))))
+ (funcall result-callback (or func-result result))))))
(cl-defmethod llm-chat-response ((provider llm-openai) prompt)
(llm-openai--chat-response prompt nil))
diff --git a/llm-tester.el b/llm-tester.el
index 53938ae721..089e5cd5de 100644
--- a/llm-tester.el
+++ b/llm-tester.el
@@ -1,4 +1,4 @@
-;;; llm-tester.el --- Helpers for testing LLM implementation
+;;; llm-tester.el --- Helpers for testing LLM implementation -*-
lexical-binding: t -*-
;; Copyright (c) 2023 Andrew Hyatt <ahyatt@gmail.com>
@@ -36,39 +36,42 @@
(defun llm-tester-embedding (provider)
"Test that PROVIDER can provide embeddings."
- (condition-case nil
- (let ((embedding (llm-embedding provider "This is a test.")))
- (if embedding
- (if (eq (type-of embedding) 'vector)
- (if (> (length embedding) 0)
- (message "SUCCESS: Provider %s provided an embedding of
length %d. First 10 values: %S" (type-of provider)
- (length embedding)
- (seq-subseq embedding 0 (min 10 (length
embedding))))
- (message "ERROR: Provider %s returned an empty embedding"
(type-of provider))))
- (message "ERROR: Provider %s did not return any embedding" (type-of
provider))))
- (not-implemented (message "ERROR: Provider %s could not provide
embeddings." (type-of provider)))))
+ (message "Testing provider %s for embeddings" (type-of provider))
+ (llm-embedding-async provider "This is a test."
+ (lambda (embedding)
+ (if embedding
+ (if (eq (type-of embedding) 'vector)
+ (if (> (length embedding) 0)
+ (message "SUCCESS: Provider %s provided
an embedding of length %d. First 10 values: %S" (type-of provider)
+ (length embedding)
+ (seq-subseq embedding 0 (min 10
(length embedding))))
+ (message "ERROR: Provider %s returned an
empty embedding" (type-of provider))))
+ (message "ERROR: Provider %s did not return any
embedding" (type-of provider))))
+ (lambda (type message)
+ (message "ERROR: Provider %s returned an error of
type %s with message %s" (type-of provider) type message))))
(defun llm-tester-chat (provider)
"Test that PROVIDER can interact with the LLM chat."
- (condition-case nil
- (let ((response (llm-chat-response
- provider
- (make-llm-chat-prompt
- :interactions (list
- (make-llm-chat-prompt-interaction
- :role 'user
- :content "Tell me a random cool feature of
emacs."))
- :context "You must answer all questions as if you were the
butler Jeeves from Jeeves and Wooster. Start all interactions with the phrase,
'Very good, sir.'"
- :examples '(("Tell me the capital of France." . "Very
good, sir. The capital of France is Paris, which I expect you to be familiar
with, since you were just there last week with your Aunt Agatha.")
- ("Could you take me to my favorite place?" .
"Very good, sir. I believe you are referring to the Drone's Club, which I will
take you to after you put on your evening attire."))
- :temperature 0.5
- :max-tokens 100))))
- (if response
- (if (> (length response) 0)
- (message "SUCCESS: Provider %s provided a response %s"
(type-of provider) response)
- (message "ERROR: Provider %s returned an empty response"
(type-of provider)))
- (message "ERROR: Provider %s did not return any response" (type-of
provider))))
- (not-implemented (message "ERROR: Provider %s could not get a chat."
(type-of provider)))))
+ (message "Testing provider %s for chat" (type-of provider))
+ (llm-chat-response provider
+ (make-llm-chat-prompt
+ :interactions (list
+ (make-llm-chat-prompt-interaction
+ :role 'user
+ :content "Tell me a random cool feature
of emacs."))
+ :context "You must answer all questions as if you were
the butler Jeeves from Jeeves and Wooster. Start all interactions with the
phrase, 'Very good, sir.'"
+ :examples '(("Tell me the capital of France." . "Very
good, sir. The capital of France is Paris, which I expect you to be familiar
with, since you were just there last week with your Aunt Agatha.")
+ ("Could you take me to my favorite place?" .
"Very good, sir. I believe you are referring to the Drone's Club, which I will
take you to after you put on your evening attire."))
+ :temperature 0.5
+ :max-tokens 100)
+ (lambda (response)
+ (if response
+ (if (> (length response) 0)
+ (message "SUCCESS: Provider %s provided a
response %s" (type-of provider) response)
+ (message "ERROR: Provider %s returned an empty
response" (type-of provider)))
+ (message "ERROR: Provider %s did not return any
response" (type-of provider))))
+ (lambda (type message)
+ (message "ERROR: Provider %s returned an error of type
%s with message %s" (type-of provider) type message))))
(defun llm-tester-all (provider)
"Test all llm functionality for PROVIDER."
diff --git a/llm.el b/llm.el
index 880fa3a0e7..e527dd9273 100644
--- a/llm.el
+++ b/llm.el
@@ -69,15 +69,48 @@ MAX-TOKENS is the maximum number of tokens to generate.
This is optional.
ROLE can a symbol, of either `user' or `assistant'."
role content)
+(defun llm--run-async-as-sync (f &rest args)
+ "Call async function F, passing ARGS.
+Two args will be appended to the end; a success callback, and an
+error callback. This will block until the async function calls
+one of the callbacks.
+
+The return value will be the value passed into the success callback."
+ (let ((cv (make-condition-variable (make-mutex "llm-chat-response")))
+ (response))
+ (apply f (append args
+ (list
+ (lambda (result)
+ (setq response result)
+ (condition-notify cv))
+ (lambda (type msg)
+ (signal type msg)
+ (condition-notify cv)))))
+ response))
+
(cl-defgeneric llm-chat-response (provider prompt)
"Return a response to PROMPT from PROVIDER.
PROMPT is a `llm-chat-prompt'. The response is a string."
- (ignore provider prompt)
+ (llm--run-async-as-sync #'llm-chat-response-async provider prompt))
+
+(cl-defgeneric llm-chat-response-async (provider prompt response-callback
error-callback)
+ "Return a response to PROMPT from PROVIDER.
+PROMPT is a `llm-chat-prompt'.
+RESPONSE-CALLBACK receives the string response.
+ERROR-CALLBACK receives the error response."
+ (ignore provider prompt response-callback error-callback)
(signal 'not-implemented nil))
(cl-defgeneric llm-embedding (provider string)
"Return a vector embedding of STRING from PROVIDER."
- (ignore provider string)
+ (llm--run-async-as-sync #'llm-embedding-async provider string))
+
+(cl-defgeneric llm-embedding-async (provider string vector-callback
error-callback)
+ "Calculate a vector embedding of STRING from PROVIDER.
+VECTOR-CALLBACK will be called with the vector embedding.
+ERROR-CALLBACK will be called in the event of an error, with an
+error signal and a string message."
+ (ignore provider string vector-callback error-callback)
(signal 'not-implemented nil))
(cl-defgeneric llm-count-tokens (provider string)
- [elpa] externals/llm abbff2aa9d 23/34: Change method name to llm-chat (without "-response"), update README, (continued)
- [elpa] externals/llm abbff2aa9d 23/34: Change method name to llm-chat (without "-response"), update README, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm e94bc937c7 27/34: Fix issue with llm-chat before method having too many arguments, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 7edd36b2dc 28/34: Fix obsolete or incorrect function calls in llm-fake, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm d4bbe9d84c 29/34: Fix incorrect requires in openai and vertex implementations, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 723c0b3786 31/34: Minor README whitespace and formatting fixes, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 8f30feb5c1 32/34: README improvements, including noting the nonfree llm warning, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 444850a981 24/34: Fix missing word in non-free warning message, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 0ed280c208 15/34: Add llm-fake, useful for developer testing using the llm methods, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm c55ccf157a 03/34: Clean up package specifications in elisp files, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 414d25a625 09/34: Removed various unused things, and format fixes, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 4e9be8183d 07/34: Merge branch 'async',
Andrew Hyatt <=
- [elpa] externals/llm dd20d6353c 21/34: Fix bug on llm-fake's error response to chat-response, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm 40151757de 26/34: Switch to a method of nonfree warnings easier for provider modules, Andrew Hyatt, 2023/09/16
- [elpa] externals/llm ba65755326 30/34: Improve the README with information on providers for end-users, Andrew Hyatt, 2023/09/16