
From: ELPA Syncer
Subject: [nongnu] elpa/gptel b2a2c66f2a 1/2: gptel: Add support for gpt-4o-mini
Date: Thu, 18 Jul 2024 18:59:55 -0400 (EDT)

branch: elpa/gptel
commit b2a2c66f2ae6f56057bedfde4d9f367dab69bb5e
Author: Karthik Chikmagalur <karthikchikmagalur@gmail.com>
Commit: Karthik Chikmagalur <karthikchikmagalur@gmail.com>

    gptel: Add support for gpt-4o-mini
    
    * gptel.el (gptel--openai, gptel-model): Add support for
    gpt-4o-mini.
---
 gptel.el | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/gptel.el b/gptel.el
index 02b94e5377..e642c00cf6 100644
--- a/gptel.el
+++ b/gptel.el
@@ -412,6 +412,7 @@ by the LLM provider's API.
 The current options for ChatGPT are
 - \"gpt-3.5-turbo\"
 - \"gpt-3.5-turbo-16k\"
+- \"gpt-4o-mini\"
 - \"gpt-4\"
 - \"gpt-4o\"
 - \"gpt-4-turbo\"
@@ -424,6 +425,7 @@ To set the model for a chat session interactively call
   :safe #'always
   :type '(choice
           (string :tag "Specify model name")
+          (const :tag "GPT 4 omni mini" "gpt-4o-mini")
           (const :tag "GPT 3.5 turbo" "gpt-3.5-turbo")
           (const :tag "GPT 3.5 turbo 16k" "gpt-3.5-turbo-16k")
           (const :tag "GPT 4" "gpt-4")
@@ -451,9 +453,9 @@ To set the temperature for a chat session interactively call
    "ChatGPT"
    :key 'gptel-api-key
    :stream t
-   :models '("gpt-3.5-turbo" "gpt-3.5-turbo-16k" "gpt-4" "gpt-4o"
-             "gpt-4-turbo" "gpt-4-turbo-preview" "gpt-4-32k"
-             "gpt-4-1106-preview" "gpt-4-0125-preview")))
+   :models '("gpt-3.5-turbo" "gpt-3.5-turbo-16k" "gpt-4o-mini"
+             "gpt-4" "gpt-4o" "gpt-4-turbo" "gpt-4-turbo-preview"
+             "gpt-4-32k" "gpt-4-1106-preview" "gpt-4-0125-preview")))
 
 (defcustom gptel-backend gptel--openai
   "LLM backend to use.


