From: ELPA Syncer
Subject: [elpa] externals/ellama 1a3dd96d62 4/7: Add ellama-chat-send-last-message to documentation.
Date: Fri, 29 Nov 2024 18:58:09 -0500 (EST)

branch: externals/ellama
commit 1a3dd96d62288f653151b30ad82221c9d8dd6271
Author: Sergey Kostyaev <sskostyaev@gmail.com>
Commit: Sergey Kostyaev <sskostyaev@gmail.com>

    Add ellama-chat-send-last-message to documentation.
---
 README.org | 105 ++++++++++++++++++++++++++++++++-----------------------------
 1 file changed, 56 insertions(+), 49 deletions(-)

diff --git a/README.org b/README.org
index 48c40e50ff..ed7f721104 100644
--- a/README.org
+++ b/README.org
@@ -33,58 +33,61 @@ In that case you should customize ellama configuration like this:
 
 #+BEGIN_SRC  emacs-lisp
   (use-package ellama
-    :bind ("C-c e" . ellama-transient-main-menu)
-    :init
-    ;; setup key bindings
-    ;; (setopt ellama-keymap-prefix "C-c e")
-    ;; language you want ellama to translate to
-    (setopt ellama-language "German")
-    ;; could be llm-openai for example
-    (require 'llm-ollama)
-    (setopt ellama-provider
-           (make-llm-ollama
-            ;; this model should be pulled to use it
-            ;; value should be the same as you print in terminal during pull
-            :chat-model "llama3:8b-instruct-q8_0"
-            :embedding-model "nomic-embed-text"
-            :default-chat-non-standard-params '(("num_ctx" . 8192))))
-    (setopt ellama-summarization-provider
+      :bind ("C-c e" . ellama-transient-main-menu)
+      :init
+      ;; setup key bindings
+      ;; (setopt ellama-keymap-prefix "C-c e")
+      ;; language you want ellama to translate to
+      (setopt ellama-language "German")
+      ;; could be llm-openai for example
+      (require 'llm-ollama)
+      (setopt ellama-provider
+         (make-llm-ollama
+              ;; this model should be pulled to use it
+              ;; value should be the same as you print in terminal during pull
+              :chat-model "llama3:8b-instruct-q8_0"
+              :embedding-model "nomic-embed-text"
+              :default-chat-non-standard-params '(("num_ctx" . 8192))))
+      (setopt ellama-summarization-provider
+             (make-llm-ollama
+              :chat-model "qwen2.5:3b"
+              :embedding-model "nomic-embed-text"
+              :default-chat-non-standard-params '(("num_ctx" . 32768))))
+      (setopt ellama-coding-provider
+             (make-llm-ollama
+              :chat-model "qwen2.5-coder:3b"
+              :embedding-model "nomic-embed-text"
+              :default-chat-non-standard-params '(("num_ctx" . 32768))))
+      ;; Predefined llm providers for interactive switching.
+      ;; You shouldn't add ollama providers here - it can be selected interactively
+      ;; without it. It is just example.
+      (setopt ellama-providers
+             '(("zephyr" . (make-llm-ollama
+                            :chat-model "zephyr:7b-beta-q6_K"
+                            :embedding-model "zephyr:7b-beta-q6_K"))
+               ("mistral" . (make-llm-ollama
+                             :chat-model "mistral:7b-instruct-v0.2-q6_K"
+                             :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+               ("mixtral" . (make-llm-ollama
+                             :chat-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"
+              :embedding-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"))))
+      ;; Naming new sessions with llm
+      (setopt ellama-naming-provider
+             (make-llm-ollama
+              :chat-model "llama3:8b-instruct-q8_0"
+              :embedding-model "nomic-embed-text"
+              :default-chat-non-standard-params '(("stop" . ("\n")))))
+      (setopt ellama-naming-scheme 'ellama-generate-name-by-llm)
+      ;; Translation llm provider
+      (setopt ellama-translation-provider
            (make-llm-ollama
             :chat-model "qwen2.5:3b"
             :embedding-model "nomic-embed-text"
-            :default-chat-non-standard-params '(("num_ctx" . 32768))))
-    (setopt ellama-coding-provider
-           (make-llm-ollama
-            :chat-model "qwen2.5-coder:3b"
-            :embedding-model "nomic-embed-text"
-            :default-chat-non-standard-params '(("num_ctx" . 32768))))
-    ;; Predefined llm providers for interactive switching.
-    ;; You shouldn't add ollama providers here - it can be selected interactively
-    ;; without it. It is just example.
-    (setopt ellama-providers
-           '(("zephyr" . (make-llm-ollama
-                          :chat-model "zephyr:7b-beta-q6_K"
-                          :embedding-model "zephyr:7b-beta-q6_K"))
-             ("mistral" . (make-llm-ollama
-                           :chat-model "mistral:7b-instruct-v0.2-q6_K"
-                           :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
-             ("mixtral" . (make-llm-ollama
-                           :chat-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"
-           :embedding-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"))))
-    ;; Naming new sessions with llm
-    (setopt ellama-naming-provider
-           (make-llm-ollama
-            :chat-model "llama3:8b-instruct-q8_0"
-            :embedding-model "nomic-embed-text"
-            :default-chat-non-standard-params '(("stop" . ("\n")))))
-    (setopt ellama-naming-scheme 'ellama-generate-name-by-llm)
-    ;; Translation llm provider
-    (setopt ellama-translation-provider
-         (make-llm-ollama
-          :chat-model "qwen2.5:3b"
-          :embedding-model "nomic-embed-text"
-          :default-chat-non-standard-params
-          '(("num_ctx" . 32768)))))
+            :default-chat-non-standard-params
+            '(("num_ctx" . 32768))))
+      :config
+      ;; send last message in chat buffer with C-c C-c
+      (add-hook 'org-ctrl-c-ctrl-c-hook #'ellama-chat-send-last-message))
 #+END_SRC
 
 ** Commands
@@ -96,6 +99,10 @@ buffer and continue conversation. If called with universal argument
 (~C-u~) will start new session with llm model interactive selection.
 [[imgs/ellama-ask.gif]]
 
+*** ellama-chat-send-last-message
+
+Send last user message extracted from current ellama chat buffer.
+
 *** ellama-ask-about
 
 Ask Ellama about a selected region or the current buffer.
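
The practical effect of the commit's new ~:config~ line is that pressing ~C-c C-c~ in an org-mode ellama chat buffer sends the last user message: functions on ~org-ctrl-c-ctrl-c-hook~ run before Org's own context-dependent dispatch, and a handler that returns non-nil is treated as having consumed the key. A minimal standalone sketch of the same wiring for readers not using ~use-package~, assuming only that ellama and Org are installed:

#+BEGIN_SRC emacs-lisp
  ;; Wait until ellama is loaded so the command is defined.
  (with-eval-after-load 'ellama
    ;; Functions on org-ctrl-c-ctrl-c-hook run before Org's own C-c C-c
    ;; handling; a hook function that returns non-nil is treated as having
    ;; handled the key, so the default Org action is skipped.
    (add-hook 'org-ctrl-c-ctrl-c-hook #'ellama-chat-send-last-message))
#+END_SRC

Because the hook is global to Org, the command presumably returns nil outside ellama chat buffers, leaving ~C-c C-c~ with its usual behavior everywhere else.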


