emacs-elpa-diffs

From: ELPA Syncer
Subject: [elpa] externals/ellama b145091ddc 1/4: Improve naming with llm
Date: Tue, 13 Feb 2024 09:57:58 -0500 (EST)

branch: externals/ellama
commit b145091ddc6538ec23dc2d19f43a345afc001441
Author: Sergey Kostyaev <sskostyaev@gmail.com>
Commit: Sergey Kostyaev <sskostyaev@gmail.com>

    Improve naming with llm
---
 README.org | 60 +++++++++++++++++++++++++++++++++---------------------------
 ellama.el  |  7 +++++--
 2 files changed, 38 insertions(+), 29 deletions(-)

diff --git a/README.org b/README.org
index 1502008197..49c4ad4483 100644
--- a/README.org
+++ b/README.org
@@ -32,33 +32,39 @@ You can use ~ellama~ with other model or other llm provider.
 In that case you should customize ellama configuration like this:
 
 #+BEGIN_SRC  emacs-lisp
-  ;; YOU DON'T NEED NONE OF THIS CODE FOR SIMPLE INSTALL
-  ;; IT IS AN EXAMPLE OF CUSTOMIZATION.
-  (use-package ellama
-    :init
-    ;; language you want ellama to translate to
-    (setopt ellama-language "German")
-    ;; could be llm-openai for example
-    (require 'llm-ollama)
-    (setopt ellama-provider
-                    (make-llm-ollama
-                     ;; this model should be pulled to use it
-                     ;; value should be the same as you print in terminal during pull
-                     :chat-model "mistral:7b-instruct-v0.2-q6_K"
-                     :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
-    ;; Predefined llm providers for interactive switching.
-    ;; You shouldn't add ollama providers here - it can be selected interactively
-    ;; without it. It is just example.
-    (setopt ellama-providers
-                    '(("zephyr" . (make-llm-ollama
-                                   :chat-model "zephyr:7b-beta-q6_K"
-                                   :embedding-model "zephyr:7b-beta-q6_K"))
-                      ("mistral" . (make-llm-ollama
-                                    :chat-model "mistral:7b-instruct-v0.2-q6_K"
-                                    :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
-                      ("mixtral" . (make-llm-ollama
-                                    :chat-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"
-                                    :embedding-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k")))))
+       ;; YOU DON'T NEED NONE OF THIS CODE FOR SIMPLE INSTALL
+       ;; IT IS AN EXAMPLE OF CUSTOMIZATION.
+       (use-package ellama
+         :init
+         ;; language you want ellama to translate to
+         (setopt ellama-language "German")
+         ;; could be llm-openai for example
+         (require 'llm-ollama)
+         (setopt ellama-provider
+                         (make-llm-ollama
+                          ;; this model should be pulled to use it
+                          ;; value should be the same as you print in terminal during pull
+                          :chat-model "mistral:7b-instruct-v0.2-q6_K"
+                          :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+         ;; Predefined llm providers for interactive switching.
+         ;; You shouldn't add ollama providers here - it can be selected interactively
+         ;; without it. It is just example.
+         (setopt ellama-providers
+                         '(("zephyr" . (make-llm-ollama
+                                        :chat-model "zephyr:7b-beta-q6_K"
+                                        :embedding-model "zephyr:7b-beta-q6_K"))
+                           ("mistral" . (make-llm-ollama
+                                         :chat-model "mistral:7b-instruct-v0.2-q6_K"
+                                         :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+                           ("mixtral" . (make-llm-ollama
+                                         :chat-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"
+                                         :embedding-model "mixtral:8x7b-instruct-v0.1-q3_K_M-4k"))))
+         ;; Naming new sessions with llm
+         (setopt ellama-naming-provider
+                 (make-llm-ollama
+                  :chat-model "mistral:7b-instruct-v0.2-q6_K"
+                  :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
+         (setopt ellama-naming-scheme 'ellama-generate-name-by-llm))
 #+END_SRC
 
 ** Commands
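
The README addition above boils down to two new user options. As a minimal
sketch (reusing the model name from the patch; any pulled Ollama model would
do), enabling llm-based session naming looks like:

    (require 'llm-ollama)
    ;; Optional: a dedicated (possibly smaller) model just for naming.
    ;; Left nil, ellama falls back to `ellama-provider': see the
    ;; (or ellama-naming-provider ellama-provider) in the hunk below.
    (setopt ellama-naming-provider
            (make-llm-ollama
             :chat-model "mistral:7b-instruct-v0.2-q6_K"
             :embedding-model "mistral:7b-instruct-v0.2-q6_K"))
    ;; Ask the llm for session names instead of the default scheme.
    (setopt ellama-naming-scheme 'ellama-generate-name-by-llm)
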
diff --git a/ellama.el b/ellama.el
index 3f5744cdae..ae500febd8 100644
--- a/ellama.el
+++ b/ellama.el
@@ -463,8 +463,11 @@ CONTEXT contains context for next request."
   (let ((provider (or ellama-naming-provider ellama-provider)))
     (string-trim-right
      (string-trim
-      (llm-chat provider (llm-make-simple-chat-prompt
-                         (format ellama-get-name-template prompt))))
+      (seq-first
+       (split-string
+       (llm-chat provider (llm-make-simple-chat-prompt
+                           (format ellama-get-name-template prompt)))
+       "\n")))
      "\\.")))
 
 (defun ellama-generate-name-by-llm (provider _action prompt)
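
For clarity, the change above makes `ellama-get-name' keep only the first
line of the model's reply before trimming whitespace and a trailing period.
A standalone sketch of that cleanup, with a hypothetical raw llm response
standing in for what `llm-chat' returns:

    (require 'seq)
    (require 'subr-x)  ; string-trim / string-trim-right on older Emacsen
    (let ((response "Emacs Help Session.\nI chose this name because..."))
      (string-trim-right
       (string-trim (seq-first (split-string response "\n")))
       "\\."))
    ;; => "Emacs Help Session"

This way a chatty model that explains its choice still yields a clean
one-line session name.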


