emacs-elpa-diffs
[Top][All Lists]
Advanced

[Date Prev][Date Next][Thread Prev][Thread Next][Date Index][Thread Index]

[elpa] externals/llm caa1c6b037: Add new openai variant, llm-openai-comp


From: ELPA Syncer
Subject: [elpa] externals/llm caa1c6b037: Add new openai variant, llm-openai-compatible
Date: Tue, 19 Dec 2023 03:58:08 -0500 (EST)

branch: externals/llm
commit caa1c6b037cc9180e5cc7ab42772aab07a4eaf6d
Author: Andrew Hyatt <ahyatt@gmail.com>
Commit: Andrew Hyatt <ahyatt@gmail.com>

    Add new openai variant, llm-openai-compatible
    
    This will allow users to use the Open AI API with other endpoints (that is,
    URLs).
    
    This should fix https://github.com/ahyatt/llm/issues/9.
---
 NEWS.org      |  2 ++
 README.org    |  3 +++
 llm-openai.el | 82 +++++++++++++++++++++++++++++++++++++++--------------------
 3 files changed, 60 insertions(+), 27 deletions(-)

diff --git a/NEWS.org b/NEWS.org
index e73538ad61..6f267dc5ab 100644
--- a/NEWS.org
+++ b/NEWS.org
@@ -1,3 +1,5 @@
+* Version 0.8
+- Allow users to change the Open AI URL, to allow for proxies and other 
services that re-use the API.
 * Version 0.7
 - Upgrade Google Cloud Vertex to Gemini - previous models are no longer 
available.
 - Added =gemini= provider, which is an alternate endpoint with alternate (and 
easier) authentication and setup compared to Cloud Vertex.
diff --git a/README.org b/README.org
index 006ac007f9..52db291906 100644
--- a/README.org
+++ b/README.org
@@ -24,6 +24,9 @@ You can set up with ~make-llm-openai~, with the following 
parameters:
 - ~:key~, the Open AI key that you get when you sign up to use Open AI's APIs. 
 Remember to keep this private.  This is non-optional.
 - ~:chat-model~: A model name from the 
[[https://platform.openai.com/docs/models/gpt-4][list of Open AI's model 
names.]]  Keep in mind some of these are not available to everyone.  This is 
optional, and will default to a reasonable 3.5 model.
 - ~:embedding-model~: A model name from 
[[https://platform.openai.com/docs/guides/embeddings/embedding-models][list of 
Open AI's embedding model names.]]  This is optional, and will default to a 
reasonable model.
+** Open AI Compatible
+There are many Open AI compatible APIs and proxies of Open AI.  You can set up 
one with ~make-llm-openai-compatible~, with the following parameter:
+- ~:url~, the URL leading up to the command ("embeddings" or 
"chat/completions").  So, for example, "https://api.openai.com/v1/" is the URL 
to use Open AI (although if you wanted to do that, just use ~make-llm-openai~ 
instead).
 ** Gemini (not via Google Cloud)
 This is Google's AI model.  You can get an API key via their 
[[https://makersuite.google.com/app/apikey][page on Google AI Studio]].
 Set this up with ~make-llm-gemini~, with the following parameters:
diff --git a/llm-openai.el b/llm-openai.el
index 1160ebfca9..d11163dbc7 100644
--- a/llm-openai.el
+++ b/llm-openai.el
@@ -51,6 +51,15 @@ EMBEDDING-MODEL is the model to use for embeddings.  If 
unset, it
 will use a reasonable default."
   key chat-model embedding-model)
 
+(cl-defstruct (llm-openai-compatible (:include llm-openai))
+  "A structure for other APIs that use Open AI's API.
+
+URL is the URL to use for the API, up to the command. So, for
+example, if the API for chat is at
+https://api.example.com/v1/chat, then URL should be
+\"https://api.example.com/v1/\"."
+  url)
+
 (cl-defmethod llm-nonfree-message-info ((provider llm-openai))
   (ignore provider)
   (cons "Open AI" "https://openai.com/policies/terms-of-use";))
@@ -78,27 +87,49 @@ MODEL is the embedding model to use, or nil to use the 
default.."
       (error (llm-openai--error-message response))
     (funcall extractor response)))
 
-(cl-defmethod llm-embedding-async ((provider llm-openai) string 
vector-callback error-callback)
+(cl-defmethod llm-openai--check-key ((provider llm-openai))
   (unless (llm-openai-key provider)
-    (error "To call Open AI API, add a key to the `llm-openai' provider."))
+    (error "To call Open AI API, add a key to the `llm-openai' provider.")))
+
+(cl-defmethod llm-openai--check-key ((_ llm-openai-compatible))
+  ;; It isn't always the case that a key is needed for Open AI compatible APIs.
+  )
+
+(defun llm-openai--headers (provider)
+  "From PROVIDER, return the headers to use for a request.
+This is just the key, if it exists."
+  (when (llm-openai-key provider)
+    `(("Authorization" . ,(format "Bearer %s" (llm-openai-key provider))))))
+
+(cl-defmethod llm-openai--url ((_ llm-openai) command)
+  "Return the URL for COMMAND for PROVIDER."
+  (concat "https://api.openai.com/v1/"; command))
+
+(cl-defmethod llm-openai--url ((provider llm-openai-compatible) command)
+  "Return the URL for COMMAND for PROVIDER."
+  (concat (llm-openai-compatible-url provider)
+          (unless (string-suffix-p "/" (llm-openai-compatible-url provider))
+            "/") command))
+
+(cl-defmethod llm-embedding-async ((provider llm-openai) string 
vector-callback error-callback)
+  (llm-openai--check-key provider)
   (let ((buf (current-buffer)))
-    (llm-request-async "https://api.openai.com/v1/embeddings";
-                     :headers `(("Authorization" . ,(format "Bearer %s" 
(llm-openai-key provider))))
-                     :data (llm-openai--embedding-request 
(llm-openai-embedding-model provider) string)
-                     :on-success (lambda (data)
+    (llm-request-async (llm-openai--url provider "embeddings")
+                       :headers (llm-openai--headers provider)
+                       :data (llm-openai--embedding-request 
(llm-openai-embedding-model provider) string)
+                       :on-success (lambda (data)
+                                     (llm-request-callback-in-buffer
+                                      buf vector-callback 
(llm-openai--embedding-extract-response data)))
+                       :on-error (lambda (_ data) 
                                    (llm-request-callback-in-buffer
-                                    buf vector-callback 
(llm-openai--embedding-extract-response data)))
-                     :on-error (lambda (_ data) 
-                                 (llm-request-callback-in-buffer
-                                  buf error-callback 'error
-                                  (llm-openai--error-message data))))))
+                                    buf error-callback 'error
+                                    (llm-openai--error-message data))))))
 
 (cl-defmethod llm-embedding ((provider llm-openai) string)
-  (unless (llm-openai-key provider)
-    (error "To call Open AI API, add a key to the `llm-openai' provider."))
+  (llm-openai--check-key provider)
   (llm-openai--handle-response
-   (llm-request-sync "https://api.openai.com/v1/embeddings";
-               :headers `(("Authorization" . ,(format "Bearer %s" 
(llm-openai-key provider))))
+   (llm-request-sync (llm-openai--url provider "embeddings")
+               :headers (llm-openai--headers provider)
                :data (llm-openai--embedding-request 
(llm-openai-embedding-model provider) string))
    #'llm-openai--embedding-extract-response))
 
@@ -155,11 +186,10 @@ STREAMING if non-nil, turn on response streaming."
     (or func-result result)))
 
 (cl-defmethod llm-chat-async ((provider llm-openai) prompt response-callback 
error-callback)
-  (unless (llm-openai-key provider)
-    (error "To call Open AI API, the key must have been set"))
+  (llm-openai--check-key provider)
   (let ((buf (current-buffer)))
-    (llm-request-async "https://api.openai.com/v1/chat/completions";
-      :headers `(("Authorization" . ,(format "Bearer %s" (llm-openai-key 
provider))))
+    (llm-request-async (llm-openai--url provider "chat/completions")
+      :headers (llm-openai--headers provider)
       :data (llm-openai--chat-request (llm-openai-chat-model provider) prompt)
       :on-success (lambda (data)
                     (let ((response (llm-openai--extract-chat-response data)))
@@ -175,11 +205,10 @@ STREAMING if non-nil, turn on response streaming."
                                      (cdr (assoc 'message errdata)))))))))
 
 (cl-defmethod llm-chat ((provider llm-openai) prompt)
-  (unless (llm-openai-key provider)
-    (error "To call Open AI API, the key must have been set"))
+  (llm-openai--check-key provider)
   (let ((response (llm-openai--handle-response
-                   (llm-request-sync 
"https://api.openai.com/v1/chat/completions";
-                                     :headers `(("Authorization" . ,(format 
"Bearer %s" (llm-openai-key provider))))
+                   (llm-request-sync (llm-openai--url provider 
"chat/completions")
+                                     :headers (llm-openai--headers provider)
                                      :data (llm-openai--chat-request 
(llm-openai-chat-model provider)
                                                                      prompt))
                    #'llm-openai--extract-chat-response)))
@@ -232,11 +261,10 @@ them from 1 to however many are sent.")
     current-response))
 
 (cl-defmethod llm-chat-streaming ((provider llm-openai) prompt 
partial-callback response-callback error-callback)
-  (unless (llm-openai-key provider)
-    (error "To call Open AI API, the key must have been set"))
+  (llm-openai--check-key provider)
   (let ((buf (current-buffer)))
-    (llm-request-async "https://api.openai.com/v1/chat/completions";
-                       :headers `(("Authorization" . ,(format "Bearer %s" 
(llm-openai-key provider))))
+    (llm-request-async (llm-openai--url provider "chat/completions")
+                       :headers (llm-openai--headers provider)
                        :data (llm-openai--chat-request (llm-openai-chat-model 
provider) prompt nil t)
                        :on-error (lambda (_ data)
                                    (let ((errdata (cdr (assoc 'error data))))



reply via email to

[Prev in Thread] Current Thread [Next in Thread]