diff --git a/Makefile b/Makefile
index 37993a3..caf8730 100644
--- a/Makefile
+++ b/Makefile
@@ -33,6 +33,7 @@ test: install
 	-l ./test/robby-request-test.el \
 	-l ./test/robby-test-env.el \
 	-l ./test/robby-utils-test.el \
+	-l ./test/robby-validation-test.el \
 	-eval '(ert-run-tests-batch-and-exit "$(MATCH)")'

 EL_FILES := $(wildcard *.el)
@@ -41,7 +42,7 @@ EL_FILES := $(wildcard *.el)
 checkdoc:
 	for FILE in ${EL_FILES}; do $(EMACS) --batch -L . -l ./test/robby-test-env.el -eval "(checkdoc-file \"$$FILE\")" ; done

-compile: install
+compile: install clean-compiled
 	$(EMACS) --batch -L . -l ./test/robby-test-env.el -f batch-byte-compile robby-*.el

 lint: install
diff --git a/fixtures/streaming-response-complete-status-200.txt b/fixtures/streaming-response-complete-status-200.txt
new file mode 100644
index 0000000..d7916ec
--- /dev/null
+++ b/fixtures/streaming-response-complete-status-200.txt
@@ -0,0 +1,16 @@
+data:
+{"id":"chatcmpl-a","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-a","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-7bx5i7z5CscbA6mQgHGJ2qlXiv15s","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":" there!"},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-7bx5i7z5CscbA6mQgHGJ2qlXiv15s","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}
+
+data:
+[DONE]
+
+HTTP STATUS: 200
diff --git a/fixtures/streaming-response-complete-status-400.txt b/fixtures/streaming-response-complete-status-400.txt
new file mode 100644
index 0000000..47cbd33
--- /dev/null
+++ b/fixtures/streaming-response-complete-status-400.txt
@@ -0,0 +1,17 @@
+data:
+{"id":"chatcmpl-a","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-a","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-7bx5i7z5CscbA6mQgHGJ2qlXiv15s","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{"content":" there!"},"finish_reason":null}]}
+
+data:
+{"id":"chatcmpl-7bx5i7z5CscbA6mQgHGJ2qlXiv15s","object":"chat.completion.chunk","created":1689279638,"model":"gpt-3.5-turbo-0613","choices":[{"index":0,"delta":{},"finish_reason":"stop"}]}
+
+data:
+[DONE]
+
+HTTP STATUS: 400
+
diff --git a/fixtures/streaming-response-openai-model-not-found.txt b/fixtures/streaming-response-openai-model-not-found.txt
new file mode 100644
index 0000000..dfdb8c7
--- /dev/null
+++ b/fixtures/streaming-response-openai-model-not-found.txt
@@ -0,0 +1,12 @@
+{
+  "error": {
+    "message": "The model `adfasdf` does not exist or you do not have access to it.",
+    "type": "invalid_request_error",
+    "param": null,
+    "code": "model_not_found"
+  }
+}
+
+ HTTP STATUS: 404
+
+
diff --git a/fixtures/streaming-response-togetherai-model-not-found.txt b/fixtures/streaming-response-togetherai-model-not-found.txt
new file mode 100644
index 0000000..00b15e3
--- /dev/null
+++ b/fixtures/streaming-response-togetherai-model-not-found.txt
@@ -0,0 +1 @@
+{"error":{"message":"Unable to access model adfasdf. Please visit https://api.together.xyz to see the list of supported models or contact the owner to request access.","type":"invalid_request_error","param":null,"code":"model_not_found"}} HTTP STATUS: 404
diff --git a/robby-api-key.el b/robby-api-key.el
index fa29c1f..4a5ca50 100644
--- a/robby-api-key.el
+++ b/robby-api-key.el
@@ -7,10 +7,16 @@
 ;;; Code:

 (require 'auth-source)

+;; declared in robby-customization.el
+(defvar robby-openai-api-key)
+
+;; declared in robby-provider.el
+(declare-function robby--provider-host "robby-provider" ())
+
 (defun robby--get-api-key-from-auth-source ()
   "Get api key from auth source."
   (if-let ((secret (plist-get (car (auth-source-search
-                                    :host "api.openai.com"
+                                    :host (robby--provider-host)
                                     :user "apikey"
                                     :require '(:secret)))
                               :secret)))
@@ -19,15 +25,6 @@ secret)
     (user-error "No `robby-api-key' found in auth source")))

-(defcustom robby-openai-api-key #'robby--get-api-key-from-auth-source
-  "OpenAI API key.
-
-A string, or a function that returns the API key."
-  :group 'robby
-  :type '(choice
-          (string :tag "OpenAI API key")
-          (function :tag "Function that returns the OpenAI API key")))
-
 (defun robby--get-api-key ()
   "Get api key from `robby-api-key'."
   (cond
diff --git a/robby-customization.el b/robby-customization.el
index de2b8d7..eeb7738 100644
--- a/robby-customization.el
+++ b/robby-customization.el
@@ -6,11 +6,15 @@
 ;;; Code:

+(require 'map)
 (require 'spinner)

 (require 'robby-api-key)
-(require 'robby-validation)
 (require 'robby-utils)
+(require 'robby-validation)
+
+;;; Variable declarations
+(defvar robby--provider-settings) ; defined in robby-provider.el

 ;;; function to validate custom api options
 (defun robby--validate-custom-api-option (name)
@@ -30,6 +34,15 @@ Return an error message if the value is invalid, or nil if it is valid."
   :group 'tools
   :tag "robby")

+(defcustom robby-openai-api-key #'robby--get-api-key-from-auth-source
+  "OpenAI API key.
+
+A string, or a function that returns the API key."
+  :group 'robby
+  :type '(choice
+          (string :tag "OpenAI API key")
+          (function :tag "Function that returns the OpenAI API key")))
+
 (defcustom robby-logging nil
   "Log to *robby-log* buffer if t."
   :type 'boolean
@@ -83,24 +96,38 @@ It should include a `%s' placeholder for the spinner."
   :type 'string
   :group 'robby)

-(defcustom robby-chat-system-message "You are an AI tool embedded within Emacs. Assist users with their tasks and provide information as needed. Do not engage in harmful or malicious behavior. Please provide helpful information. Answer concisely."
+(defcustom robby-chat-system-message
+  "You are an AI tool embedded within Emacs. Assist users with their tasks and provide information as needed. Do not engage in harmful or malicious behavior. Please provide helpful information. Answer concisely."
   "System message to use with OpenAI Chat API."
   :type 'string
   :group 'robby)

-(defcustom robby-api-url "https://api.openai.com/v1/chat/completions"
-  "URL to use for OpenAI API requests."
-  :type 'string
-  :group 'robby)
-
 ;;; chat api options
 (defgroup robby-chat-api nil
   "Options to pass to the chat API."
   :group 'robby)

-(defcustom robby-chat-model "gpt-3.5-turbo"
-  "The model to use with the completions API."
-  :type 'string
+(defun robby--provider-type ()
+  "Get the `robby-provider' custom type.
+
+Includes a choice for each provider added via
+`robby-add-provider'."
+  `(choice
+    ,@(seq-map
+       (lambda (provider-settings)
+         `(const :tag ,(plist-get (cdr provider-settings) :name) ,(car provider-settings)))
+       robby--provider-settings)))
+
+(defcustom robby-provider 'openai
+  "The AI provider to use."
+  :type (robby--provider-type)
+  :group 'robby)
+
+(defcustom robby-chat-model nil
+  "The model to use with the chat completions API.
+
+If nil, use the default model for the provider."
+  :type '(choice string (const nil))
   :group 'robby-chat-api)

 (defcustom robby-chat-max-tokens 2000
diff --git a/robby-define-command.el b/robby-define-command.el
index d713324..6557cab 100644
--- a/robby-define-command.el
+++ b/robby-define-command.el
@@ -8,7 +8,6 @@
 (require 'robby-run-command)

-
 (cl-defmacro robby-define-command (name
                                    docstring
                                    &key
diff --git a/robby-logging.el b/robby-logging.el
index ab327ca..9982da2 100644
--- a/robby-logging.el
+++ b/robby-logging.el
@@ -14,9 +14,9 @@
   "Insert MSG in `robby--log' buffer."
   (if robby-logging
       (with-current-buffer (get-buffer-create robby--log-buffer)
-        (insert msg))))
-
-(provide 'robby-logging)
+        (setq buffer-read-only nil)
+        (insert msg)
+        (setq buffer-read-only t))))

 (provide 'robby-logging)
diff --git a/robby-mistralai-provider.el b/robby-mistralai-provider.el
new file mode 100644
index 0000000..258fadb
--- /dev/null
+++ b/robby-mistralai-provider.el
@@ -0,0 +1,26 @@
+;;; robby-mistralai-provider.el --- robby mistralai provider -*- lexical-binding:t -*-
+
+;;; Code:
+
+(require 'robby-provider)
+
+(require 'cl-generic)
+
+(robby-add-provider
+ :symbol 'mistralai
+ :name "Mistral AI"
+ :host "api.mistral.ai"
+ :default-model "mistral-small"
+ :models-path "/v1/models")
+
+;; example:
+;; ((object . "error") (message . "Invalid model: gpt-4-turbo-preview") (type . "invalid_model") (param) (code . "1500"))
+(cl-defmethod robby-provider-parse-error :around (data &context (robby-provider (eql 'mistralai)))
+  "Parse mistralai error from response DATA."
+  (let ((object (alist-get 'object data)))
+    (when (and (stringp object) (string= object "error"))
+      (alist-get 'message data))))
+
+(provide 'robby-mistralai-provider)
+
+;;; robby-mistralai-provider.el ends here
diff --git a/robby-models.el b/robby-models.el
index 807226d..122c0dd 100644
--- a/robby-models.el
+++ b/robby-models.el
@@ -9,18 +9,19 @@
 (require 'robby-api-key)
 (require 'robby-request)
 (require 'robby-customization)
+(require 'robby-provider)

-(defvar robby-models nil)
+(defvar robby--models nil)

 (defun robby--get-models ()
   "Get the list of available models from OpenAI.

-Make request to OpenAI API to get the list of available models."
-  (if robby-models
-      robby-models
+Make a request to the provider's API to get the list of available models."
+  (if robby--models
+      robby--models
     (let* ((inhibit-message t)
            (message-log-max nil)
-           (url "https://api.openai.com/v1/models")
+           (url (concat "https://" (robby--provider-host) (robby--provider-models-path)))
            (url-request-method "GET")
            (url-request-extra-headers
             `(("Content-Type" . "application/json")
@@ -28,17 +29,16 @@ Make request to OpenAI API to get the list of available models.
            (inhibit-message t)
            (message-log-max nil))
       (with-current-buffer (url-retrieve-synchronously url)
+        (robby--log (format "Models response: %s\n" (buffer-string)))
         (goto-char (point-min))
-        (re-search-forward "^{")
+        (re-search-forward "^[[{]")
         (backward-char 1)
         (let* ((json-object-type 'alist)
                (resp (json-read))
-               (err (robby--request-parse-error-data resp)))
+               (err (robby-provider-parse-error resp)))
           (if err
               (error "Error fetching models: %S" err)
-            (let* ((all-models (seq-map (lambda (obj) (cdr (assoc 'id obj))) (cdr (assoc 'data resp))))
-                   (gpt-models (seq-filter (lambda (name) (string-prefix-p "gpt" name)) all-models)))
-              (setq robby-models gpt-models))))))))
+            (setq robby--models (robby-provider-parse-models resp))))))))

 (provide 'robby-models)
diff --git a/robby-openai-provider.el b/robby-openai-provider.el
new file mode 100644
index 0000000..fa9f935
--- /dev/null
+++ b/robby-openai-provider.el
@@ -0,0 +1,25 @@
+;;; robby-openai-provider.el --- robby openai provider -*- lexical-binding:t -*-
+
+;;; Code:
+
+(require 'robby-provider)
+
+(require 'cl-generic)
+
+(robby-add-provider
+ :symbol 'openai
+ :name "OpenAI"
+ :host "api.openai.com"
+ :default-model "gpt-3.5-turbo"
+ :models-path "/v1/models")
+
+(cl-defmethod robby-provider-parse-models :around (data &context (robby-provider (eql 'openai)))
+  (let ((models (cl-call-next-method data)))
+    (seq-filter
+     (lambda (name)
+       (string-prefix-p "gpt" name))
+     models)))
+
+(provide 'robby-openai-provider)
+
+;;; robby-openai-provider.el ends here
diff --git a/robby-process.el b/robby-process.el
index 1304ec3..d4c708b 100644
--- a/robby-process.el
+++ b/robby-process.el
@@ -26,9 +26,9 @@ Emacs Lisp, do not print messages if SILENTP is t.
 Note that you cannot currently kill the last robby process if
 you are using `url-retreive'; you must be using `curl'"
   (interactive)
+  (robby--spinner-stop)
   (if (robby--process-running-p)
       (progn
-        (robby--spinner-stop)
         (kill-process robby--last-process)
         (when (not silentp)
           (message "robby process killed")))
diff --git a/robby-provider.el b/robby-provider.el
new file mode 100644
index 0000000..25a9388
--- /dev/null
+++ b/robby-provider.el
@@ -0,0 +1,59 @@
+;;; robby-provider.el --- Support Multiple AI Services -*- lexical-binding:t -*-
+
+;;; Commentary:
+
+;;; Code:
+(require 'cl-generic)
+
+;; declared in robby-customization.el
+(defvar robby-provider)
+
+(defvar robby--provider-settings nil
+  "Global alist of provider settings.")
+
+(defun robby--get-provider-settings ()
+  "Return the settings of the current provider."
+  (alist-get robby-provider robby--provider-settings))
+
+(defun robby--provider-name ()
+  "Return the name of the current provider."
+  (plist-get (robby--get-provider-settings) :name))
+
+(defun robby--provider-host ()
+  "Return the host of the current provider."
+  (plist-get (robby--get-provider-settings) :host))
+
+(defun robby--provider-default-model ()
+  "Return the default model to use with the current provider."
+  (plist-get (robby--get-provider-settings) :default-model))
+
+(defun robby--provider-api-base-path ()
+  "Return the API base path of the current provider."
+  (plist-get (robby--get-provider-settings) :api-base-path))
+
+(defun robby--provider-models-path ()
+  "Return the models path of the current provider."
+  (plist-get (robby--get-provider-settings) :models-path))
+
+(cl-defun robby-add-provider (&key symbol name host default-model api-base-path models-path)
+  "Register a new robby provider."
+  (let* ((settings `(:name ,name
+                     :host ,host
+                     :default-model ,default-model
+                     :api-base-path ,(or api-base-path "/v1/chat/completions")
+                     :models-path ,(or models-path "/v1/models"))))
+    (add-to-list 'robby--provider-settings (cons symbol settings))))
+
+(cl-defmethod robby-provider-parse-error (data)
+  "Get error from response DATA.
+
+DATA is an alist of the JSON parsed response from the provider."
+  (cdr (assoc 'message (assoc 'error data))))
+
+(cl-defmethod robby-provider-parse-models (data)
+  "Get models from response DATA."
+  (seq-map (lambda (obj) (cdr (assoc 'id obj))) (cdr (assoc 'data data))))
+
+(provide 'robby-provider)
+
+;;; robby-provider.el ends here
diff --git a/robby-request.el b/robby-request.el
index 0f1fd13..c4e7a04 100644
--- a/robby-request.el
+++ b/robby-request.el
@@ -1,32 +1,43 @@
 ;;; robby-request.el --- Make robby requests via curl or url-retrieve -*- lexical-binding:t -*-

-;;; Commentary:
-
-;; Provides the `robby--request' function to make requests to the OpenAI API.
-
-;;; Code:
-
-(require 'cl-lib)
 (require 'files)
 (require 'json)
 (require 'seq)
 (require 'url-vars)

+(require 'robby-provider) ; require first to make sure robby--provider-settings is defined
+
 (require 'robby-api-key)
 (require 'robby-customization)
 (require 'robby-logging)
 (require 'robby-utils)

-;;; util functions
-(defun robby--request-parse-error-data (data)
-  "Get error from response DATA."
-  (cdr (assoc 'message (assoc 'error data))))
-
-(defun robby--request-parse-error-string (err)
-  "Get error from JSON string ERR."
-  (condition-case _err
-      (robby--request-parse-error-data (json-read-from-string err))
-    (error nil)))
+;;; request util functions
+(defun robby--request-parse-error-string (string)
+  "Get error from JSON response string STRING."
+  (ignore-errors
+    (robby-provider-parse-error (json-read-from-string string))))
+
+(defun robby--request-get-error (string)
+  "Get error from response STRING, or nil if no error.
+
+If there is a status code and it is not 200, try to parse the
+error message from the response and return that, otherwise return
+a generic error message. If status is 200 return nil (no error)."
+  (let ((provider (robby--provider-name))
+        (status (robby--parse-http-status string)))
+    (if (and (numberp status) (not (eq status 200)))
+        (let ((error-msg (robby--request-parse-error-string string)))
+          (if error-msg
+              (format "%s API error - '%s'" provider error-msg)
+            (if (numberp status)
+                (format "Unexpected response status %S from %s API request" status provider)
+              (format "Unexpected response from %S API request: %S" provider string)))))))
+
+;; TODO consider passing url to robby--request
+(defun robby--chat-url ()
+  "Get the chat API URL."
+  (concat "https://" (robby--provider-host) (robby--provider-api-base-path)))

 ;;; curl
 (defvar robby--curl-options
@@ -34,6 +45,7 @@
     "--disable"
     "--silent"
     "-m 600"
+    "-w HTTP STATUS: %{http_code}\n"
    "-H" "Content-Type: application/json"))

 (defun robby--curl-parse-chunk (remaining data)
@@ -68,8 +80,6 @@ of parsed JSON objects:
           (setq new-remaining (buffer-substring pos (point-max))))))
     `(:remaining ,new-remaining :parsed ,(nreverse parsed)))))

-(defconst robby--curl-unknown-error "Unexpected error making OpenAI request via curl" )
-
 (defun robby--curl-parse-response (string remaining streamp)
   "Parse JSON curl response from data in STRING and REMAINING
 unparsed text.
@@ -100,7 +110,7 @@ STREAMP is non-nil if the response is a stream."
              "curl"
              proc-buffer
              "curl"
-             robby-api-url
+             (robby--chat-url)
              curl-options)
       (error (funcall on-error err)))))
   (let ((remaining "")
@@ -111,8 +121,9 @@ STREAMP is non-nil if the response is a stream."
     (set-process-filter
      proc
      (lambda (proc string)
+       (robby--log (format "# Raw curl response chunk:\n%s\n" string))
        (condition-case err
-           (let ((error-msg (robby--request-parse-error-string string)))
+           (let ((error-msg (robby--request-get-error string)))
              (if error-msg
                  (progn
                    (setq errored t)
@@ -121,8 +132,7 @@ STREAMP is non-nil if the response is a stream."
                (setq remaining (plist-get resp :remaining))
                (funcall on-text :text (plist-get resp :text) :completep nil))))
          (error
-          (kill-process proc)
-          (error "Robby: unexpected error processing curl response: %S" err))))))
+          (error "Robby: unexpected error processing curl response: %S" err))))))
     (set-process-sentinel
      proc
      (lambda (_proc _status)
@@ -131,7 +141,7 @@ STREAMP is non-nil if the response is a stream."
          (funcall on-text :text text :completep t))
        (with-current-buffer proc-buffer
          (let* ((string (buffer-string))
-                (error-msg (robby--request-parse-error-string string)))
+                (error-msg (robby--request-get-error string)))
            (if error-msg
                (funcall on-error error-msg)
              (let ((resp (robby--curl-parse-response string "" nil)))
@@ -156,19 +166,19 @@ ON-ERROR is the callback for when an error is received."
          (encode-coding-string (json-encode payload) 'utf-8))
         (url-request-extra-headers
          `(("Content-Type" . "application/json")
            ("Authorization" . ,(concat "Bearer " (robby--get-api-key)))))
         (inhibit-message t)
         (message-log-max nil))
-    (robby--log (format "#url-retrieve request JSON payload:\n%s\n" url-request-data))
     (url-retrieve
-     robby-api-url
+     (robby--chat-url)
      (lambda (_status)
+       (robby--log (format "# URL retrieve response buffer contents: %s" (buffer-substring-no-properties (point-min) (point-max))))
        (goto-char (point-min))
        (re-search-forward "^{")
        (backward-char 1)
        (let* ((json-object-type 'alist)
               (resp (json-read))
-              (err (robby--request-parse-error-data resp)))
+              (err (robby-provider-parse-error resp)))
          (if err
              (funcall on-error err)
            (let ((text (robby--chunk-content resp nil)))
diff --git a/robby-run-command.el b/robby-run-command.el
index 529a74e..484d2db 100644
--- a/robby-run-command.el
+++ b/robby-run-command.el
@@ -94,7 +94,17 @@ DOCSTRING is the command's docstring."
     (setq quoted-options (plist-put quoted-options :historyp t)))
   (robby--pp-cmd `(robby-define-command ,name ,docstring ,@quoted-options))))

-;;; run command
+;;; robby-run-command and helper functions
+(defun robby--get-request-input (api-options)
+  "Build the request options alist from API-OPTIONS.
+
+If no model is specified, use the default model for the provider."
+  (map-merge
+   ;; Add the default model if none is specified; some providers (Mistral AI, for example) return an error when the request omits the model.
+   'alist
+   `(("model" . ,(robby--provider-default-model)))
+   (robby--options-alist-for-api-request api-options)))
+
 (defun robby--get-response-region (response-buffer)
   "Return the region to replace in RESPONSE-BUFFER.

@@ -116,6 +126,7 @@ of the entire buffer."
                                     no-op-pattern
                                     no-op-message
                                     text
+                                    text-processed
                                     response-buffer
                                     response-region)
-  "Process a cunk of text received from OpenAI.
+  "Process a chunk of text received from OpenAI.
@@ -142,6 +153,8 @@ matches.
 TEXT is the response from OpenAI.  It may be one chunk of the
 response if streaming is on.

+TEXT-PROCESSED is the text processed so far, not including the new TEXT.
+
 RESPONSE-BUFFER is the buffer where the response is written to.

 RESPONSE-REGION is the region to prepend, append, or replace in
@@ -153,7 +166,7 @@ RESPONSE-BUFFER."
          (end (cdr response-region))
          (grounded-text (robby--ground-response text grounding-fns)))
     (when completep
-      (robby--history-push basic-prompt text))
+      (robby--history-push basic-prompt (concat text-processed text)))
     (if (and no-op-pattern (string-match-p no-op-pattern text))
-        (message (or no-op-message) "no action to perform")
+        (message (or no-op-message "no action to perform"))
       (when (or completep (> (length grounded-text) 0))
@@ -169,7 +182,7 @@ RESPONSE-BUFFER."
   "Handle an error ERR from OpenAI."
   (robby--spinner-stop)
   (let* ((err-msg (if (stringp err) err (error-message-string err)))
-         (log-msg (format "Error processing robby request: %s\n" err-msg)))
+         (log-msg (format "Error running robby command: %s" err-msg)))
     (robby--log log-msg)
     (message log-msg))
   (when (process-live-p robby--last-process)
@@ -270,11 +283,11 @@ value overrides the `robby-stream' customization variable."
          (prompt-result (if (functionp prompt) (apply prompt prompt-args-with-arg) (format "%s" prompt)))
          (basic-prompt (robby--format-prompt prompt-result robby-prompt-spec-fn))
          (request-input (robby--request-input basic-prompt historyp robby--history robby-chat-system-message))
-         (payload (append request-input (robby--options-alist-for-api-request api-options)))
+         (payload (append request-input (robby--get-request-input api-options)))
          (response-buffer (get-buffer-create (robby--get-response-buffer action action-args)))
          (response-region (robby--get-response-region response-buffer))
          (streamp (robby--get-stream-p :never-stream-p never-stream-p :no-op-pattern no-op-pattern :grounding-fns grounding-fns))
-         (chars-processed 0))
+         (text-processed ""))

     (robby--log (format "# Request body alist:\n%s\n" payload))

@@ -298,16 +311,17 @@ value overrides the `robby-stream' customization variable."
                       :action-args action-args
                       :arg arg
                       :basic-prompt basic-prompt
-                      :chars-processed chars-processed
+                      :chars-processed (length text-processed)
                       :completep completep
                       :grounding-fns grounding-fns
                       :no-op-pattern no-op-pattern
                       :no-op-message no-op-message
                       :response-buffer response-buffer
                       :response-region response-region
-                      :text text))
+                      :text text
+                      :text-processed text-processed))
                   (error (robby--handle-error err))))
-                (setq chars-processed (+ chars-processed (length text)))))
+                (setq text-processed (concat text-processed text))))
     :on-error (lambda (err)
                 (with-current-buffer response-buffer
diff --git a/robby-togetherai-provider.el b/robby-togetherai-provider.el
new file mode 100644
index 0000000..6cf6ac8
--- /dev/null
+++ b/robby-togetherai-provider.el
@@ -0,0 +1,22 @@
+;;; robby-togetherai-provider.el --- robby togetherai provider -*- lexical-binding:t -*-
+
+;;; Code:
+
+(require 'robby-provider)
+
+(require 'cl-generic)
+
+(robby-add-provider
+ :symbol 'togetherai
+ :name "Together AI"
+ :host "api.together.xyz"
+ ;; :default-model "togethercomputer/StripedHyena-Nous-7B"
+ :models-path "/models/info?=")
+
+(cl-defmethod robby-provider-parse-models (data &context (robby-provider (eql 'togetherai)))
+  "Get models from response DATA for togetherai."
+  (seq-map (lambda (elem) (alist-get 'name elem)) data))
+
+(provide 'robby-togetherai-provider)
+
+;;; robby-togetherai-provider.el ends here
diff --git a/robby-transients.el b/robby-transients.el
index 5d00394..c541034 100644
--- a/robby-transients.el
+++ b/robby-transients.el
@@ -197,11 +197,12 @@ Only includes options that cannot be nil.")
   (oset obj value `(,@(robby--options-transient-value))))

 ;;; robby-api-options
+;;;###autoload (autoload 'robby-api-options "robby" "Chat API options transient." t)
 (transient-define-prefix robby-api-options ()
-  "Chat API option transient."
+  "Chat API options transient."
   :init-value 'robby--init-api-options
   ["Chat API Options"
-   ("m" "model" "model=" :always-read t :choices ,(robby--get-models))
+   ("m" "model" "model=" :always-read t :choices (lambda () (robby--get-models)))
    ("t" "max tokens" "max-tokens=" :reader transient-read-number-N+ :always-read t)
    ("e" "temperature" "temperature=" :reader robby--read-temperature :always-read t)
    ("p" "top p" "top-p=" :reader robby--read-top-p :always-read t)
diff --git a/robby-utils.el b/robby-utils.el
index 93b2405..474c4d3 100644
--- a/robby-utils.el
+++ b/robby-utils.el
@@ -233,6 +233,18 @@ details."
       response
     (funcall grounding-fns response)))

+;;; request utils
+(defun robby--parse-http-status (resp)
+  "Parse the HTTP status code from response text RESP.
+
+Use with the curl option --write-out 'HTTP STATUS: %{http_code}\n'.
+Return the status code as a number, or nil if no status code is found."
+  (if (string-match "HTTP STATUS: \\([0-9]+\\)" resp)
+      (when-let ((m (match-string 1 resp))
+                 (n (string-to-number m)))
+        (if (eq n 0) nil n))
+    nil))
+
 (provide 'robby-utils)

 ;;; robby-utils.el ends here
diff --git a/robby.el b/robby.el
index ec621ad..0d301d7 100644
--- a/robby.el
+++ b/robby.el
@@ -31,6 +31,11 @@
   (require 'seq)
   (require 'transient))

+;; require providers
+(require 'robby-mistralai-provider)
+(require 'robby-openai-provider)
+(require 'robby-togetherai-provider)
+
 ;; require files with autoloads, and customization file
 (require 'robby-customization)
 (require 'robby-commands)
diff --git a/test/robby-request-test.el b/test/robby-request-test.el
index 7979fd7..66f244a 100644
--- a/test/robby-request-test.el
+++ b/test/robby-request-test.el
@@ -3,6 +3,8 @@
 (require 'ert)

 (require 'robby-request)
+(require 'robby-openai-provider)
+(require 'robby-togetherai-provider)

 ;;; Code:

@@ -10,7 +12,7 @@
   "Return filepath's file content."
   (with-temp-buffer
     (insert-file-contents filepath)
-      (buffer-string)))
+    (buffer-string)))

 (ert-deftest robby--curl-parse-response--streaming-response ()
   (let ((parsed (robby--curl-parse-response (robby--read-file-into-string "./fixtures/streaming-response-complete.txt") "" t)))
@@ -26,6 +28,36 @@
   (let ((part2 (robby--curl-parse-response (robby--read-file-into-string "./fixtures/streaming-response-incomplete-part-2.txt") "{\"id\":\"chatcmpl-a\",\"object\":\"chat.completion.chu" t)))
     (should (equal (plist-get part2 :text) " there!"))))

+(ert-deftest robby--request-get-error--no-error-on-200 ()
+  (dolist (provider '(openai togetherai))
+    (let ((robby-provider provider)
+          (resp-string (robby--read-file-into-string "./fixtures/streaming-response-complete-status-200.txt")))
+      (should (equal (robby--parse-http-status resp-string) 200))
+      (should (null (robby--request-get-error resp-string))))))
+
+(ert-deftest robby--request-get-error--openai-error ()
+  (let ((robby-provider 'openai)
+        (resp-string (robby--read-file-into-string "./fixtures/streaming-response-openai-model-not-found.txt")))
+    (should (equal (robby--parse-http-status resp-string) 404))
+    (should (equal (robby--request-get-error resp-string)
+                   "OpenAI API error - 'The model `adfasdf` does not exist or you do not have access to it.'"))))
+
+(ert-deftest robby--request-get-error--together-error ()
+  (let ((robby-provider 'togetherai)
+        (resp-string (robby--read-file-into-string "./fixtures/streaming-response-togetherai-model-not-found.txt")))
+    (should (equal (robby--parse-http-status resp-string) 404))
+    (should (equal (robby--request-get-error resp-string)
+                   "Together AI API error - 'Unable to access model adfasdf. Please visit https://api.together.xyz to see the list of supported models or contact the owner to request access.'"))))
+
+(ert-deftest robby--request-get-error--generic-error-when-400-no-error-message ()
+  (dolist (provider '(openai togetherai))
+    (let ((robby-provider provider)
+          (resp-string (robby--read-file-into-string "./fixtures/streaming-response-complete-status-400.txt")))
+      (should (equal (robby--parse-http-status resp-string) 400))
+      (should (string-match-p
+               "Unexpected response status 400 from .+ API request"
+               (robby--request-get-error resp-string))))))
+
 (provide 'robby-request-test)

 ;;; robby-request-test.el ends here
diff --git a/test/robby-test-env.el b/test/robby-test-env.el
index 0e04351..1953083 100644
--- a/test/robby-test-env.el
+++ b/test/robby-test-env.el
@@ -2,6 +2,8 @@

 (require 'ert)

+(require 'robby-provider)
+
 ;;; Code:

 (setq package-lint-main-file "/Users/stephenmolitor/repos/robby/robby.el")
diff --git a/test/robby-utils-test.el b/test/robby-utils-test.el
index 165f5c5..8389092 100644
--- a/test/robby-utils-test.el
+++ b/test/robby-utils-test.el
@@ -2,6 +2,7 @@

 (require 'ert)

+(require 'robby-customization)
 (require 'robby-utils)

 ;;; Code:

@@ -86,6 +87,23 @@
          ((role . "user") (content . "Where was it played?"))
          ])))))

+(ert-deftest robby--request-input--with-two-item-history ()
+  (should (equal
+           (robby--request-input
+            "Bonjour!"
+            t
+            '(("hi" . "Hello!") ("answer in french" . "Bien sûr!"))
+            "I am a helpful assistant.")
+
+           `((messages .
+                       [((role . "system") (content . "I am a helpful assistant."))
+                        ((role . "user") (content . "hi"))
+                        ((role . "assistant") (content . "Hello!"))
+                        ((role . "user") (content . "answer in french"))
+                        ((role . "assistant") (content . "Bien sûr!"))
+                        ((role . "user") (content . "Bonjour!"))
+                        ])))))
+
 ;;; chunk content tests
 (ert-deftest robby--chunk-content--no-streaming ()
   (let ((resp '((choices . [((index . 0)
@@ -135,7 +153,30 @@
   (should (equal (robby--ground-response "response" #'upcase) "RESPONSE"))))

+;;; request utils
+(ert-deftest robby--parse-http-status ()
+  (let ((resp "logprobs\":null,\"finish_reason\":\"stop\"}]}
+
+data: [DONE]
+
+ HTTP STATUS: 200
+
+"))
+    (should (equal
+             (robby--parse-http-status resp)
+             200))))
+
+(ert-deftest robby--parse-http-status-no-status ()
+  (let ((chunk "data: {\"id\":\"chatcmpl-9GR46WElbBCZQwFD2WoGeYKnd8I5z\",\"object\":\"chat.completion.chunk\",\"created\":1713704314,\"model\":\"gpt-3.5-turbo-0125\",\"system_fingerprint\":\"fp_c2295e73ad\",\"choices\":[{\"index\":0,\"delta\":{\"role\":\"assistant\",\"content\":\"\"},\"logprobs\":null,\"finish_reason\":null}]}
+
+data: {\"id\":\"chatcmpl-9GR46WElbBCZQwFD2WoGeYKnd8I5z\",\"object\":\"chat.completion.chunk\",\"created\":1713704314,\"model\":\"gpt-3.5-turbo-0125\",\"system_fingerprint\":\"fp_c2295e73ad\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"Hello\"},\"logprobs\":null,\"finish_reason\":null}]}
+
+data: {\"id\":\"chatcmpl-9GR46WElbBCZQwFD2WoGeYKnd8I5z\",\"object\":\"chat.completion.chunk\",\"created\":1713704314,\"model\":\"gpt-3.5-turbo-0125\",\"system_fingerprint\":\"fp_c2295e73ad\",\"choices\":[{\"index\":0,\"delta\":{\"content\":\"!\"},\"logprobs\":null,\"finish_reason\":null}]}
+
+
+"))
+    (should (null (robby--parse-http-status chunk)))))
+
 (provide 'robby-utils-test)

 ;;; robby-utils-test.el ends here
-