Allow context and examples to be set after the first interaction
They will always be set on the first interaction, though. This allows context
and examples to be used across multiple messages with llm-make-chat-prompt.

This is needed to fix #43.
ahyatt committed Apr 16, 2024
1 parent 35e2b6e commit bfb393b
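
For illustration, a minimal sketch (not part of the commit) of the workflow this enables, using only the constructors and accessors that appear in the diff below; the actual send step is elided, since the provider entry point is not part of this change:

(require 'llm)  ;; provides the llm-chat-prompt struct used below

;; Build a prompt with context and examples for the first request.
(setq my-prompt
      (make-llm-chat-prompt
       :context "You are a terse assistant."
       :examples (list (cons "Request 1" "Response 1"))
       :interactions (list (make-llm-chat-prompt-interaction
                            :role 'user :content "Hello"))))

;; ... send the prompt with a provider of your choice ...

;; After this commit, context and examples can be set again for a later
;; message; the provider utils fold them into the next request and then
;; clear them, instead of ignoring them after the first interaction.
(setf (llm-chat-prompt-context my-prompt) "New context for the follow-up")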
Showing 2 changed files with 13 additions and 36 deletions.
32 changes: 5 additions & 27 deletions llm-provider-utils-test.el
Expand Up @@ -25,20 +25,13 @@

(ert-deftest llm-provider-utils-combine-to-system-prompt ()
(let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
(interaction2 (make-llm-chat-prompt-interaction :role 'assistant :content "Hi! How can I assist you?"))
(example1 (cons "Request 1" "Response 1"))
(example2 (cons "Request 2" "Response 2"))
(prompt-for-first-request
(make-llm-chat-prompt
:context "Example context"
:interactions (list (copy-llm-chat-prompt-interaction interaction1))
:examples (list example1 example2)))
(prompt-for-second-request
(make-llm-chat-prompt
:context "An example context"
:interactions (list (copy-llm-chat-prompt-interaction interaction1)
(copy-llm-chat-prompt-interaction interaction2))
:examples (list example1 example2)))
(prompt-with-existing-system-prompt
(make-llm-chat-prompt
:context "Example context"
Expand All @@ -51,12 +44,8 @@
(should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2"
(llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))
(should (equal "Hello" (llm-chat-prompt-interaction-content (nth 1 (llm-chat-prompt-interactions prompt-for-first-request)))))

;; Nothing should be done on the second request.
(should (= 2 (length (llm-chat-prompt-interactions prompt-for-second-request))))
(llm-provider-utils-combine-to-system-prompt prompt-for-second-request)
(should (equal interaction1 (nth 0 (llm-chat-prompt-interactions prompt-for-second-request))))
(should (equal interaction2 (nth 1 (llm-chat-prompt-interactions prompt-for-second-request))))
(should-not (llm-chat-prompt-context prompt-for-first-request))
(should-not (llm-chat-prompt-examples prompt-for-first-request))

;; On the request with the existing system prompt, it should append the new
;; text to the existing system prompt.
Expand All @@ -67,31 +56,20 @@

(ert-deftest llm-provider-utils-combine-to-user-prompt ()
(let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
(interaction2 (make-llm-chat-prompt-interaction :role 'assistant :content "Hi! How can I assist you?"))
(example1 (cons "Request 1" "Response 1"))
(example2 (cons "Request 2" "Response 2"))
(prompt-for-first-request
(make-llm-chat-prompt
:context "Example context"
:interactions (list (copy-llm-chat-prompt-interaction interaction1))
:examples (list example1 example2)))
(prompt-for-second-request
(make-llm-chat-prompt
:context "An example context"
:interactions (list (copy-llm-chat-prompt-interaction interaction1)
(copy-llm-chat-prompt-interaction interaction2))
:examples (list example1 example2))))
;; In the first request, the system prompt should be prepended to the user request.
(llm-provider-utils-combine-to-user-prompt prompt-for-first-request)
(should (= 1 (length (llm-chat-prompt-interactions prompt-for-first-request))))
(should-not (llm-chat-prompt-context prompt-for-first-request))
(should-not (llm-chat-prompt-examples prompt-for-first-request))
(should (equal "Example context\nHere are 2 examples of how to respond:\n\nUser: Request 1\nAssistant: Response 1\nUser: Request 2\nAssistant: Response 2\nHello"
(llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))

;; Nothing should be done on the second request.
(should (= 2 (length (llm-chat-prompt-interactions prompt-for-second-request))))
(llm-provider-utils-combine-to-user-prompt prompt-for-second-request)
(should (equal interaction1 (nth 0 (llm-chat-prompt-interactions prompt-for-second-request))))
(should (equal interaction2 (nth 1 (llm-chat-prompt-interactions prompt-for-second-request))))))
(llm-chat-prompt-interaction-content (nth 0 (llm-chat-prompt-interactions prompt-for-first-request)))))))

(ert-deftest llm-provider-utils-collapse-history ()
(let* ((interaction1 (make-llm-chat-prompt-interaction :role 'user :content "Hello"))
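As a reference point, a minimal sketch (not part of the diff) of the behavior the updated test exercises: context and examples are folded into a system interaction and then cleared, so they can be supplied again for a later message.

(let ((prompt (make-llm-chat-prompt
               :context "Example context"
               :examples (list (cons "Request 1" "Response 1"))
               :interactions (list (make-llm-chat-prompt-interaction
                                    :role 'user :content "Hello")))))
  (llm-provider-utils-combine-to-system-prompt prompt)
  ;; The first interaction is now a system message containing the context
  ;; and examples, followed by the original "Hello" message; the consumed
  ;; slots are cleared so they can be set again later.
  (list (llm-chat-prompt-context prompt)      ; => nil
        (llm-chat-prompt-examples prompt)))   ; => nil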
17 changes: 8 additions & 9 deletions llm-provider-utils.el
Expand Up @@ -290,11 +290,7 @@ This should be used for providers that have a notion of a system prompt.
If there is a system prompt, and no assistant response, add to it.
If there is no system prompt, create one.
If there is an assistant response, do nothing."
(unless (seq-some
(lambda (interaction)
(eq (llm-chat-prompt-interaction-role interaction) 'assistant))
(llm-chat-prompt-interactions prompt))
(let ((system-prompt (seq-find
(let ((system-prompt (seq-find
(lambda (interaction)
(eq (llm-chat-prompt-interaction-role interaction) 'system))
(llm-chat-prompt-interactions prompt)))
Expand All @@ -308,17 +304,20 @@ If there is an assistance response, do nothing."
(push (make-llm-chat-prompt-interaction
:role 'system
:content system-content)
(llm-chat-prompt-interactions prompt)))))))
(llm-chat-prompt-interactions prompt))
(setf (llm-chat-prompt-context prompt) nil
(llm-chat-prompt-examples prompt) nil)))))

(defun llm-provider-utils-combine-to-user-prompt (prompt &optional example-prelude)
"Add context and examples to a user prompt in PROMPT.
This should be used for providers that do not have a notion of a system prompt."
(when (= (length (llm-chat-prompt-interactions prompt)) 1)
(when-let ((system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
(when-let ((system-content (llm-provider-utils-get-system-prompt prompt example-prelude)))
(setf (llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt)))
(concat system-content
"\n"
(llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt))))))))
(llm-chat-prompt-interaction-content (car (llm-chat-prompt-interactions prompt))))
(llm-chat-prompt-context prompt) nil
(llm-chat-prompt-examples prompt) nil)))

(defun llm-provider-utils-collapse-history (prompt &optional history-prelude)
"Collapse history to a single prompt.
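For the no-system-prompt path, a minimal sketch of the new llm-provider-utils-combine-to-user-prompt behavior, assuming only a context is set (the exact combined text comes from llm-provider-utils-get-system-prompt):

(let ((prompt (make-llm-chat-prompt
               :context "Example context"
               :interactions (list (make-llm-chat-prompt-interaction
                                    :role 'user :content "Hello")))))
  (llm-provider-utils-combine-to-user-prompt prompt)
  ;; The context is prepended to the user message (roughly
  ;; "Example context\nHello"), and the context slot is cleared so it is
  ;; not applied again on later requests.
  (llm-chat-prompt-interaction-content
   (car (llm-chat-prompt-interactions prompt))))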
