mirror of
https://github.com/zylon-ai/private-gpt.git
synced 2025-12-22 10:45:42 +01:00
feat(llm): drop default_system_prompt (#1385)
As discussed on Discord, the decision has been made to remove the system prompts by default, to better segregate the API and the UI usages. A concurrent PR (#1353) is enabling the dynamic setting of a system prompt in the UI. Therefore, if UI users want to use a custom system prompt, they can specify one directly in the UI. If API users want to use a custom system prompt, they can pass it directly in the messages they send to the API. In light of the two use cases above, it becomes clear that a default system_prompt does not need to exist.
This commit is contained in:
parent
f235c50be9
commit
a3ed14c58f
4 changed files with 17 additions and 96 deletions
|
|
@ -18,7 +18,7 @@ from private_gpt.components.llm.prompt_helper import (
|
|||
],
|
||||
)
|
||||
def test_get_prompt_style_success(prompt_style, expected_prompt_style):
|
||||
assert get_prompt_style(prompt_style) == expected_prompt_style
|
||||
assert isinstance(get_prompt_style(prompt_style), expected_prompt_style)
|
||||
|
||||
|
||||
def test_get_prompt_style_failure():
|
||||
|
|
@ -45,20 +45,7 @@ def test_tag_prompt_style_format():
|
|||
|
||||
|
||||
def test_tag_prompt_style_format_with_system_prompt():
|
||||
system_prompt = "This is a system prompt from configuration."
|
||||
prompt_style = TagPromptStyle(default_system_prompt=system_prompt)
|
||||
messages = [
|
||||
ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
|
||||
]
|
||||
|
||||
expected_prompt = (
|
||||
f"<|system|>: {system_prompt}\n"
|
||||
"<|user|>: Hello, how are you doing?\n"
|
||||
"<|assistant|>: "
|
||||
)
|
||||
|
||||
assert prompt_style.messages_to_prompt(messages) == expected_prompt
|
||||
|
||||
prompt_style = TagPromptStyle()
|
||||
messages = [
|
||||
ChatMessage(
|
||||
content="FOO BAR Custom sys prompt from messages.", role=MessageRole.SYSTEM
|
||||
|
|
@ -94,22 +81,7 @@ def test_llama2_prompt_style_format():
|
|||
|
||||
|
||||
def test_llama2_prompt_style_with_system_prompt():
|
||||
system_prompt = "This is a system prompt from configuration."
|
||||
prompt_style = Llama2PromptStyle(default_system_prompt=system_prompt)
|
||||
messages = [
|
||||
ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
|
||||
]
|
||||
|
||||
expected_prompt = (
|
||||
"<s> [INST] <<SYS>>\n"
|
||||
f" {system_prompt} \n"
|
||||
"<</SYS>>\n"
|
||||
"\n"
|
||||
" Hello, how are you doing? [/INST]"
|
||||
)
|
||||
|
||||
assert prompt_style.messages_to_prompt(messages) == expected_prompt
|
||||
|
||||
prompt_style = Llama2PromptStyle()
|
||||
messages = [
|
||||
ChatMessage(
|
||||
content="FOO BAR Custom sys prompt from messages.", role=MessageRole.SYSTEM
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue