fix: do not add BOS (with last llamacpp-python version)

This commit is contained in:
Javier Martinez 2024-07-30 13:58:28 +02:00
parent 452a045be4
commit 8699644c8d
No known key found for this signature in database
2 changed files with 6 additions and 7 deletions

View file

@@ -169,7 +169,7 @@ class Llama3PromptStyle(AbstractPromptStyle):
""" """
def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str: def _messages_to_prompt(self, messages: Sequence[ChatMessage]) -> str:
prompt = self.BOS prompt = ""
has_system_message = False has_system_message = False
for i, message in enumerate(messages): for i, message in enumerate(messages):
@@ -189,8 +189,7 @@ class Llama3PromptStyle(AbstractPromptStyle):
# Add default system prompt if no system message was provided # Add default system prompt if no system message was provided
if not has_system_message: if not has_system_message:
prompt = ( prompt = (
f"{self.BOS}{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}" f"{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}" + prompt
+ prompt[len(self.BOS) :]
) )
# TODO: Implement tool handling logic # TODO: Implement tool handling logic
@@ -199,7 +198,7 @@ class Llama3PromptStyle(AbstractPromptStyle):
def _completion_to_prompt(self, completion: str) -> str: def _completion_to_prompt(self, completion: str) -> str:
return ( return (
f"{self.BOS}{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}" f"{self.B_SYS}\n\n{self.DEFAULT_SYSTEM_PROMPT}{self.E_SYS}"
f"{self.B_INST}user{self.E_INST}\n\n{completion.strip()}{self.EOT}" f"{self.B_INST}user{self.E_INST}\n\n{completion.strip()}{self.EOT}"
f"{self.ASSISTANT_INST}\n\n" f"{self.ASSISTANT_INST}\n\n"
) )

View file

@@ -150,7 +150,7 @@ def test_llama3_prompt_style_format():
] ]
expected_prompt = ( expected_prompt = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n" "<|start_header_id|>system<|end_header_id|>\n\n"
"You are a helpful assistant<|eot_id|>" "You are a helpful assistant<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\n" "<|start_header_id|>user<|end_header_id|>\n\n"
"Hello, how are you doing?<|eot_id|>" "Hello, how are you doing?<|eot_id|>"
@@ -166,7 +166,7 @@ def test_llama3_prompt_style_with_default_system():
ChatMessage(content="Hello!", role=MessageRole.USER), ChatMessage(content="Hello!", role=MessageRole.USER),
] ]
expected = ( expected = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n" "<|start_header_id|>system<|end_header_id|>\n\n"
f"{prompt_style.DEFAULT_SYSTEM_PROMPT}<|eot_id|>" f"{prompt_style.DEFAULT_SYSTEM_PROMPT}<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\nHello!<|eot_id|>" "<|start_header_id|>user<|end_header_id|>\n\nHello!<|eot_id|>"
"<|start_header_id|>assistant<|end_header_id|>\n\n" "<|start_header_id|>assistant<|end_header_id|>\n\n"
@@ -185,7 +185,7 @@ def test_llama3_prompt_style_with_assistant_response():
] ]
expected_prompt = ( expected_prompt = (
"<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n" "<|start_header_id|>system<|end_header_id|>\n\n"
"You are a helpful assistant<|eot_id|>" "You are a helpful assistant<|eot_id|>"
"<|start_header_id|>user<|end_header_id|>\n\n" "<|start_header_id|>user<|end_header_id|>\n\n"
"What is the capital of France?<|eot_id|>" "What is the capital of France?<|eot_id|>"