feat: make llama3.1 the default (#2022)

* feat: change ollama default model to llama3.1

* chore: bump versions

* feat: Change default model in local mode to llama3.1

* chore: make sure the latest poetry version is used

* fix: mypy

* fix: do not add BOS (with the latest llama-cpp-python version)
Author: Javier Martinez, 2024-07-31 14:35:36 +02:00 (committed by GitHub)
commit 9027d695c1, parent e54a8fe043
15 changed files with 2227 additions and 2419 deletions

@@ -150,7 +150,7 @@ def test_llama3_prompt_style_format():
     ]
     expected_prompt = (
-        "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
+        "<|start_header_id|>system<|end_header_id|>\n\n"
         "You are a helpful assistant<|eot_id|>"
         "<|start_header_id|>user<|end_header_id|>\n\n"
         "Hello, how are you doing?<|eot_id|>"
@@ -166,7 +166,7 @@ def test_llama3_prompt_style_with_default_system():
         ChatMessage(content="Hello!", role=MessageRole.USER),
     ]
     expected = (
-        "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
+        "<|start_header_id|>system<|end_header_id|>\n\n"
         f"{prompt_style.DEFAULT_SYSTEM_PROMPT}<|eot_id|>"
         "<|start_header_id|>user<|end_header_id|>\n\nHello!<|eot_id|>"
         "<|start_header_id|>assistant<|end_header_id|>\n\n"
@@ -185,7 +185,7 @@ def test_llama3_prompt_style_with_assistant_response():
     ]
     expected_prompt = (
-        "<|begin_of_text|><|start_header_id|>system<|end_header_id|>\n\n"
+        "<|start_header_id|>system<|end_header_id|>\n\n"
         "You are a helpful assistant<|eot_id|>"
         "<|start_header_id|>user<|end_header_id|>\n\n"
         "What is the capital of France?<|eot_id|>"