Mirror of https://github.com/zylon-ai/private-gpt.git
WIP more prompt format, and more maintainable
Commit 76faffb269 (parent 3d301d0c6f)
11 changed files with 476 additions and 217 deletions
@@ -98,13 +98,33 @@ class LocalSettings(BaseModel):
     embedding_hf_model_name: str = Field(
         description="Name of the HuggingFace model to use for embeddings"
     )
-    prompt_style: Literal["default", "llama2", "tag"] = Field(
+    prompt_style: Literal[
+        "llama_cpp.llama-2",
+        "llama_cpp.alpaca",
+        "llama_cpp.vicuna",
+        "llama_cpp.oasst_llama",
+        "llama_cpp.baichuan-2",
+        "llama_cpp.baichuan",
+        "llama_cpp.openbuddy",
+        "llama_cpp.redpajama-incite",
+        "llama_cpp.snoozy",
+        "llama_cpp.phind",
+        "llama_cpp.intel",
+        "llama_cpp.open-orca",
+        "llama_cpp.mistrallite",
+        "llama_cpp.zephyr",
+        "llama_cpp.chatml",
+        "llama_cpp.openchat",
+        "llama2",
+        "vigogne",
+        "template",
+    ] | None = Field(
+        None,
         description=(
             "The prompt style to use for the chat engine. "
-            "If `default` - use the default prompt style from the llama_index. It should look like `role: message`.\n"
+            "If None is given - use the default prompt style from the llama_index. It should look like `role: message`.\n"
             "If `llama2` - use the llama2 prompt style from the llama_index. Based on `<s>`, `[INST]` and `<<SYS>>`.\n"
-            "If `tag` - use the `tag` prompt style. It should look like `<|role|>: message`. \n"
+            "If `llama_cpp.<name>` - use the `<name>` prompt style, implemented by `llama-cpp-python`. \n"
             "`llama2` is the historic behaviour. `default` might work better with your custom models."
         ),
     )
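The field description above spells out the token layout that the `llama2` style refers to (`<s>`, `[INST]` and `<<SYS>>`). As a rough illustration only, here is a minimal sketch of a formatter producing that layout; the function name and arguments are invented for this example and are not taken from private-gpt or llama-index.

# Sketch of the Llama 2 chat layout named by the `llama2` prompt style.
# `format_llama2_prompt` and its arguments are illustrative, not real private-gpt code.
def format_llama2_prompt(system_prompt: str, user_message: str) -> str:
    return (
        "<s>[INST] <<SYS>>\n"
        f"{system_prompt}\n"
        "<</SYS>>\n\n"
        f"{user_message} [/INST]"
    )


if __name__ == "__main__":
    # Wraps the system prompt in <<SYS>> markers and the user turn in [INST] ... [/INST].
    print(format_llama2_prompt("You are a helpful assistant.", "Summarize the ingested documents."))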
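The `llama_cpp.<name>` options correspond to chat formats shipped with `llama-cpp-python` (for example `chatml` or `open-orca`). This hunk does not show how private-gpt wires them in, so the following is only a sketch of how llama-cpp-python itself consumes such a format name, assuming the package is installed and a local GGUF model exists at the placeholder path.

# Sketch: passing one of the listed format names to llama-cpp-python directly.
# "path/to/model.gguf" is a placeholder; point it at a real model file.
from llama_cpp import Llama

llm = Llama(model_path="path/to/model.gguf", chat_format="chatml")
result = llm.create_chat_completion(
    messages=[
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hello"},
    ]
)
print(result["choices"][0]["message"]["content"])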
@@ -118,6 +138,13 @@ class LocalSettings(BaseModel):
         ),
     )
 
+    template_name: str | None = Field(
+        None,
+        description=(
+            "The name of the template to use for the chat engine, if the `prompt_style` is `template`."
+        ),
+    )
+
 
 class EmbeddingSettings(BaseModel):
     mode: Literal["local", "openai", "sagemaker", "mock"]
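To see how the `Literal[...] | None = Field(None, ...)` pattern from this diff behaves at validation time, here is a small standalone Pydantic sketch. `PromptSettingsSketch` is a made-up class name and only two of the allowed values are kept; it is not the real `LocalSettings`, it just shows that the field defaults to None and rejects values outside the declared set (such as the removed `default`).

# Standalone sketch of the optional-Literal field pattern used above (Python 3.10+).
from typing import Literal

from pydantic import BaseModel, Field, ValidationError


class PromptSettingsSketch(BaseModel):
    # Trimmed-down subset of the prompt styles accepted in the diff.
    prompt_style: Literal["llama2", "template"] | None = Field(
        None,
        description="Which prompt style to use; None falls back to the default style.",
    )
    # Only meaningful when prompt_style is "template".
    template_name: str | None = Field(
        None,
        description="Name of the template to use when prompt_style is `template`.",
    )


print(PromptSettingsSketch().prompt_style)                       # None (the new default)
print(PromptSettingsSketch(prompt_style="llama2").prompt_style)  # accepted
try:
    PromptSettingsSketch(prompt_style="default")                 # no longer a valid option
except ValidationError as exc:
    print(f"rejected with {len(exc.errors())} validation error(s)")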