Multi language support - fern debug (#1307)

---------

Co-authored-by: Louis <lpglm@orange.fr>
Co-authored-by: LeMoussel <cnhx27@gmail.com>
Iván Martínez 2023-11-25 14:34:23 +01:00 committed by GitHub
parent e8d88f8952
commit 944c43bfa8
10 changed files with 402 additions and 8 deletions

@@ -91,7 +91,28 @@ class VectorstoreSettings(BaseModel):
 class LocalSettings(BaseModel):
     llm_hf_repo_id: str
     llm_hf_model_file: str
-    embedding_hf_model_name: str
+    embedding_hf_model_name: str = Field(
+        description="Name of the HuggingFace model to use for embeddings"
+    )
+    prompt_style: Literal["default", "llama2", "tag"] = Field(
+        "llama2",
+        description=(
+            "The prompt style to use for the chat engine. "
+            "If `default` - use the default prompt style from the llama_index. It should look like `role: message`.\n"
+            "If `llama2` - use the llama2 prompt style from the llama_index. Based on `<s>`, `[INST]` and `<<SYS>>`.\n"
+            "If `tag` - use the `tag` prompt style. It should look like `<|role|>: message`. \n"
+            "`llama2` is the historic behaviour. `default` might work better with your custom models."
+        ),
+    )
+    default_system_prompt: str | None = Field(
+        None,
+        description=(
+            "The default system prompt to use for the chat engine. "
+            "If none is given - use the default system prompt (from the llama_index). "
+            "Please note that the default prompt might not be the same for all prompt styles. "
+            "Also note that this is only used if the first message is not a system message. "
+        ),
+    )
 
 
 class SagemakerSettings(BaseModel):
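
For context, a minimal sketch of how the extended LocalSettings model behaves once these fields exist. The field values below are placeholders chosen for illustration, not values from this commit; in PrivateGPT these settings are normally populated from the settings YAML files rather than constructed by hand.

    # Illustrative sketch only; placeholder values, not from the PR.
    from typing import Literal
    from pydantic import BaseModel, Field

    class LocalSettings(BaseModel):
        llm_hf_repo_id: str
        llm_hf_model_file: str
        embedding_hf_model_name: str = Field(
            description="Name of the HuggingFace model to use for embeddings"
        )
        prompt_style: Literal["default", "llama2", "tag"] = Field("llama2")
        default_system_prompt: str | None = Field(None)

    settings = LocalSettings(
        llm_hf_repo_id="TheBloke/some-model-GGUF",    # placeholder repo id
        llm_hf_model_file="some-model.Q4_K_M.gguf",   # placeholder file name
        embedding_hf_model_name="BAAI/bge-small-en-v1.5",
        prompt_style="tag",               # one of "default", "llama2", "tag"
        default_system_prompt="You are a helpful assistant.",
    )
    print(settings.prompt_style)  # -> tag

Pydantic validates `prompt_style` against the Literal, so a typo such as "lama2" fails at startup instead of silently falling back, and `default_system_prompt` stays optional with the style-specific default applied downstream when it is left unset.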