mirror of
https://github.com/zylon-ai/private-gpt.git
synced 2025-12-22 23:22:57 +01:00
Added max_length for embedding models. Some models crash if it is not provided; see https://huggingface.co/T-Systems-onsite/cross-en-de-roberta-sentence-transformer/discussions/7#65ce1ca0102df4e9e00ff823
This commit is contained in:
parent
94712824d6
commit
757a8c79fd
3 changed files with 6 additions and 0 deletions
|
|
@ -31,6 +31,7 @@ class EmbeddingComponent:
|
|||
self.embedding_model = HuggingFaceEmbedding(
|
||||
model_name=settings.huggingface.embedding_hf_model_name,
|
||||
cache_folder=str(models_cache_path),
|
||||
max_length=settings.huggingface.embedding_hf_max_length,
|
||||
)
|
||||
case "sagemaker":
|
||||
try:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue