Separate local mode into llms-llama-cpp and embeddings-huggingface for clarity

This commit is contained in:
imartinez 2024-02-29 16:40:11 +01:00
parent 85276893a3
commit c3fe36e070
21 changed files with 186 additions and 106 deletions
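
In effect, the single `local` setup is split per component: the LLM mode `local` becomes `llamacpp` (configured under `settings.llamacpp`), the embedding mode `local` becomes `huggingface` (configured under `settings.huggingface`), and every optional backend now points at its own Poetry extra in its ImportError message. As a hypothetical overview (not part of the commit itself), the renames read off the error messages in the diff below are:

    # Hypothetical summary of the extras rename, read off the ImportError
    # messages in this diff: (component, old extra, new extra).
    RENAMED_EXTRAS = [
        ("llm", "local", "llms-llama-cpp"),
        ("llm", "sagemaker", "llms-sagemaker"),
        ("llm", "openai", "llms-openai"),
        ("llm", "openailike", "llms-openailike"),
        ("llm", "ollama", "llms-ollama"),
        ("embeddings", "local", "embeddings-huggingface"),
        ("embeddings", "sagemaker", "embeddings-sagemaker"),
        ("embeddings", "openai", "embeddings-openai"),
        ("vector store", "qdrant", "vector-stores-qdrant"),
        ("vector store", "chroma", "vector-stores-chroma"),
        ("vector store", "postgres", "vector-stores-postgres"),
    ]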

View file

@@ -18,18 +18,18 @@ class EmbeddingComponent:
embedding_mode = settings.embedding.mode
logger.info("Initializing the embedding model in mode=%s", embedding_mode)
match embedding_mode:
case "local":
case "huggingface":
try:
from llama_index.embeddings.huggingface import ( # type: ignore
HuggingFaceEmbedding,
)
except ImportError as e:
raise ImportError(
"Local dependencies not found, install with `poetry install --extras local`"
"Local dependencies not found, install with `poetry install --extras embeddings-huggingface`"
) from e
self.embedding_model = HuggingFaceEmbedding(
- model_name=settings.local.embedding_hf_model_name,
+ model_name=settings.huggingface.embedding_hf_model_name,
cache_folder=str(models_cache_path),
)
case "sagemaker":
@@ -39,7 +39,7 @@ class EmbeddingComponent:
)
except ImportError as e:
raise ImportError(
"Sagemaker dependencies not found, install with `poetry install --extras sagemaker`"
"Sagemaker dependencies not found, install with `poetry install --extras embeddings-sagemaker`"
) from e
self.embedding_model = SagemakerEmbedding(
@@ -52,7 +52,7 @@ class EmbeddingComponent:
)
except ImportError as e:
raise ImportError(
"OpenAI dependencies not found, install with `poetry install --extras openai`"
"OpenAI dependencies not found, install with `poetry install --extras embeddings-openai`"
) from e
openai_settings = settings.openai.api_key
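
For reference, the "huggingface" branch of EmbeddingComponent, reassembled from the added lines of the first hunk above (indentation restored; surrounding settings and cache-path plumbing as in the hunk):

    # Post-change shape of the "huggingface" case, reassembled from the hunk above.
    case "huggingface":
        try:
            from llama_index.embeddings.huggingface import (  # type: ignore
                HuggingFaceEmbedding,
            )
        except ImportError as e:
            raise ImportError(
                "Local dependencies not found, install with `poetry install --extras embeddings-huggingface`"
            ) from e
        self.embedding_model = HuggingFaceEmbedding(
            model_name=settings.huggingface.embedding_hf_model_name,
            cache_folder=str(models_cache_path),
        )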

View file

@@ -7,26 +7,20 @@ import logging
from typing import TYPE_CHECKING, Any
import boto3 # type: ignore
- from llama_index.bridge.pydantic import Field
- from llama_index.llms import (
+ from llama_index.core.base.llms.generic_utils import (
+ completion_response_to_chat_response,
+ stream_completion_response_to_chat_response,
+ )
+ from llama_index.core.bridge.pydantic import Field
+ from llama_index.core.llms import (
CompletionResponse,
CustomLLM,
LLMMetadata,
)
- from llama_index.llms.base import (
+ from llama_index.core.llms.callbacks import (
llm_chat_callback,
llm_completion_callback,
)
- from llama_index.llms.generic_utils import (
- completion_response_to_chat_response,
- stream_completion_response_to_chat_response,
- )
- from llama_index.llms.llama_utils import (
- completion_to_prompt as generic_completion_to_prompt,
- )
- from llama_index.llms.llama_utils import (
- messages_to_prompt as generic_messages_to_prompt,
- )
if TYPE_CHECKING:
from collections.abc import Sequence
@@ -161,8 +155,8 @@ class SagemakerLLM(CustomLLM):
model_kwargs = model_kwargs or {}
model_kwargs.update({"n_ctx": context_window, "verbose": verbose})
- messages_to_prompt = messages_to_prompt or generic_messages_to_prompt
- completion_to_prompt = completion_to_prompt or generic_completion_to_prompt
+ messages_to_prompt = messages_to_prompt or {}
+ completion_to_prompt = completion_to_prompt or {}
generate_kwargs = generate_kwargs or {}
generate_kwargs.update(

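Put back together, the import block of SagemakerLLM after this change reads as below (reassembled from the hunk above): everything moves under llama_index.core, and the llama_utils prompt helpers disappear, which is why the messages_to_prompt / completion_to_prompt fallbacks in the second hunk now default to {} instead of the generic helpers.

    # Post-change imports of SagemakerLLM, reassembled from the hunk above.
    import logging
    from typing import TYPE_CHECKING, Any

    import boto3  # type: ignore
    from llama_index.core.base.llms.generic_utils import (
        completion_response_to_chat_response,
        stream_completion_response_to_chat_response,
    )
    from llama_index.core.bridge.pydantic import Field
    from llama_index.core.llms import (
        CompletionResponse,
        CustomLLM,
        LLMMetadata,
    )
    from llama_index.core.llms.callbacks import (
        llm_chat_callback,
        llm_completion_callback,
    )
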
View file

@@ -30,18 +30,18 @@ class LLMComponent:
logger.info("Initializing the LLM in mode=%s", llm_mode)
match settings.llm.mode:
case "local":
case "llamacpp":
try:
from llama_index.llms.llama_cpp import LlamaCPP # type: ignore
except ImportError as e:
raise ImportError(
"Local dependencies not found, install with `poetry install --extras local`"
"Local dependencies not found, install with `poetry install --extras llms-llama-cpp`"
) from e
- prompt_style = get_prompt_style(settings.local.prompt_style)
+ prompt_style = get_prompt_style(settings.llamacpp.prompt_style)
self.llm = LlamaCPP(
- model_path=str(models_path / settings.local.llm_hf_model_file),
+ model_path=str(models_path / settings.llamacpp.llm_hf_model_file),
temperature=0.1,
max_new_tokens=settings.llm.max_new_tokens,
context_window=settings.llm.context_window,
@@ -60,7 +60,7 @@ class LLMComponent:
from private_gpt.components.llm.custom.sagemaker import SagemakerLLM
except ImportError as e:
raise ImportError(
"Sagemaker dependencies not found, install with `poetry install --extras sagemaker`"
"Sagemaker dependencies not found, install with `poetry install --extras llms-sagemaker`"
) from e
self.llm = SagemakerLLM(
@@ -73,7 +73,7 @@ class LLMComponent:
from llama_index.llms.openai import OpenAI # type: ignore
except ImportError as e:
raise ImportError(
"OpenAI dependencies not found, install with `poetry install --extras openai`"
"OpenAI dependencies not found, install with `poetry install --extras llms-openai`"
) from e
openai_settings = settings.openai
@@ -87,7 +87,7 @@ class LLMComponent:
from llama_index.llms.openai_like import OpenAILike # type: ignore
except ImportError as e:
raise ImportError(
"OpenAILike dependencies not found, install with `poetry install --extras openailike`"
"OpenAILike dependencies not found, install with `poetry install --extras llms-openailike`"
) from e
openai_settings = settings.openai
@@ -104,7 +104,7 @@ class LLMComponent:
from llama_index.llms.ollama import Ollama # type: ignore
except ImportError as e:
raise ImportError(
"Ollama dependencies not found, install with `poetry install --extras ollama`"
"Ollama dependencies not found, install with `poetry install --extras llms-ollama`"
) from e
ollama_settings = settings.ollama
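
Likewise, the "llamacpp" branch of LLMComponent ends up roughly as follows (reassembled from the first hunk of this file; keyword arguments not shown in the hunk are unchanged):

    # Post-change shape of the "llamacpp" case, reassembled from the hunk above.
    case "llamacpp":
        try:
            from llama_index.llms.llama_cpp import LlamaCPP  # type: ignore
        except ImportError as e:
            raise ImportError(
                "Local dependencies not found, install with `poetry install --extras llms-llama-cpp`"
            ) from e
        prompt_style = get_prompt_style(settings.llamacpp.prompt_style)
        self.llm = LlamaCPP(
            model_path=str(models_path / settings.llamacpp.llm_hf_model_file),
            temperature=0.1,
            max_new_tokens=settings.llm.max_new_tokens,
            context_window=settings.llm.context_window,
            # remaining keyword arguments as before (not shown in the hunk)
        )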

View file

@@ -45,7 +45,7 @@ class VectorStoreComponent:
)
except ImportError as e:
raise ImportError(
"Postgres dependencies not found, install with `poetry install --extras postgres`"
"Postgres dependencies not found, install with `poetry install --extras vector-stores-postgres`"
) from e
if settings.pgvector is None:
@@ -72,7 +72,7 @@ class VectorStoreComponent:
)
except ImportError as e:
raise ImportError(
"ChromaDB dependencies not found, install with `poetry install --extras chroma`"
"ChromaDB dependencies not found, install with `poetry install --extras vector-stores-chroma`"
) from e
chroma_settings = ChromaSettings(anonymized_telemetry=False)
@@ -99,7 +99,7 @@ class VectorStoreComponent:
from qdrant_client import QdrantClient
except ImportError as e:
raise ImportError(
"Qdrant dependencies not found, install with `poetry install --extras qdrant`"
"Qdrant dependencies not found, install with `poetry install --extras vector-stores-qdrant`"
) from e
if settings.qdrant is None:
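
The vector store component follows the same convention; for example, its Qdrant guard now reads roughly as below (reassembled from the last hunk above):

    # Post-change Qdrant import guard, reassembled from the hunk above;
    # the extra is now vector-stores-qdrant.
    try:
        from qdrant_client import QdrantClient
    except ImportError as e:
        raise ImportError(
            "Qdrant dependencies not found, install with `poetry install --extras vector-stores-qdrant`"
        ) from e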