Extract optional dependencies

This commit is contained in:
imartinez 2024-02-28 20:28:30 +01:00
parent d0a7d991a2
commit 3373e80850
7 changed files with 255 additions and 416 deletions

505
poetry.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -3,7 +3,7 @@ import json
from typing import Any
import boto3
from llama_index.embeddings.base import BaseEmbedding
from llama_index.core.base.embeddings.base import BaseEmbedding
from pydantic import Field, PrivateAttr

View file

@@ -19,23 +19,37 @@ class EmbeddingComponent:
logger.info("Initializing the embedding model in mode=%s", embedding_mode)
match embedding_mode:
case "local":
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
try:
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
except ImportError as e:
raise ImportError(
"Local dependencies not found, install with `poetry install --extras local`"
) from e
self.embedding_model = HuggingFaceEmbedding(
model_name=settings.local.embedding_hf_model_name,
cache_folder=str(models_cache_path),
)
case "sagemaker":
from private_gpt.components.embedding.custom.sagemaker import (
SagemakerEmbedding,
)
try:
from private_gpt.components.embedding.custom.sagemaker import (
SagemakerEmbedding,
)
except ImportError as e:
raise ImportError(
"Sagemaker dependencies not found, install with `poetry install --extras sagemaker`"
) from e
self.embedding_model = SagemakerEmbedding(
endpoint_name=settings.sagemaker.embedding_endpoint_name,
)
case "openai":
from llama_index.embeddings.openai import OpenAIEmbedding
try:
from llama_index.embeddings.openai import OpenAIEmbedding
except ImportError as e:
raise ImportError(
"OpenAI dependencies not found, install with `poetry install --extras openai`"
) from e
openai_settings = settings.openai.api_key
self.embedding_model = OpenAIEmbedding(api_key=openai_settings)

View file

@@ -32,7 +32,12 @@ class LLMComponent:
logger.info("Initializing the LLM in mode=%s", llm_mode)
match settings.llm.mode:
case "local":
from llama_index.llms.llama_cpp import LlamaCPP
try:
from llama_index.llms.llama_cpp import LlamaCPP
except ImportError as e:
raise ImportError(
"Local dependencies not found, install with `poetry install --extras local`"
) from e
prompt_style = get_prompt_style(settings.local.prompt_style)
@@ -52,7 +57,12 @@ class LLMComponent:
)
case "sagemaker":
from private_gpt.components.llm.custom.sagemaker import SagemakerLLM
try:
from private_gpt.components.llm.custom.sagemaker import SagemakerLLM
except ImportError as e:
raise ImportError(
"Sagemaker dependencies not found, install with `poetry install --extras sagemaker`"
) from e
self.llm = SagemakerLLM(
endpoint_name=settings.sagemaker.llm_endpoint_name,
@@ -60,7 +70,12 @@ class LLMComponent:
context_window=settings.llm.context_window,
)
case "openai":
from llama_index.llms.openai import OpenAI
try:
from llama_index.llms.openai import OpenAI
except ImportError as e:
raise ImportError(
"OpenAI dependencies not found, install with `poetry install --extras openai`"
) from e
openai_settings = settings.openai
self.llm = OpenAI(
@@ -69,7 +84,12 @@ class LLMComponent:
model=openai_settings.model,
)
case "openailike":
from llama_index.llms.openai_like import OpenAILike
try:
from llama_index.llms.openai_like import OpenAILike
except ImportError as e:
raise ImportError(
"OpenAILike dependencies not found, install with `poetry install --extras openailike`"
) from e
openai_settings = settings.openai
self.llm = OpenAILike(
@@ -80,12 +100,17 @@ class LLMComponent:
max_tokens=None,
api_version="",
)
case "mock":
self.llm = MockLLM()
case "ollama":
from llama_index.llms.ollama import Ollama
try:
from llama_index.llms.ollama import Ollama
except ImportError as e:
raise ImportError(
"Ollama dependencies not found, install with `poetry install --extras ollama`"
) from e
ollama_settings = settings.ollama
self.llm = Ollama(
model=ollama_settings.model, base_url=ollama_settings.api_base
)
case "mock":
self.llm = MockLLM()

View file

@@ -5,7 +5,6 @@ from injector import inject, singleton
from llama_index.core.indices.vector_store import VectorIndexRetriever, VectorStoreIndex
from llama_index.core.vector_stores.types import VectorStore
from private_gpt.components.vector_store.batched_chroma import BatchedChromaVectorStore
from private_gpt.open_ai.extensions.context_filter import ContextFilter
from private_gpt.paths import local_data_path
from private_gpt.settings.settings import Settings
@@ -40,7 +39,12 @@ class VectorStoreComponent:
def __init__(self, settings: Settings) -> None:
match settings.vectorstore.database:
case "pgvector":
from llama_index.vector_stores.postgres import PGVectorStore
try:
from llama_index.vector_stores.postgres import PGVectorStore
except ImportError as e:
raise ImportError(
"Postgres dependencies not found, install with `poetry install --extras postgres`"
) from e
if settings.pgvector is None:
raise ValueError(
@@ -56,15 +60,15 @@ class VectorStoreComponent:
case "chroma":
try:
from private_gpt.components.vector_store.batched_chroma import \
BatchedChromaVectorStore
import chromadb # type: ignore
from chromadb.config import ( # type: ignore
Settings as ChromaSettings,
)
except ImportError as e:
raise ImportError(
"'chromadb' is not installed."
"To use PrivateGPT with Chroma, install the 'chroma' extra."
"`poetry install --extras chroma`"
"ChromaDB dependencies not found, install with `poetry install --extras chroma`"
) from e
chroma_settings = ChromaSettings(anonymized_telemetry=False)
@@ -84,8 +88,13 @@ class VectorStoreComponent:
)
case "qdrant":
from llama_index.vector_stores.qdrant import QdrantVectorStore
from qdrant_client import QdrantClient
try:
from llama_index.vector_stores.qdrant import QdrantVectorStore
from qdrant_client import QdrantClient
except ImportError as e:
raise ImportError(
"Qdrant dependencies not found, install with `poetry install --extras qdrant`"
) from e
if settings.qdrant is None:
logger.info(

View file

@@ -52,7 +52,12 @@ def create_app(root_injector: Injector) -> FastAPI:
if settings.ui.enabled:
logger.debug("Importing the UI module")
from private_gpt.ui.ui import PrivateGptUi
try:
from private_gpt.ui.ui import PrivateGptUi
except ImportError as e:
raise ImportError(
"UI dependencies not found, install with `poetry install --extras ui`"
) from e
ui = root_injector.get(PrivateGptUi)
ui.mount_in_app(app, settings.ui.path)

View file

@@ -6,29 +6,42 @@ authors = ["Zylon <hi@zylon.ai>"]
[tool.poetry.dependencies]
python = ">=3.11,<3.12"
# PrivateGPT
fastapi = { extras = ["all"], version = "^0.110.0" }
boto3 = "^1.34.51"
python-multipart = "^0.0.9"
injector = "^0.21.0"
pyyaml = "^6.0.1"
python-multipart = "^0.0.9"
watchdog = "^4.0.0"
transformers = "^4.38.1"
# LlamaIndex core libs
llama-index-core = "^0.10.13"
llama-index-readers-file = "^0.1.6"
llama-index-embeddings-huggingface = "^0.1.4"
llama-index-embeddings-openai = "^0.1.6"
llama-index-vector-stores-qdrant = "^0.1.3"
llama-index-vector-stores-chroma = "^0.1.4"
llama-index-llms-llama-cpp = "^0.1.3"
llama-index-llms-openai = "^0.1.6"
llama-index-llms-openai-like = "^0.1.3"
llama-index-llms-ollama = "^0.1.2"
llama-index-vector-stores-postgres = "^0.1.2"
watchdog = "^4.0.0"
qdrant-client = "^1.7.3"
chromadb = {version = "^0.4.13", optional = true}
asyncpg = {version = "^0.29.0", optional = true}
pgvector = {version = "^0.2.5", optional = true}
psycopg2-binary = {version = "^2.9.9", optional = true}
sqlalchemy = {version = "^2.0.27", optional = true}
# Optional LlamaIndex integration libs
llama-index-llms-llama-cpp = {version = "^0.1.3", optional = true}
llama-index-llms-openai = {version = "^0.1.6", optional = true}
llama-index-llms-openai-like = {version ="^0.1.3", optional = true}
llama-index-llms-ollama = {version ="^0.1.2", optional = true}
llama-index-embeddings-huggingface = {version ="^0.1.4", optional = true}
llama-index-embeddings-openai = {version ="^0.1.6", optional = true}
llama-index-vector-stores-qdrant = {version ="^0.1.3", optional = true}
llama-index-vector-stores-chroma = {version ="^0.1.4", optional = true}
llama-index-vector-stores-postgres = {version ="^0.1.2", optional = true}
# Optional Sagemaker dependency
boto3 = {version ="^1.34.51", optional = true}
# Optional UI
gradio = {version ="^4.19.2", optional = true}
[tool.poetry.extras]
ui = ["gradio"]
local = ["llama-index-llms-llama-cpp", "llama-index-embeddings-huggingface"]
openai = ["llama-index-llms-openai", "llama-index-embeddings-openai"]
openai-like = ["llama-index-llms-openai-like"]
ollama = ["llama-index-llms-ollama"]
sagemaker = ["boto3"]
qdrant = ["llama-index-vector-stores-qdrant"]
chroma = ["llama-index-vector-stores-chroma"]
postgres = ["llama-index-vector-stores-postgres"]
[tool.poetry.group.dev.dependencies]
black = "^22"
@@ -40,26 +53,6 @@ ruff = "^0"
pytest-asyncio = "^0.21.1"
types-pyyaml = "^6.0.12.12"
# Dependencies for gradio UI
[tool.poetry.group.ui]
optional = true
[tool.poetry.group.ui.dependencies]
gradio = "^4.19.0"
[tool.poetry.group.local]
optional = true
[tool.poetry.group.local.dependencies]
llama-cpp-python = "^0.2.23"
numpy = "1.26.0"
sentence-transformers = "^2.2.2"
# https://stackoverflow.com/questions/76327419/valueerror-libcublas-so-0-9-not-found-in-the-system-path
torch = ">=2.0.0, !=2.0.1, !=2.1.0"
transformers = "^4.34.0"
[tool.poetry.extras]
chroma = ["chromadb"]
pgvector = ["sqlalchemy", "pgvector", "psycopg2-binary", "asyncpg"]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"