Update poetry lock (#1209)

* Update the version of llama_index used to fix transient openai errors

* Update poetry.lock file

* Make `local` mode the default mode
This commit is contained in:
lopagela 2023-11-11 22:44:19 +01:00 committed by GitHub
parent a22969ad1f
commit a579c9bdc5
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 313 additions and 268 deletions

View file

@@ -44,12 +44,12 @@ class BatchedChromaVectorStore(ChromaVectorStore):
)
self.chroma_client = chroma_client
def add(self, nodes: list[BaseNode]) -> list[str]:
def add(self, nodes: list[BaseNode], **add_kwargs: Any) -> list[str]:
"""Add nodes to index, batching the insertion to avoid issues.
Args:
nodes: List[BaseNode]: list of nodes with embeddings
add_kwargs: _
"""
if not self.chroma_client:
raise ValueError("Client not initialized")

View file

@@ -46,8 +46,11 @@ class VectorStoreComponent:
"make_this_parameterizable_per_api_call"
) # TODO
self.vector_store = BatchedChromaVectorStore(
chroma_client=chroma_client, chroma_collection=chroma_collection
self.vector_store = typing.cast(
VectorStore,
BatchedChromaVectorStore(
chroma_client=chroma_client, chroma_collection=chroma_collection
),
)
@staticmethod

View file

@@ -1,6 +1,7 @@
import time
import uuid
from collections.abc import Iterator
from typing import Literal
from llama_index.llms import ChatResponse, CompletionResponse
from pydantic import BaseModel, Field
@@ -21,7 +22,7 @@ class OpenAIMessage(BaseModel):
(providing a default response, not AI generated).
"""
role: str = Field(default="user", enum=["assistant", "system", "user"])
role: Literal["assistant", "system", "user"] = Field(default="user")
content: str | None
@@ -46,9 +47,9 @@ class OpenAICompletion(BaseModel):
"""
id: str
object: str = Field("completion", enum=["completion", "completion.chunk"])
object: Literal["completion", "completion.chunk"] = Field(default="completion")
created: int = Field(..., examples=[1623340000])
model: str = Field(enum=["private-gpt"])
model: Literal["private-gpt"]
choices: list[OpenAIChoice]
@classmethod

View file

@@ -1,3 +1,5 @@
from typing import Literal
from fastapi import APIRouter
from pydantic import BaseModel, Field
@@ -16,8 +18,8 @@ class ChunksBody(BaseModel):
class ChunksResponse(BaseModel):
object: str = Field(enum=["list"])
model: str = Field(enum=["private-gpt"])
object: Literal["list"]
model: Literal["private-gpt"]
data: list[Chunk]

View file

@@ -1,4 +1,4 @@
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Literal
from injector import inject, singleton
from llama_index import ServiceContext, StorageContext, VectorStoreIndex
@@ -19,7 +19,7 @@ if TYPE_CHECKING:
class Chunk(BaseModel):
object: str = Field(enum=["context.chunk"])
object: Literal["context.chunk"]
score: float = Field(examples=[0.023])
document: IngestedDoc
text: str = Field(examples=["Outbound sales increased 20%, driven by new leads."])

View file

@@ -1,5 +1,7 @@
from typing import Literal
from fastapi import APIRouter
from pydantic import BaseModel, Field
from pydantic import BaseModel
from private_gpt.di import root_injector
from private_gpt.server.embeddings.embeddings_service import (
@@ -15,8 +17,8 @@ class EmbeddingsBody(BaseModel):
class EmbeddingsResponse(BaseModel):
object: str = Field(enum=["list"])
model: str = Field(enum=["private-gpt"])
object: Literal["list"]
model: Literal["private-gpt"]
data: list[Embedding]

View file

@@ -1,3 +1,5 @@
from typing import Literal
from injector import inject, singleton
from pydantic import BaseModel, Field
@@ -6,7 +8,7 @@ from private_gpt.components.embedding.embedding_component import EmbeddingCompon
class Embedding(BaseModel):
index: int
object: str = Field(enum=["embedding"])
object: Literal["embedding"]
embedding: list[float] = Field(examples=[[0.0023064255, -0.009327292]])

View file

@@ -1,3 +1,5 @@
from typing import Literal
from fastapi import APIRouter
from pydantic import BaseModel, Field
@@ -5,7 +7,7 @@ health_router = APIRouter()
class HealthResponse(BaseModel):
status: str = Field(enum=["ok"])
status: Literal["ok"] = Field(default="ok")
@health_router.get("/health", tags=["Health"])

View file

@@ -1,5 +1,7 @@
from typing import Literal
from fastapi import APIRouter, HTTPException, UploadFile
from pydantic import BaseModel, Field
from pydantic import BaseModel
from private_gpt.di import root_injector
from private_gpt.server.ingest.ingest_service import IngestedDoc, IngestService
@@ -8,8 +10,8 @@ ingest_router = APIRouter(prefix="/v1")
class IngestResponse(BaseModel):
object: str = Field(enum=["list"])
model: str = Field(enum=["private-gpt"])
object: Literal["list"]
model: Literal["private-gpt"]
data: list[IngestedDoc]

View file

@@ -1,7 +1,7 @@
import logging
import tempfile
from pathlib import Path
from typing import TYPE_CHECKING, Any, AnyStr
from typing import TYPE_CHECKING, Any, AnyStr, Literal
from injector import inject, singleton
from llama_index import (
@@ -40,7 +40,7 @@ logger = logging.getLogger(__name__)
class IngestedDoc(BaseModel):
object: str = Field(enum=["ingest.document"])
object: Literal["ingest.document"]
doc_id: str = Field(examples=["c202d5e6-7b69-4869-81cc-dd574ee8ee11"])
doc_metadata: dict[str, Any] | None = Field(
examples=[

View file

@@ -1,3 +1,5 @@
from typing import Literal
from pydantic import BaseModel, Field
from private_gpt.settings.settings_loader import load_active_profiles
@@ -57,7 +59,7 @@ class DataSettings(BaseModel):
class LLMSettings(BaseModel):
mode: str = Field(enum=["local", "open_ai", "sagemaker", "mock"])
mode: Literal["local", "open_ai", "sagemaker", "mock"]
class LocalSettings(BaseModel):