mirror of https://github.com/zylon-ai/private-gpt.git
feat: Upgrade LlamaIndex to 0.10 (#1663)
* Extract optional dependencies
* Separate local mode into llms-llama-cpp and embeddings-huggingface for clarity
* Support Ollama embeddings
* Upgrade to llamaindex 0.10.14. Remove legacy use of ServiceContext in ContextChatEngine
* Fix vector retriever filters
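For readers unfamiliar with the 0.10 API shift this message refers to, here is a minimal, hypothetical sketch (not code from this commit) of the pattern it describes: provider integrations installed as separate packages (here the Ollama LLM and embedding wrappers), with the global Settings object taking over from the legacy ServiceContext. The model names and Ollama URL are placeholder assumptions.

# Minimal sketch, assuming llama-index>=0.10 plus the separately installed
# integration packages llama-index-llms-ollama and llama-index-embeddings-ollama.
from llama_index.core import Document, Settings, VectorStoreIndex
from llama_index.embeddings.ollama import OllamaEmbedding
from llama_index.llms.ollama import Ollama

# 0.10 drops the legacy ServiceContext; components are configured on the
# global Settings object (or passed directly to the engines that need them).
Settings.llm = Ollama(model="mistral", base_url="http://localhost:11434")  # placeholder model/URL
Settings.embed_model = OllamaEmbedding(model_name="nomic-embed-text")      # placeholder model

# Tiny end-to-end check: embed one document with Ollama and query it.
index = VectorStoreIndex.from_documents(
    [Document(text="PrivateGPT now supports Ollama embeddings.")]
)
print(index.as_query_engine().query("What does PrivateGPT support?"))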
parent 12f3a39e8a
commit 45f05711eb
43 changed files with 1474 additions and 1396 deletions
@@ -3,7 +3,7 @@ import uuid
 from collections.abc import Iterator
 from typing import Literal
 
-from llama_index.llms import ChatResponse, CompletionResponse
+from llama_index.core.llms import ChatResponse, CompletionResponse
 from pydantic import BaseModel, Field
 
 from private_gpt.server.chunks.chunks_service import Chunk
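The hunk above reflects the 0.10 namespace split: types that previously lived under llama_index.* are now imported from llama_index.core, while provider integrations ship as separate packages. A small, hypothetical sketch of using the relocated response types (the values and usage below are illustrative, not from this commit):

# Old import path, removed by the hunk above (fails on llama-index 0.10):
#   from llama_index.llms import ChatResponse, CompletionResponse
# New 0.10 path, as added above: core types live under llama_index.core.
from llama_index.core.llms import ChatMessage, ChatResponse

# Constructing a ChatResponse by hand, e.g. in a test double or an adapter
# that converts a provider reply into llama-index types.
resp = ChatResponse(message=ChatMessage(role="assistant", content="pong"))
print(resp.message.content)  # -> "pong"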