mirror of https://github.com/zylon-ai/private-gpt.git
synced 2025-12-22 23:22:57 +01:00

Updated chat history and items id with uuid

This commit is contained in:
parent 4bc9dd7870
commit ee0e1cd839

13 changed files with 85 additions and 29 deletions
@@ -1,8 +1,8 @@
-"""Create chat history and item
+"""create chat history and items
 
-Revision ID: 7bd9152cf172
+Revision ID: 9957402017dc
 Revises:
-Create Date: 2024-04-03 16:23:24.813222
+Create Date: 2024-04-04 11:31:53.261330
 
 """
 from typing import Sequence, Union

@@ -12,7 +12,7 @@ import sqlalchemy as sa
 
 # revision identifiers, used by Alembic.
-revision: str = '7bd9152cf172'
+revision: str = '9957402017dc'
 down_revision: Union[str, None] = None
 branch_labels: Union[str, Sequence[str], None] = None
 depends_on: Union[str, Sequence[str], None] = None

@@ -21,7 +21,7 @@ depends_on: Union[str, Sequence[str], None] = None
 def upgrade() -> None:
     # ### commands auto generated by Alembic - please adjust! ###
     op.create_table('chat_history',
-    sa.Column('conversation_id', sa.Integer(), nullable=False),
+    sa.Column('conversation_id', sa.UUID(), nullable=False),
     sa.Column('title', sa.String(length=255), nullable=True),
     sa.Column('created_at', sa.DateTime(), nullable=True),
     sa.Column('updated_at', sa.DateTime(), nullable=True),

@@ -36,7 +36,7 @@ def upgrade() -> None:
     sa.Column('created_at', sa.DateTime(), nullable=True),
     sa.Column('updated_at', sa.DateTime(), nullable=True),
     sa.Column('like', sa.Boolean(), nullable=True),
-    sa.Column('conversation_id', sa.Integer(), nullable=False),
+    sa.Column('conversation_id', sa.UUID(), nullable=False),
     sa.ForeignKeyConstraint(['conversation_id'], ['chat_history.conversation_id'], ),
     sa.PrimaryKeyConstraint('id')
     )
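Note: both spellings of the UUID column type appear in this commit. `sa.UUID()` (used in the regenerated migration above) is the dialect-agnostic type added in SQLAlchemy 2.0, while the ORM model further down uses the PostgreSQL-specific `UUID(as_uuid=True)`. A minimal comparison sketch, assuming SQLAlchemy 2.x and a PostgreSQL backend:

    import sqlalchemy as sa
    from sqlalchemy.dialects.postgresql import UUID

    # Dialect-agnostic spelling, as emitted by Alembic autogenerate above (SQLAlchemy 2.0+).
    generic_col = sa.Column('conversation_id', sa.UUID(), nullable=False)

    # PostgreSQL-specific spelling used in the ORM model later in this commit;
    # as_uuid=True makes the driver hand back uuid.UUID objects rather than strings.
    pg_col = sa.Column('conversation_id', UUID(as_uuid=True), nullable=False)

On PostgreSQL both map to the native uuid column type, so the migration and the model stay consistent.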
alembic/versions/eb18396f592a_create_title_event.py  (new file, 32 lines)

@@ -0,0 +1,32 @@
+"""create title event
+
+Revision ID: eb18396f592a
+Revises: 9957402017dc
+Create Date: 2024-04-04 11:47:55.600187
+
+"""
+from typing import Sequence, Union
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision: str = 'eb18396f592a'
+down_revision: Union[str, None] = '9957402017dc'
+branch_labels: Union[str, Sequence[str], None] = None
+depends_on: Union[str, Sequence[str], None] = None
+
+
+def upgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column('chat_history', sa.Column('_title_generated', sa.Boolean(), nullable=True))
+    # op.create_unique_constraint('unique_user_role', 'user_roles', ['user_id', 'role_id', 'company_id'])
+    # ### end Alembic commands ###
+
+
+def downgrade() -> None:
+    # ### commands auto generated by Alembic - please adjust! ###
+    # op.drop_constraint('unique_user_role', 'user_roles', type_='unique')
+    op.drop_column('chat_history', '_title_generated')
+    # ### end Alembic commands ###
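The new revision chains onto 9957402017dc, so upgrading to head applies the UUID schema first and then adds the `_title_generated` flag. A minimal sketch of driving the same chain programmatically, assuming the project's alembic.ini is in the working directory (equivalent to running `alembic upgrade head`):

    from alembic import command
    from alembic.config import Config

    cfg = Config('alembic.ini')               # path to the Alembic config is assumed
    command.upgrade(cfg, 'head')              # runs 9957402017dc, then eb18396f592a
    # command.downgrade(cfg, '9957402017dc')  # would drop _title_generated again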
File diff suppressed because one or more lines are too long
Binary file not shown.
poetry.lock  (generated, 14 changes)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand.
 
 [[package]]
 name = "aiofiles"

@@ -6511,6 +6511,16 @@ h2 = ["h2 (>=4,<5)"]
 socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"]
 zstd = ["zstandard (>=0.18.0)"]
 
+[[package]]
+name = "uuid"
+version = "1.30"
+description = "UUID object and generation functions (Python 2.3 or higher)"
+optional = false
+python-versions = "*"
+files = [
+    {file = "uuid-1.30.tar.gz", hash = "sha256:1f87cc004ac5120466f36c5beae48b4c48cc411968eed0eaecd3da82aa96193f"},
+]
+
 [[package]]
 name = "uvicorn"
 version = "0.28.0"

@@ -7147,4 +7157,4 @@ vector-stores-qdrant = ["llama-index-vector-stores-qdrant"]
 [metadata]
 lock-version = "2.0"
 python-versions = ">=3.11,<3.12"
-content-hash = "b0715db49b2b01f8b6c1b3d122d93de58ef052be999114390fab9fa19cc9e794"
+content-hash = "57d64e26ba5f7100c0f6d61135808b190a9f6b8c95a6407b984b7cbea244d5f5"
@@ -3,7 +3,7 @@ import logging
 from injector import inject, singleton
 from llama_index.core.embeddings import BaseEmbedding, MockEmbedding
 
-from private_gpt.paths import models_cache_path
+from private_gpt.paths import models_cache_path, models_path
 from private_gpt.settings.settings import Settings
 
 logger = logging.getLogger(__name__)

@@ -27,7 +27,6 @@ class EmbeddingComponent:
                     raise ImportError(
                         "Local dependencies not found, install with `poetry install --extras embeddings-huggingface`"
                     ) from e
 
                 self.embedding_model = HuggingFaceEmbedding(
                     model_name=settings.huggingface.embedding_hf_model_name,
                     cache_folder=str(models_cache_path),
@@ -18,11 +18,12 @@ from private_gpt.server.chat.chat_router import ChatBody, chat_completion
 from private_gpt.server.utils.auth import authenticated
 from private_gpt.users.api import deps
 from private_gpt.users import crud, models, schemas
+import uuid
 completions_router = APIRouter(prefix="/v1", dependencies=[Depends(authenticated)])
 
 
 class CompletionsBody(BaseModel):
-    conversation_id: int
+    conversation_id: uuid.UUID
     prompt: str
     system_prompt: str | None = None
     use_context: bool = False
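With `conversation_id` typed as `uuid.UUID`, Pydantic parses and validates the value from its canonical string form in the request body. A minimal validation sketch (hypothetical values; only the fields visible in the hunk above are included):

    import uuid
    from pydantic import BaseModel

    class CompletionsBody(BaseModel):
        conversation_id: uuid.UUID
        prompt: str
        system_prompt: str | None = None
        use_context: bool = False

    body = CompletionsBody(
        conversation_id='550e8400-e29b-41d4-a716-446655440000',  # hypothetical id
        prompt='Summarize the previous answer',
    )
    assert isinstance(body.conversation_id, uuid.UUID)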
@@ -1,6 +1,6 @@
 import logging
 import traceback
-
+import uuid
 from sqlalchemy.orm import Session
 from fastapi.responses import JSONResponse
 from fastapi import APIRouter, Depends, HTTPException, status, Security

@@ -63,9 +63,9 @@ def create_chat_history(
     )
 
 
-@router.get("/{chat_history_id}", response_model=schemas.ChatHistory)
+@router.get("/{conversation_id}", response_model=schemas.ChatHistory)
 def read_chat_history(
-    chat_history_id: int,
+    conversation_id: uuid.UUID,
     db: Session = Depends(deps.get_db),
     current_user: models.User = Security(
         deps.get_current_user,

@@ -75,7 +75,7 @@ def read_chat_history(
     Read a chat history by ID
     """
     try:
-        chat_history = crud.chat.get_by_id(db, id=chat_history_id)
+        chat_history = crud.chat.get_by_id(db, id=conversation_id)
         if chat_history is None or chat_history.user_id != current_user.id:
             raise HTTPException(
                 status_code=404, detail="Chat history not found")

@@ -121,3 +121,5 @@ def delete_chat_history(
             status_code=500,
             detail="Internal Server Error",
         )
+
+
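Because the path parameter is now declared as `uuid.UUID`, FastAPI validates it before the handler runs and rejects a malformed id with a 422 instead of passing a bad value to the CRUD layer. A minimal client-side sketch (the base URL, router prefix, and auth header are assumptions, since they are not visible in this hunk):

    import uuid
    import requests

    conversation_id = uuid.UUID('550e8400-e29b-41d4-a716-446655440000')  # hypothetical id
    resp = requests.get(
        f'http://localhost:8001/v1/chat/{conversation_id}',   # route prefix assumed
        headers={'Authorization': 'Bearer <token>'},           # auth scheme assumed
    )
    print(resp.status_code, resp.json())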
@@ -2,14 +2,14 @@ from typing import Optional, List, Union, Dict, Any
 from fastapi import HTTPException, status
 from sqlalchemy.orm import Session
 from sqlalchemy.exc import IntegrityError
 
+import uuid
 from private_gpt.users.crud.base import CRUDBase
 from private_gpt.users.models.chat import ChatHistory, ChatItem
 from private_gpt.users.schemas.chat import ChatHistoryCreate, ChatHistoryCreate, ChatItemCreate, ChatItemUpdate
 
 
 class CRUDChat(CRUDBase[ChatHistory, ChatHistoryCreate, ChatHistoryCreate]):
-    def get_by_id(self, db: Session, *, id: int) -> Optional[ChatHistory]:
+    def get_by_id(self, db: Session, *, id: uuid.UUID) -> Optional[ChatHistory]:
         return db.query(self.model).filter(ChatHistory.conversation_id == id).first()
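`get_by_id` now filters on the UUID primary key, so callers hand it a `uuid.UUID` rather than an integer. A minimal lookup sketch (the session handling is assumed):

    import uuid
    from sqlalchemy.orm import Session
    from private_gpt.users import crud

    def fetch_history(db: Session, raw_id: str):
        conversation_id = uuid.UUID(raw_id)   # raises ValueError for a malformed id
        return crud.chat.get_by_id(db, id=conversation_id)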
@@ -1,15 +1,17 @@
+import uuid
 from datetime import datetime
 from sqlalchemy.orm import relationship
-from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Text, Boolean
-from private_gpt.users.db.base_class import Base
+from sqlalchemy.dialects.postgresql import UUID
+from sqlalchemy import Column, Integer, String, DateTime, ForeignKey, Text, Boolean, event
+
+from private_gpt.users.db.base_class import Base
 
 
 class ChatHistory(Base):
     """Models a chat history table"""
 
     __tablename__ = "chat_history"
 
-    conversation_id = Column(Integer, nullable=False, primary_key=True)
+    conversation_id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
     title = Column(String(255), nullable=True)
     created_at = Column(DateTime, default=datetime.now)
     updated_at = Column(DateTime, default=datetime.now,

@@ -18,6 +20,7 @@ class ChatHistory(Base):
     user = relationship("User", back_populates="chat_histories")
     chat_items = relationship(
         "ChatItem", back_populates="chat_history", cascade="all, delete-orphan")
+    _title_generated = Column(Boolean, default=False)
 
     def __init__(self, user_id, chat_items=None, **kwargs):
         super().__init__(**kwargs)

@@ -32,7 +35,7 @@ class ChatHistory(Base):
             first_user_chat_item = user_chat_items[0]
             self.title = first_user_chat_item.content[:30]
         else:
-            self.title = "Untitled Chat"
+            self.title = str(self.conversation_id)
 
     def __repr__(self):
         """Returns string representation of model instance"""

@@ -51,10 +54,18 @@ class ChatItem(Base):
     updated_at = Column(DateTime, default=datetime.now,
                         onupdate=datetime.now)
     like = Column(Boolean, default=True)
-    conversation_id = Column(Integer, ForeignKey(
+    conversation_id = Column(UUID(as_uuid=True), ForeignKey(
         "chat_history.conversation_id"), nullable=False)
     chat_history = relationship("ChatHistory", back_populates="chat_items")
 
     def __repr__(self):
         """Returns string representation of model instance"""
         return f"<ChatItem {self.id!r}>"
+
+
+@event.listens_for(ChatHistory, "after_insert")
+def receive_after_insert(mapper, connection, target):
+    """Update title after insertion to reflect the conversation_id"""
+    if not target._title_generated:
+        target.generate_title()
+        target._title_generated = True
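The model now generates its own primary key through `default=uuid.uuid4`, and the `after_insert` listener shown above fills in a title once the row exists; with no user chat items, `generate_title()` falls back to the stringified `conversation_id`. A minimal usage sketch, assuming a configured session factory (`SessionLocal` is hypothetical) and an existing user id:

    import uuid
    from private_gpt.users.models.chat import ChatHistory

    with SessionLocal() as db:
        history = ChatHistory(user_id=1)      # user id 1 is hypothetical
        db.add(history)
        db.flush()                            # INSERT runs; default=uuid.uuid4 fills the key
        assert isinstance(history.conversation_id, uuid.UUID)
        db.commit()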
@@ -1,10 +1,10 @@
 from datetime import datetime
 from typing import List, Optional
 from pydantic import BaseModel
 
-
+import uuid
 
 class ChatItemBase(BaseModel):
-    conversation_id: int
+    conversation_id: uuid.UUID
     sender: str
     content: Optional[str]

@@ -38,10 +38,10 @@ class ChatHistoryUpdate(ChatHistoryBase):
     chat_items: Optional[List[ChatItemCreate]]
 
 class Chat(BaseModel):
-    conversation_id: int
+    conversation_id: uuid.UUID
 
 class ChatHistory(ChatHistoryBase):
-    conversation_id: int
+    conversation_id: uuid.UUID
     created_at: datetime
     updated_at: datetime
     chat_items: List[ChatItem]

@@ -50,7 +50,7 @@ class ChatHistory(ChatHistoryBase):
         orm_mode = True
 
 class ChatDelete(BaseModel):
-    conversation_id: int
+    conversation_id: uuid.UUID
 
 class CreateChatHistory(BaseModel):
     user_id: int
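On the schema side, declaring `conversation_id: uuid.UUID` means request payloads are validated and response models serialize the key back to its canonical string form. A minimal validation sketch (hypothetical value):

    import uuid
    from private_gpt.users.schemas.chat import ChatDelete

    payload = ChatDelete(conversation_id='550e8400-e29b-41d4-a716-446655440000')
    assert isinstance(payload.conversation_id, uuid.UUID)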
@@ -53,6 +53,7 @@ gradio = {version ="^4.19.2", optional = true}
 aiofiles = "^23.2.1"
 timm = "^0.9.16"
 fastapi-filter = {extras = ["sqlalchemy"], version = "^1.1.0"}
+uuid = "^1.30"
 
 [tool.poetry.extras]
 ui = ["gradio"]
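Note: the `uuid` distribution pinned here is a legacy standalone copy of the module (its metadata in the lock file above says "Python 2.3 or higher"); on Python 3.11 the standard library already provides `uuid`, which is what the `import uuid` statements and `uuid.uuid4` defaults in this commit rely on. A minimal sketch of the generation call:

    import uuid

    new_id = uuid.uuid4()
    print(new_id, new_id.version)   # random UUID, version 4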