From 0eb24a54ec230bcc79bb39c029384d581997c888 Mon Sep 17 00:00:00 2001
From: Javier Martinez
Date: Mon, 29 Jul 2024 08:46:35 +0200
Subject: [PATCH] fix: when two user messages were sent

---
 private_gpt/ui/ui.py | 29 +++++++++++++----------------
 1 file changed, 13 insertions(+), 16 deletions(-)

diff --git a/private_gpt/ui/ui.py b/private_gpt/ui/ui.py
index c4bc72a..5e422e7 100644
--- a/private_gpt/ui/ui.py
+++ b/private_gpt/ui/ui.py
@@ -1,6 +1,4 @@
 """This file should be imported if and only if you want to run the UI locally."""
-
-import itertools
 import logging
 import time
 from collections.abc import Iterable
@@ -113,21 +111,20 @@ class PrivateGptUi:
             yield full_response
 
         def build_history() -> list[ChatMessage]:
-            history_messages: list[ChatMessage] = list(
-                itertools.chain(
-                    *[
-                        [
-                            ChatMessage(content=interaction[0], role=MessageRole.USER),
-                            ChatMessage(
-                                # Remove from history content the Sources information
-                                content=interaction[1].split(SOURCES_SEPARATOR)[0],
-                                role=MessageRole.ASSISTANT,
-                            ),
-                        ]
-                        for interaction in history
-                    ]
+            history_messages: list[ChatMessage] = []
+
+            for interaction in history:
+                history_messages.append(
+                    ChatMessage(content=interaction[0], role=MessageRole.USER)
                 )
-            )
+                if len(interaction) > 1 and interaction[1] is not None:
+                    history_messages.append(
+                        ChatMessage(
+                            # Remove from history content the Sources information
+                            content=interaction[1].split(SOURCES_SEPARATOR)[0],
+                            role=MessageRole.ASSISTANT,
+                        )
+                    )
 
             # max 20 messages to try to avoid context overflow
             return history_messages[:20]
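
Note (illustrative only, not part of the applied diff): a minimal standalone sketch of
the failure mode this change guards against. It uses stand-in ChatMessage/MessageRole
classes, an assumed SOURCES_SEPARATOR value, and a hand-written history list in place
of the real llama_index imports, ui.py constants, and the history Gradio passes in.

    # Sketch: before the patch, an interaction with no assistant reply yet
    # (e.g. the user sent a second message while the first answer was pending)
    # made interaction[1].split(...) fail on None; the patched loop skips it.
    import itertools
    from dataclasses import dataclass

    SOURCES_SEPARATOR = "<hr>Sources: \n"  # assumed value for this sketch


    class MessageRole:  # stand-in for llama_index's MessageRole
        USER = "user"
        ASSISTANT = "assistant"


    @dataclass
    class ChatMessage:  # stand-in for llama_index's ChatMessage
        content: str
        role: str


    def build_history_old(history):
        # Pre-patch logic: assumes every interaction is a complete (user, assistant) pair.
        return list(
            itertools.chain(
                *[
                    [
                        ChatMessage(content=interaction[0], role=MessageRole.USER),
                        ChatMessage(
                            content=interaction[1].split(SOURCES_SEPARATOR)[0],
                            role=MessageRole.ASSISTANT,
                        ),
                    ]
                    for interaction in history
                ]
            )
        )


    def build_history_new(history):
        # Patched logic: always record the user turn, only record the assistant
        # turn when it exists and is not None.
        messages = []
        for interaction in history:
            messages.append(ChatMessage(content=interaction[0], role=MessageRole.USER))
            if len(interaction) > 1 and interaction[1] is not None:
                messages.append(
                    ChatMessage(
                        content=interaction[1].split(SOURCES_SEPARATOR)[0],
                        role=MessageRole.ASSISTANT,
                    )
                )
        return messages[:20]


    history = [["first question", "first answer"], ["second question", None]]

    try:
        build_history_old(history)
    except AttributeError as exc:
        print("old builder fails:", exc)  # 'NoneType' object has no attribute 'split'

    print([(m.role, m.content) for m in build_history_new(history)])
    # [('user', 'first question'), ('assistant', 'first answer'), ('user', 'second question')]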