Add a timeout parameter to better support OpenAI-compatible ("openailike") LLM tools running on a local machine (like LM Studio).

Reuse other existing settings (max_new_tokens, temperature, tokenizer) to configure the OpenAILike object more completely.
jcbonnet-fwd 2024-04-18 15:15:13 +02:00
parent 08c4ab175e
commit 507ae62414
3 changed files with 13 additions and 1 deletion


@@ -105,8 +105,12 @@ class LLMComponent:
                     api_key=openai_settings.api_key,
                     model=openai_settings.model,
                     is_chat_model=True,
-                    max_tokens=None,
+                    max_tokens=settings.llm.max_new_tokens,
                     api_version="",
+                    temperature=settings.llm.temperature,
+                    tokenizer=settings.llm.tokenizer,
+                    timeout=openai_settings.request_timeout,
+                    reuse_client=False,
                 )
             case "ollama":
                 try:
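The hunk above reads openai_settings.request_timeout, so one of the other changed files presumably adds that field to the OpenAI settings model. A minimal sketch of what such a declaration might look like, assuming a pydantic-based settings class; the field name is taken from the diff, but the class shape, defaults, and descriptions here are illustrative only:

# Hypothetical sketch -- the real field lives in one of the other changed files.
from pydantic import BaseModel, Field


class OpenAISettings(BaseModel):
    api_base: str = "http://localhost:1234/v1"  # e.g. LM Studio's local server (assumed default)
    api_key: str = "not-needed"                 # local servers usually ignore the key
    model: str = "local-model"
    request_timeout: float = Field(
        120.0,
        description="Seconds to wait for the server; local models can be slow to respond.",
    )

With a field like this, a user could raise the timeout in their settings when a locally hosted model takes longer than the client's default to load or to produce a response.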