import json
import logging
import os
from typing import Annotated, AsyncGenerator, List, Optional
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import StreamingResponse
import common.dependencies as DI
from common import auth
from common.configuration import Configuration
from components.llm.common import (ChatRequest, LlmParams, LlmPredictParams,
Message)
from components.llm.deepinfra_api import DeepInfraApi
from components.llm.utils import append_llm_response_to_history
from components.services.dataset import DatasetService
from components.services.dialogue import DialogueService
from components.services.entity import EntityService
from components.services.llm_config import LLMConfigService
from components.services.llm_prompt import LlmPromptService
from components.services.log import LogService
from schemas.log import LogCreateSchema
router = APIRouter(prefix='/llm', tags=['LLM chat'])
logger = logging.getLogger(__name__)

# Module-level configuration; read once at import time.
conf = DI.get_config()

# Default LLM connection parameters for the DeepInfra backend.
# NOTE(review): sampling values (temperature/top_p/...) appear hand-tuned;
# confirm before changing.
llm_params = LlmParams(
    url=conf.llm_config.base_url,
    model=conf.llm_config.model,
    tokenizer="unsloth/Llama-3.3-70B-Instruct",
    type="deepinfra",
    default=True,
    predict_params=LlmPredictParams(
        temperature=0.15,
        top_p=0.95,
        min_p=0.05,
        seed=42,
        repetition_penalty=1.2,
        presence_penalty=1.1,
        n_predict=2000,
    ),
    # API key is pulled from the environment; the variable name comes from config.
    api_key=os.environ.get(conf.llm_config.api_key_env),
    context_length=128000,
)
# TODO: move into the DI container
llm_api = DeepInfraApi(params=llm_params)
# TODO: Вынести
# TODO: move out of this module
def get_last_user_message(chat_request: ChatRequest) -> Optional[Message]:
    """Return the most recent message with role "user" from the request history.

    Walks the history from newest to oldest and returns the first match,
    or ``None`` when the history contains no user messages.
    """
    for message in reversed(chat_request.history):
        if message.role == "user":
            return message
    return None
def insert_search_results_to_message(
    chat_request: ChatRequest, new_content: str
) -> bool:
    """Replace the content of the newest user message that has no search results.

    Despite the name, this rewrites ``msg.content`` (not ``msg.searchResults``)
    on the most recent user message whose ``searchResults`` is empty or unset.

    Returns:
        True if a message was updated, False if no eligible message was found.
    """
    for msg in reversed(chat_request.history):
        # `not msg.searchResults` already covers None, "" and empty containers,
        # so the former explicit `is None` check was redundant.
        if msg.role == "user" and not msg.searchResults:
            msg.content = new_content
            return True
    return False
def try_insert_search_results(
    chat_request: ChatRequest, search_results: str
) -> bool:
    """Attach search results to the most recent user message.

    Scans the history from newest to oldest; on the first user message it
    sets ``searchResults`` and resets ``searchEntities`` to an empty list.

    Returns:
        True if a user message was found and updated, otherwise False.
    """
    for message in reversed(chat_request.history):
        if message.role != "user":
            continue
        message.searchResults = search_results
        message.searchEntities = []
        return True
    return False
def try_insert_reasoning(
    chat_request: ChatRequest, reasoning: str
) -> bool:
    """Attach reasoning text to the most recent user message.

    Fix: the previous implementation never stopped after the first match and
    so assigned ``reasoning`` to EVERY user message in the history. It now
    stops at the newest user message and reports the outcome, mirroring
    ``try_insert_search_results``.

    Returns:
        True if a user message was found and updated, otherwise False.
    """
    for msg in reversed(chat_request.history):
        if msg.role == "user":
            msg.reasoning = reasoning
            return True
    return False
def collapse_history_to_first_message(chat_history: List[Message]) -> List[Message]:
"""
Сворачивает историю в первое сообщение и возвращает новый объект ChatRequest.
Формат:
текст сообщения
текст reasoning
текст search-results
текст ответа
текст reasoning
текст search-results
user:
текст последнего запроса
assistant:
"""
if not chat_history:
return []
last_user_message = chat_history[-1]
if chat_history[-1].role != "user":
logger.warning("Last message is not user message")
# Собираем историю в одну строку
collapsed_content = []
collapsed_content.append("\n")
for msg in chat_history[:-1]:
if msg.content.strip():
tabulated_content = msg.content.strip().replace("\n", "\n\t\t")
collapsed_content.append(f"\t<{msg.role.strip()}>\n\t\t{tabulated_content}\n\t{msg.role.strip()}>\n")
if msg.role == "user":
tabulated_reasoning = msg.reasoning.strip().replace("\n", "\n\t\t")
tabulated_search_results = msg.searchResults.strip().replace("\n", "\n\t\t")
# collapsed_content.append(f"\t\n\t\t{tabulated_reasoning}\n\t\n")
# collapsed_content.append(f"\t\n\t\t{tabulated_search_results}\n\t\n")
collapsed_content.append("\n")
collapsed_content.append("\n")
if last_user_message.content.strip():
tabulated_content = last_user_message.content.strip().replace("\n", "\n\t\t")
tabulated_reasoning = last_user_message.reasoning.strip().replace("\n", "\n\t\t")
tabulated_search_results = last_user_message.searchResults.strip().replace("\n", "\n\t\t")
# collapsed_content.append(f"\t\n\t\t{tabulated_reasoning}\n\t\n")
collapsed_content.append(f"\t\n\t\t{tabulated_search_results}\n\t\n")
collapsed_content.append(f"\t\n\t\t{tabulated_content}\n\n")
collapsed_content.append("\n")
collapsed_content.append("