Spaces:
Running
Running
File size: 2,836 Bytes
ba907cd |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 |
"""
LLM service for model management and interaction.
This module provides services for LLM model creation and management.
"""
import os
from typing import Optional
from langchain_groq import ChatGroq
from langchain_community.tools.tavily_search import TavilySearchResults
from configs.config import Config, ErrorMessages
def create_llm_model(model_name: str) -> ChatGroq:
    """
    Create and configure an LLM model.

    Args:
        model_name: Name of the model to create

    Returns:
        Configured ChatGroq instance

    Raises:
        ValueError: If the Groq API key is missing
    """
    # Every model here is served through ChatGroq, so the key is required
    # regardless of model name.  The previous check only fired for names
    # containing "llama", letting other model names pass validation and
    # fail later inside the client with an opaque auth error.
    api_key = os.getenv("GROQ_API_KEY")
    if not api_key:
        raise ValueError(ErrorMessages.GROQ_API_KEY_MISSING)
    return ChatGroq(
        model=model_name,
        api_key=api_key,  # reuse the value read above instead of a second getenv
        temperature=Config.LLM_TEMPERATURE,
        max_tokens=Config.MAX_TOKENS,
    )
def create_tavily_search_tool() -> Optional[TavilySearchResults]:
    """
    Build the Tavily web-search tool, degrading gracefully on failure.

    Returns:
        A configured TavilySearchResults instance, or None when the API
        key is absent or construction raises any exception.
    """
    try:
        # No key -> warn and bail out rather than raise; search is optional.
        if os.getenv("TAVILY_API_KEY"):
            return TavilySearchResults(
                max_results=Config.TAVILY_MAX_RESULTS,
                search_depth=Config.TAVILY_SEARCH_DEPTH,
                include_answer=Config.TAVILY_INCLUDE_ANSWER,
                include_raw_content=Config.TAVILY_INCLUDE_RAW_CONTENT
            )
        print(f"Warning: {ErrorMessages.TAVILY_API_KEY_MISSING}")
    except Exception as e:
        # Best-effort by design: surface the problem but never propagate it.
        print(f"Warning: Could not create Tavily tool: {e}")
    return None
def validate_api_keys(model_name: str, use_search: bool = False) -> None:
    """
    Validate that required API keys are available.

    Args:
        model_name: LLM model name
        use_search: Whether web search is requested

    Raises:
        ValueError: If the Groq API key is missing
    """
    # All models run through the Groq client, so the Groq key is mandatory
    # for any model name.  The old "llama"-only check silently passed other
    # model names that would still fail at request time.
    if not os.getenv("GROQ_API_KEY"):
        raise ValueError(ErrorMessages.GROQ_API_KEY_MISSING)
    # Tavily is optional: a missing key degrades search, so only warn.
    if use_search and not os.getenv("TAVILY_API_KEY"):
        print(f"Warning: {ErrorMessages.TAVILY_API_KEY_MISSING}")
def get_available_models() -> list:
    """
    Get list of available models.

    Returns:
        List of available model configurations
    """
    # Deferred import: configs.config is only loaded on first call,
    # mirroring the lazy-import style used elsewhere in this module.
    from configs.config import ModelConfig

    return ModelConfig.AVAILABLE_MODELS
def is_model_supported(model_name: str) -> bool:
    """
    Check if a model is supported.

    Args:
        model_name: Model name to check

    Returns:
        True if model is supported, False otherwise
    """
    from configs.config import ModelConfig  # deferred until first call

    supported: bool = ModelConfig.is_valid_model(model_name)
    return supported
|