# vella-backend/_utils/LLMs/LLM_class.py
# Author: luanpoppe
# feat: adicionando query dinamicamente (commit a263183, 1.05 kB)
from typing import Literal, cast
from pydantic import SecretStr
from setup.environment import default_model
from setup.easy_imports import ChatOpenAI, ChatGoogleGenerativeAI
import os
# API keys read from the environment at import time. os.environ.get returns
# Optional[str]; the cast only silences the type checker — it does NOT
# guarantee a value. If a variable is unset, these are None at runtime and
# SecretStr(None) will fail later when a client is constructed.
# NOTE(review): "DEEPSEEKK_API_KEY" has a doubled K — confirm the deployment
# env var is really spelled this way before "fixing" the name.
deepseek_api_key = cast(str, os.environ.get("DEEPSEEKK_API_KEY"))
google_api_key = cast(str, os.environ.get("GOOGLE_API_KEY_PEIXE"))
class LLM:
    """Factory for chat-model clients used by the backend.

    Provides pre-configured LangChain chat clients for:
    - DeepSeek (via its OpenAI-compatible REST API, using ChatOpenAI), and
    - Google Gemini (via ChatGoogleGenerativeAI).

    API keys are taken from the module-level ``deepseek_api_key`` and
    ``google_api_key`` constants, which are read from the environment at
    import time.
    """

    def __init__(self) -> None:
        pass

    def deepseek(self, model: str = "deepseek-chat") -> ChatOpenAI:
        """Return a chat client for DeepSeek.

        DeepSeek exposes an OpenAI-compatible API, so ``ChatOpenAI`` is
        reused with a custom ``base_url``.

        Args:
            model: DeepSeek model identifier (default ``"deepseek-chat"``).

        Returns:
            A configured ``ChatOpenAI`` instance targeting DeepSeek.
        """
        return ChatOpenAI(
            api_key=SecretStr(deepseek_api_key),
            base_url="https://api.deepseek.com/v1",
            model=model,
        )

    def google_gemini(
        self,
        model: Literal[
            "gemini-2.5-pro-exp-03-25", "gemini-2.0-flash"
        ] = "gemini-2.0-flash",
    ) -> ChatGoogleGenerativeAI:
        """Return a chat client for Google Gemini.

        Args:
            model: One of the supported Gemini model names
                (default ``"gemini-2.0-flash"``).

        Returns:
            A configured ``ChatGoogleGenerativeAI`` instance with
            deterministic sampling (``temperature=0``).
        """
        return ChatGoogleGenerativeAI(
            api_key=SecretStr(google_api_key),
            model=model,
            temperature=0,      # deterministic output
            max_tokens=None,    # no explicit output-length cap
            timeout=None,       # rely on the library's default timeout
            max_retries=2,      # retry transient API failures twice
        )