import os
from typing import Literal, cast

from langchain_core.messages import HumanMessage
from pydantic import SecretStr

from setup.easy_imports import ChatOpenAI, ChatGoogleGenerativeAI
from setup.environment import default_model
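
# API keys are read from the environment; cast() only narrows the static type,
# so a missing variable is still None at runtime.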
deepseek_api_key = cast(str, os.environ.get("DEEPSEEKK_API_KEY"))
google_api_key = cast(str, os.environ.get("GOOGLE_API_KEY_PEIXE"))
open_ai_token = cast(str, os.environ.get("OPENAI_API_KEY"))
Google_llms = Literal[
"gemini-2.5-pro-preview-05-06",
"gemini-2.0-flash",
"gemini-2.0-flash-lite",
"gemini-2.5-flash-preview-04-17",
]
class LLM:
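    """Factory for the chat model clients (OpenAI, DeepSeek and Google Gemini) used in the project."""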
def __init__(self):
pass
def open_ai(self, model="gpt-4o-mini"):
return ChatOpenAI(api_key=SecretStr(open_ai_token), model=model)
def deepseek(self, model="deepseek-chat"):
return ChatOpenAI(
api_key=SecretStr(deepseek_api_key),
base_url="https://api.deepseek.com/v1",
model=model,
)
def google_gemini(
self,
model: Google_llms = "gemini-2.0-flash",
):
return ChatGoogleGenerativeAI(
api_key=SecretStr(google_api_key),
model=model,
temperature=0,
max_tokens=None,
timeout=None,
max_retries=2,
)
async def google_gemini_ainvoke(
self,
prompt: str,
model: Google_llms = "gemini-2.0-flash",
max_retries: int = 3,
):
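        """Invoke the chosen Gemini model, retrying on failure and falling back to OpenAI as a last resort."""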
for attempt in range(max_retries):
try:
response = await self.google_gemini(model).ainvoke(
[HumanMessage(content=prompt)]
)
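                # Gemini can return the content as a list of parts; join them into a single string.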
if isinstance(response.content, list):
response.content = "\n".join(response.content) # type: ignore
return response
except Exception as e:
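                # Retry any remaining attempts with the stable flash model.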
model = "gemini-2.0-flash"
print(f"Attempt {attempt + 1} failed with error: {e}")
        # All Gemini retries failed; make one last attempt with an OpenAI fallback model.
        try:
            print("Final attempt with fallback model...")
            response = await self.open_ai("gpt-4o-mini").ainvoke(
                [HumanMessage(content=prompt)]
            )
            return response
        except Exception as e:
            raise Exception(
                f"Failed to generate the final document after {max_retries} retries and the fallback attempt with gpt-4o-mini."
            ) from e
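

# Minimal usage sketch (illustrative only; assumes OPENAI_API_KEY and
# GOOGLE_API_KEY_PEIXE are set in the environment before this module is imported):
if __name__ == "__main__":
    import asyncio

    llm = LLM()
    result = asyncio.run(
        llm.google_gemini_ainvoke(
            "Summarize the following text in one sentence: ...",
            model="gemini-2.5-flash-preview-04-17",
        )
    )
    print(result.content)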