luanpoppe committed · Commit 9fa4a00 · 1 Parent(s): 3d09051

feat: adding more contextual tests
_utils/gerar_documento_utils/llm_calls.py
CHANGED
@@ -1,5 +1,5 @@
 import os
-from typing import Literal
+from typing import Literal, cast

 from pydantic import SecretStr
 from _utils.langchain_utils.LLM_class import LLM
@@ -62,7 +62,7 @@ async def agemini_answer(
     model: Literal[
         "gemini-2.5-pro-exp-03-25", "gemini-2.0-flash", "gemini-2.0-flash-lite"
     ] = "gemini-2.0-flash",
-):
+) -> str:
     gemini = llm.google_gemini(model)
     resposta = await gemini.ainvoke([HumanMessage(content=prompt)])
-    return resposta.content
+    return cast(str, resposta.content)
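A note on the change above: LangChain message objects type their .content attribute as a union (a str or a list of content parts), so annotating agemini_answer with -> str and returning resposta.content directly would not satisfy a static type checker. The cast(str, ...) call narrows the union for the checker without changing runtime behavior. A minimal standalone sketch of the same pattern, where the langchain_core import path is an assumption and may differ from the repo's actual imports:

# Minimal sketch of the cast() pattern used in the diff above; the
# langchain_core import path is an assumption, not taken from the repo.
from typing import cast

from langchain_core.messages import AIMessage

resposta = AIMessage(content="resposta gerada pelo modelo")

# BaseMessage.content is typed as "str | list[str | dict]"; cast() tells the
# type checker to treat it as str here, with no runtime effect.
texto: str = cast(str, resposta.content)
print(texto)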
_utils/gerar_documento_utils/tests/fixtures/lista_com_20_chunks.py
CHANGED
The diff for this file is too large to render. See raw diff.
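The raw diff is not shown here, but the test file below imports two names from this module, so its rough shape can be inferred. This is a hypothetical outline only; the actual chunk data lives in the unrendered raw diff:

# Hypothetical outline of the fixture module, inferred from the names imported
# by test_contextual_retriever_inside.py; the real chunk contents are not shown.
lista_com_20_chunks_fixture = [
    # ... 20 chunk objects ...
]

lista_de_listas_com_60_chunks_fixture = [
    # ... lists of chunks totalling 60 items (the new test asserts len == 60) ...
]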
_utils/gerar_documento_utils/tests/test_contextual_retriever_inside.py
CHANGED
@@ -19,7 +19,10 @@ from .fixtures.resposta_llm_com_20_chunks import (
     resposta_llm_com_20_chunks_fixture_array,
     lista_20_ids_fixture,
 )
-from .fixtures.lista_com_20_chunks import
+from .fixtures.lista_com_20_chunks import (
+    lista_com_20_chunks_fixture,
+    lista_de_listas_com_60_chunks_fixture,
+)
 from .fixtures.resumo_auxiliar import resumo_auxiliar_fixture
 from .fixtures import mock_llm_call_uma_lista_de_20_chunks

@@ -399,3 +402,23 @@ class TestContextuRetriever:

         print("resultado: ", resultado)
         assert resultado == []
+
+    @pytest.mark.asyncio
+    async def test_contextualize_all_chunks_success(
+        self,
+    ):
+        resultado = await self.context_retrieval.contextualize_all_chunks(
+            lista_de_listas_com_60_chunks_fixture,
+            resumo_auxiliar_fixture,
+            self.axiom,
+        )
+
+        print("resultado: ", resultado)
+        assert resultado
+        assert len(resultado) == 60
+        for r in resultado:
+            assert isinstance(r, ContextualizedChunk)
+            # assert isinstance(r[1], str)
+            # assert isinstance(r[2], str)
+            # assert len(r[1]) > 0
+            # assert len(r[2]) > 0
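The new test exercises contextualize_all_chunks end to end against the 60-chunk fixture and checks that every result is a ContextualizedChunk. For reference, a self-contained sketch of the same async-test pattern, using a hypothetical stand-in coroutine rather than the repo's ContextualRetriever; running it requires the pytest-asyncio plugin implied by the @pytest.mark.asyncio marker above:

# Standalone illustration of the @pytest.mark.asyncio pattern used above.
# fake_contextualize_all_chunks is a hypothetical stand-in, not the repo's code.
import asyncio

import pytest


async def fake_contextualize_all_chunks(listas_de_chunks):
    """Flatten a list of chunk lists, simulating one awaited call per list."""
    resultado = []
    for lista in listas_de_chunks:
        await asyncio.sleep(0)  # placeholder for the real awaited LLM call
        resultado.extend(lista)
    return resultado


@pytest.mark.asyncio
async def test_fake_contextualize_all_chunks_returns_60_items():
    # Mirrors the fixture shape implied by the test: lists of chunks totalling 60.
    listas = [[f"chunk-{i}-{j}" for j in range(20)] for i in range(3)]

    resultado = await fake_contextualize_all_chunks(listas)

    assert resultado
    assert len(resultado) == 60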