File size: 2,218 Bytes
26ed9d3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
from langchain_openai import ChatOpenAI
from langchain.prompts.prompt import PromptTemplate
from typing import Tuple, List
from langchain.schema import format_document


import gradio as gr

from langchain.chat_models import ChatOpenAI
import os
from langchain_openai import ChatOpenAI
import os

# Fallback per-document prompt used by _combine_documents: renders each
# retrieved document as its raw page_content, with no metadata fields.
DEFAULT_DOCUMENT_PROMPT = PromptTemplate.from_template(template="{page_content}")


def make_pairs(lst):
    """Group a flat even-length sequence into consecutive (a, b) tuples.

    Args:
        lst: An indexable sequence with an even number of elements.

    Returns:
        A list of tuples: [(lst[0], lst[1]), (lst[2], lst[3]), ...].
        An empty input yields an empty list.

    Raises:
        ValueError: If ``lst`` has an odd number of elements (previously
            this surfaced as an opaque IndexError).
    """
    if len(lst) % 2:
        raise ValueError("make_pairs requires an even-length sequence")
    return [(lst[i], lst[i + 1]) for i in range(0, len(lst), 2)]

def reset_textbox():
    """Return a Gradio update that clears the bound textbox's value."""
    cleared = gr.update(value="")
    return cleared

def _combine_documents(
    docs, document_prompt=DEFAULT_DOCUMENT_PROMPT, document_separator="\n\n"
):
    """Render each document with *document_prompt* and join the results.

    Every document becomes a numbered, triple-quoted section
    ("Document N: '''...'''"), concatenated with *document_separator*.
    """
    rendered = []
    for index, doc in enumerate(docs, 1):
        body = format_document(doc, document_prompt)
        rendered.append(f"Document {index}: \n'''\n{body}\n'''")
    return document_separator.join(rendered)


def _format_chat_history(chat_history: List[Tuple]) -> str:
    buffer = ""
    for dialogue_turn in chat_history:
        human = "Human: " + dialogue_turn[0]
        ai = "Assistant: " + dialogue_turn[1]
        buffer += "\n" + "\n".join([human, ai])
    return buffer

def _format_chat_history(chat_history: List[Tuple]) -> str:
    turn = 1
    buffer = []
    for dialogue in chat_history:
        buffer.append(("Human: " if turn else "Assistant: ") + dialogue.content)
        turn ^= 1
    return "\n".join(buffer) + "\n"

def get_llm(model="gpt-4o-mini",max_tokens=1024, temperature=0.0, streaming=True,timeout=30, **kwargs):
    """Construct a ChatOpenAI client from the given generation settings.

    Reads the API key from the OPENAI_API_KEY environment variable (falls
    back to None if unset). Extra keyword arguments are forwarded to the
    ChatOpenAI constructor unchanged.
    """
    api_key = os.environ.get("OPENAI_API_KEY", None)
    return ChatOpenAI(
        model=model,
        api_key=api_key,
        max_tokens=max_tokens,
        streaming=streaming,
        temperature=temperature,
        timeout=timeout,
        **kwargs,
    )



def make_html_source(source,i):
    """Render one retrieved document as an HTML card with id "doc{i}".

    Pulls meeting_number, Title and Issues from source.metadata and the
    stripped page_content into a fixed card template.
    """
    # NOTE(review): content is interpolated into HTML without escaping —
    # confirm upstream documents cannot contain markup/script.
    metadata = source.metadata
    body_text = source.page_content.strip()
    meeting = metadata["meeting_number"]
    title = metadata['Title']
    issues = metadata['Issues']
    return f"""
    <div class="card" id="doc{i}">
        <div class="card-content">
            <h2>Document {i} - Meeting {meeting} - title {title} - Issues {issues}</h2>
            <p>{body_text}</p>
        </div>

    </div>
    """