import copy
import json
import time

class OpenaiStreamOutputer:
    """
    Create chat completion - OpenAI API Documentation
    * https://platform.openai.com/docs/api-reference/chat/create
    """

    def __init__(self):
        current_time = int(time.time())
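        # Template for each streamed chunk, following the OpenAI chat.completion.chunk schema.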
        self.default_data = {
            "id": "chatcmpl-hugginface",
            "object": "chat.completion.chunk",
            "created": current_time,
            "model": "hugginface",
            "system_fingerprint": "fp_44709d6fcb",
            "choices": [],
            "usage": {
                "prompt_tokens": 0,
                "completion_tokens": 0,
                "total_tokens": 0
            }
        }

    def data_to_string(self, data=None, content_type=""):
        # Serialize the chunk dict to a JSON string; content_type is accepted to
        # mirror output()'s signature but does not affect serialization here.
        return json.dumps(data or {})

    def output(self, content=None, content_type="Completions", tokens_count=0) -> str:
        # Deep-copy the template so nested dicts such as "usage" are not shared
        # between calls; a shallow copy would let token counts leak across outputs.
        data = copy.deepcopy(self.default_data)
        # Build the "choices" entry according to the kind of content being streamed.
        if content_type == "Role":
            data["choices"] = [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": content,
                    },
                    "logprobs": None,
                    "finish_reason": "stop"
                }
            ]
        elif content_type in [
            "Completions",
            "InternalSearchQuery",
            "InternalSearchResult",
            "SuggestedResponses",
        ]:
            if content_type in ["InternalSearchQuery", "InternalSearchResult"]:
                content += "\n"
            data["choices"] = [
                {
                    "index": 0,
                    "message": {
                        "role": "user",
                        "content": content,
                    },
                    "logprobs": None,
                    "finish_reason": None,
                }
            ]
        elif content_type == "Finished":
            data["choices"] = [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": content,
                    },
                    "logprobs": None,
                    "finish_reason": "stop",
                }
            ]
        else:
            data["choices"] = [
                {
                    "index": 0,
                    "message": {
                        "role": "assistant",
                        "content": content,
                    },
                    "logprobs": None,
                    "finish_reason": None,
                }
            ]

        # Update token counts; completion tokens are approximated by whitespace-split word count.
        data["usage"]["prompt_tokens"] += tokens_count
        data["usage"]["completion_tokens"] += len(content.split()) if content else 0
        data["usage"]["total_tokens"] = (
            data["usage"]["prompt_tokens"] + data["usage"]["completion_tokens"]
        )

        return self.data_to_string(data, content_type)
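

# A minimal usage sketch. Assumptions (not part of the original file): the caller
# streams generated text chunk by chunk and wraps each piece with output(); the
# strings below are placeholders, not real model output.
if __name__ == "__main__":
    outputer = OpenaiStreamOutputer()
    # First chunk typically announces the assistant role.
    print(outputer.output(content="", content_type="Role"))
    # Subsequent chunks carry the generated text.
    for chunk in ["Hello", ", ", "world!"]:
        print(outputer.output(content=chunk, content_type="Completions"))
    # Final chunk marks the stream as finished.
    print(outputer.output(content="", content_type="Finished"))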