# Tests for private_gpt prompt_helper prompt styles.
import pytest
from llama_index.llms import ChatMessage, MessageRole
from private_gpt.components.llm.prompt_helper import (
DefaultPromptStyle,
Llama2PromptStyle,
TagPromptStyle,
get_prompt_style,
)
@pytest.mark.parametrize(
    ("prompt_style", "expected_prompt_style"),
    [
        ("default", DefaultPromptStyle),
        ("llama2", Llama2PromptStyle),
        ("tag", TagPromptStyle),
    ],
)
def test_get_prompt_style_success(prompt_style, expected_prompt_style):
    """Each known prompt-style name resolves to its corresponding style class."""
    resolved = get_prompt_style(prompt_style)
    assert resolved == expected_prompt_style
def test_get_prompt_style_failure():
    """An unrecognized style name raises ValueError with a precise message."""
    unknown_style = "unknown"
    with pytest.raises(ValueError) as exc_info:
        get_prompt_style(unknown_style)
    assert str(exc_info.value) == f"Unknown prompt_style='{unknown_style}'"
def test_tag_prompt_style_format():
    """TagPromptStyle renders each turn as '<|role|>: content' plus an assistant cue."""
    style = TagPromptStyle()
    chat = [
        ChatMessage(content="You are an AI assistant.", role=MessageRole.SYSTEM),
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    # Trailing "<|assistant|>: " leaves the prompt open for the model's reply.
    expected = (
        "<|system|>: You are an AI assistant.\n"
        "<|user|>: Hello, how are you doing?\n"
        "<|assistant|>: "
    )
    assert style.messages_to_prompt(chat) == expected
def test_tag_prompt_style_format_with_system_prompt():
    """The configured default system prompt is used, but an explicit SYSTEM message wins."""
    system_prompt = "This is a system prompt from configuration."
    style = TagPromptStyle(default_system_prompt=system_prompt)

    # Scenario 1: no SYSTEM message — the configured default is injected.
    chat = [
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    expected = (
        f"<|system|>: {system_prompt}\n"
        "<|user|>: Hello, how are you doing?\n"
        "<|assistant|>: "
    )
    assert style.messages_to_prompt(chat) == expected

    # Scenario 2: an explicit SYSTEM message overrides the configured default.
    chat = [
        ChatMessage(
            content="FOO BAR Custom sys prompt from messages.", role=MessageRole.SYSTEM
        ),
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    expected = (
        "<|system|>: FOO BAR Custom sys prompt from messages.\n"
        "<|user|>: Hello, how are you doing?\n"
        "<|assistant|>: "
    )
    assert style.messages_to_prompt(chat) == expected
def test_llama2_prompt_style_format():
    """Llama2PromptStyle wraps turns in the [INST]/<<SYS>> llama-2 chat template."""
    style = Llama2PromptStyle()
    chat = [
        ChatMessage(content="You are an AI assistant.", role=MessageRole.SYSTEM),
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    expected = (
        "<s> [INST] <<SYS>>\n"
        " You are an AI assistant. \n"
        "<</SYS>>\n"
        "\n"
        " Hello, how are you doing? [/INST]"
    )
    assert style.messages_to_prompt(chat) == expected
def test_llama2_prompt_style_with_system_prompt():
    """Default system prompt fills the <<SYS>> block; an explicit SYSTEM message replaces it."""
    system_prompt = "This is a system prompt from configuration."
    style = Llama2PromptStyle(default_system_prompt=system_prompt)

    # Scenario 1: no SYSTEM message — the configured default is injected.
    chat = [
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    expected = (
        "<s> [INST] <<SYS>>\n"
        f" {system_prompt} \n"
        "<</SYS>>\n"
        "\n"
        " Hello, how are you doing? [/INST]"
    )
    assert style.messages_to_prompt(chat) == expected

    # Scenario 2: an explicit SYSTEM message overrides the configured default.
    chat = [
        ChatMessage(
            content="FOO BAR Custom sys prompt from messages.", role=MessageRole.SYSTEM
        ),
        ChatMessage(content="Hello, how are you doing?", role=MessageRole.USER),
    ]
    expected = (
        "<s> [INST] <<SYS>>\n"
        " FOO BAR Custom sys prompt from messages. \n"
        "<</SYS>>\n"
        "\n"
        " Hello, how are you doing? [/INST]"
    )
    assert style.messages_to_prompt(chat) == expected