from typing import List, Union, cast

from litellm.litellm_core_utils.prompt_templates.common_utils import (
    convert_content_list_to_str,
)
from litellm.types.llms.openai import (
    AllMessageValues,
    AllPromptValues,
    OpenAITextCompletionUserMessage,
)


def is_tokens_or_list_of_tokens(value: List):
    """Return True if `value` is a list of token ids or a list of token-id lists."""
    # Single prompt passed as pre-tokenized input, e.g. [1, 2, 3]
    if isinstance(value, list) and all(isinstance(item, int) for item in value):
        return True

    # Batch of prompts passed as pre-tokenized input, e.g. [[1, 2], [3, 4]]
    if isinstance(value, list) and all(
        isinstance(item, list) and all(isinstance(i, int) for i in item)
        for item in value
    ):
        return True
    return False
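
# A minimal illustration of the shapes this helper accepts (the example values are
# assumptions for demonstration, not inputs produced by this module):
#
#   is_tokens_or_list_of_tokens([1, 2, 3])          # True  -> single token array
#   is_tokens_or_list_of_tokens([[1, 2], [3, 4]])   # True  -> batch of token arrays
#   is_tokens_or_list_of_tokens(["hi", "there"])    # False -> plain text content
#   is_tokens_or_list_of_tokens([1, [2, 3]])        # False -> mixed shapes
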
def _transform_prompt(
    messages: Union[List[AllMessageValues], List[OpenAITextCompletionUserMessage]],
) -> AllPromptValues:
    """Flatten chat-style messages into an OpenAI text-completion prompt."""
    if len(messages) == 1:
        message_content = messages[0].get("content")
        if (
            message_content
            and isinstance(message_content, list)
            and is_tokens_or_list_of_tokens(message_content)
        ):
            # Pre-tokenized prompt: pass the token array (or batch of arrays) through.
            openai_prompt: AllPromptValues = cast(AllPromptValues, message_content)
        else:
            # Single text message: collapse its content blocks into one string.
            openai_prompt = ""
            content = convert_content_list_to_str(cast(AllMessageValues, messages[0]))
            openai_prompt += content
    else:
        # Multiple messages: build a batch prompt with one string per message.
        prompt_str_list: List[str] = []
        for m in messages:
            content = convert_content_list_to_str(cast(AllMessageValues, m))
            prompt_str_list.append(content)
        openai_prompt = prompt_str_list
    return openai_prompt
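
# A minimal sketch of how the transform behaves, assuming convert_content_list_to_str
# returns a message's text content as a plain str (the message dicts below are
# hypothetical examples, not fixtures from this module):
#
#   _transform_prompt([{"role": "user", "content": "Hello"}])
#       -> "Hello"          # single text message becomes a str prompt
#   _transform_prompt([{"role": "user", "content": [1, 2, 3]}])
#       -> [1, 2, 3]        # pre-tokenized content is passed through unchanged
#   _transform_prompt(
#       [{"role": "user", "content": "Hi"}, {"role": "user", "content": "Bye"}]
#   )
#       -> ["Hi", "Bye"]    # multiple messages become a list of str prompts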