"""
Translate from OpenAI's `/v1/chat/completions` to VLLM's `/v1/chat/completions`
"""

from typing import Optional, Tuple

from litellm.secret_managers.main import get_secret_str

from ....utils import _remove_additional_properties, _remove_strict_from_schema
from ...openai.chat.gpt_transformation import OpenAIGPTConfig


class HostedVLLMChatConfig(OpenAIGPTConfig):
    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        _tools = non_default_params.pop("tools", None)
        if _tools is not None:
            # Remove 'additionalProperties' from the tool JSON schemas.
            _tools = _remove_additional_properties(_tools)
            # Remove 'strict' from the tool JSON schemas.
            _tools = _remove_strict_from_schema(_tools)
        if _tools is not None:
            non_default_params["tools"] = _tools
        return super().map_openai_params(
            non_default_params, optional_params, model, drop_params
        )

    def _get_openai_compatible_provider_info(
        self, api_base: Optional[str], api_key: Optional[str]
    ) -> Tuple[Optional[str], Optional[str]]:
        # Prefer explicit arguments, then the HOSTED_VLLM_* secrets; fall back to a
        # placeholder API key when none is configured.
        api_base = api_base or get_secret_str("HOSTED_VLLM_API_BASE")
        dynamic_api_key = (
            api_key or get_secret_str("HOSTED_VLLM_API_KEY") or "fake-api-key"
        )
        return api_base, dynamic_api_key
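

# Illustrative usage sketch, not part of litellm's provider wiring: it exercises the
# two transformations above. The tool definition and model name are hypothetical, and
# the block only runs when the module is executed directly (run it as a module so the
# package-relative imports resolve).
if __name__ == "__main__":
    config = HostedVLLMChatConfig()

    # A hypothetical OpenAI-style tool carrying 'additionalProperties' and 'strict',
    # both of which map_openai_params strips before the request reaches the server.
    example_tools = [
        {
            "type": "function",
            "function": {
                "name": "get_weather",
                "strict": True,
                "parameters": {
                    "type": "object",
                    "properties": {"city": {"type": "string"}},
                    "additionalProperties": False,
                },
            },
        }
    ]
    mapped = config.map_openai_params(
        non_default_params={"tools": example_tools, "temperature": 0.2},
        optional_params={},
        model="my-hosted-model",  # placeholder model name
        drop_params=False,
    )
    print(mapped.get("tools"))  # tool schema without 'additionalProperties' / 'strict'

    # Resolves to (HOSTED_VLLM_API_BASE, HOSTED_VLLM_API_KEY) when those secrets are
    # set, otherwise (None, "fake-api-key").
    print(config._get_openai_compatible_provider_info(api_base=None, api_key=None))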