# llmgaurdrails/models.py
from pydantic import BaseModel, Field


class GuardrailsConfig(BaseModel):
    """Flags controlling which guardrail checks run."""

    factual_consistency: bool = True
    toxicity: bool = True
    # Extend with more flags for additional guardrails.


class CheckRequest(BaseModel):
    """Request model: the LLM response to check plus the guardrail configuration."""

    response: str
    # Default config if not provided; default_factory builds a fresh instance per request.
    config: GuardrailsConfig = Field(default_factory=GuardrailsConfig)


class CheckResponse(BaseModel):
    """Result of a guardrail check."""

    grounded: bool
    details: dict
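

# --- Usage sketch (illustrative, not part of the original file) ---
# A minimal example of how these models might back an HTTP endpoint, assuming
# FastAPI as the web framework. The /check route and the stubbed check logic
# below are hypothetical; a real service would invoke actual guardrail checkers
# and aggregate their results.
from fastapi import FastAPI

app = FastAPI()


@app.post("/check", response_model=CheckResponse)
def check(request: CheckRequest) -> CheckResponse:
    details: dict = {}
    if request.config.factual_consistency:
        details["factual_consistency"] = {"checked": True}  # stub result
    if request.config.toxicity:
        details["toxicity"] = {"checked": True}  # stub result
    # Trivially grounded in this stub; real logic would depend on the checks above.
    return CheckResponse(grounded=True, details=details)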