diff --git a/api/.idea/icon.png b/api/.idea/icon.png
new file mode 100644
index 0000000000000000000000000000000000000000..3e262633f3484b4709ae9b71870c6e254f9c0205
Binary files /dev/null and b/api/.idea/icon.png differ
diff --git a/api/.idea/vcs.xml b/api/.idea/vcs.xml
new file mode 100644
index 0000000000000000000000000000000000000000..b7af618884ac3bb98b40de88f62a7471071e2f39
--- /dev/null
+++ b/api/.idea/vcs.xml
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<project version="4">
+  <component name="IssueNavigationConfiguration">
+    <option name="links">
+      <list>
+        <IssueNavigationLink>
+          <option name="issueRegexp" value="#(\d+)" />
+          <option name="linkRegexp" value="https://github.com/langgenius/dify/issues/$1" />
+        </IssueNavigationLink>
+      </list>
+    </option>
+  </component>
+  <component name="VcsDirectoryMappings">
+    <mapping directory="$PROJECT_DIR$/.." vcs="Git" />
+  </component>
+</project>
diff --git a/api/.vscode/launch.json.example b/api/.vscode/launch.json.example
new file mode 100644
index 0000000000000000000000000000000000000000..b9e32e2511a0ca7733ee7a0ed31ffec6bf423b2f
--- /dev/null
+++ b/api/.vscode/launch.json.example
@@ -0,0 +1,61 @@
+{
+ "version": "0.2.0",
+ "compounds": [
+ {
+ "name": "Launch Flask and Celery",
+ "configurations": ["Python: Flask", "Python: Celery"]
+ }
+ ],
+ "configurations": [
+ {
+ "name": "Python: Flask",
+ "consoleName": "Flask",
+ "type": "debugpy",
+ "request": "launch",
+ "python": "${workspaceFolder}/.venv/bin/python",
+ "cwd": "${workspaceFolder}",
+ "envFile": ".env",
+ "module": "flask",
+ "justMyCode": true,
+ "jinja": true,
+ "env": {
+ "FLASK_APP": "app.py",
+ "GEVENT_SUPPORT": "True"
+ },
+ "args": [
+ "run",
+ "--port=5001"
+ ]
+ },
+ {
+ "name": "Python: Celery",
+ "consoleName": "Celery",
+ "type": "debugpy",
+ "request": "launch",
+ "python": "${workspaceFolder}/.venv/bin/python",
+ "cwd": "${workspaceFolder}",
+ "module": "celery",
+ "justMyCode": true,
+ "envFile": ".env",
+ "console": "integratedTerminal",
+ "env": {
+ "FLASK_APP": "app.py",
+ "FLASK_DEBUG": "1",
+ "GEVENT_SUPPORT": "True"
+ },
+ "args": [
+ "-A",
+ "app.celery",
+ "worker",
+ "-P",
+ "gevent",
+ "-c",
+ "1",
+ "--loglevel",
+ "DEBUG",
+ "-Q",
+ "dataset,generation,mail,ops_trace,app_deletion"
+ ]
+ }
+ ]
+}
diff --git a/api/configs/__init__.py b/api/configs/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..3a172601c963827212c7a0ef3dc023ca02f200a2
--- /dev/null
+++ b/api/configs/__init__.py
@@ -0,0 +1,3 @@
+from .app_config import DifyConfig
+
+dify_config = DifyConfig()
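+
+# Illustrative usage (not part of the original file): modules import this
+# shared, validated settings instance instead of reading os.environ directly:
+#   from configs import dify_config
+#   if dify_config.DEBUG:
+#       logging.basicConfig(level=logging.DEBUG)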
diff --git a/api/configs/app_config.py b/api/configs/app_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ac1ce9db100ea405fddcd9b433b4df5ec2f7cc77
--- /dev/null
+++ b/api/configs/app_config.py
@@ -0,0 +1,96 @@
+import logging
+from typing import Any
+
+from pydantic.fields import FieldInfo
+from pydantic_settings import BaseSettings, PydanticBaseSettingsSource, SettingsConfigDict
+
+from .deploy import DeploymentConfig
+from .enterprise import EnterpriseFeatureConfig
+from .extra import ExtraServiceConfig
+from .feature import FeatureConfig
+from .middleware import MiddlewareConfig
+from .packaging import PackagingInfo
+from .remote_settings_sources import RemoteSettingsSource, RemoteSettingsSourceConfig, RemoteSettingsSourceName
+from .remote_settings_sources.apollo import ApolloSettingsSource
+
+logger = logging.getLogger(__name__)
+
+
+class RemoteSettingsSourceFactory(PydanticBaseSettingsSource):
+ def __init__(self, settings_cls: type[BaseSettings]):
+ super().__init__(settings_cls)
+
+ def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+ raise NotImplementedError
+
+ def __call__(self) -> dict[str, Any]:
+ current_state = self.current_state
+ remote_source_name = current_state.get("REMOTE_SETTINGS_SOURCE_NAME")
+ if not remote_source_name:
+ return {}
+
+ remote_source: RemoteSettingsSource | None = None
+ match remote_source_name:
+ case RemoteSettingsSourceName.APOLLO:
+ remote_source = ApolloSettingsSource(current_state)
+ case _:
+ logger.warning(f"Unsupported remote source: {remote_source_name}")
+ return {}
+
+ d: dict[str, Any] = {}
+
+ for field_name, field in self.settings_cls.model_fields.items():
+ field_value, field_key, value_is_complex = remote_source.get_field_value(field, field_name)
+ field_value = remote_source.prepare_field_value(field_name, field, field_value, value_is_complex)
+ if field_value is not None:
+ d[field_key] = field_value
+
+ return d
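+
+    # Illustrative: when REMOTE_SETTINGS_SOURCE_NAME matches
+    # RemoteSettingsSourceName.APOLLO, field values are pulled from Apollo and
+    # merged here; any other (or empty) value makes this source a no-op, so
+    # resolution falls through to the remaining settings sources.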
+
+
+class DifyConfig(
+ # Packaging info
+ PackagingInfo,
+ # Deployment configs
+ DeploymentConfig,
+ # Feature configs
+ FeatureConfig,
+ # Middleware configs
+ MiddlewareConfig,
+ # Extra service configs
+ ExtraServiceConfig,
+ # Remote source configs
+ RemoteSettingsSourceConfig,
+ # Enterprise feature configs
+ # **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
+ EnterpriseFeatureConfig,
+):
+ model_config = SettingsConfigDict(
+ # read from dotenv format config file
+ env_file=".env",
+ env_file_encoding="utf-8",
+ # ignore extra attributes
+ extra="ignore",
+ )
+
+    # Before adding any new config,
+    # please consider placing it in an appropriate existing (or new) config group
+    # for better readability and maintainability.
+    # Thanks for your attention and consideration.
+
+ @classmethod
+ def settings_customise_sources(
+ cls,
+ settings_cls: type[BaseSettings],
+ init_settings: PydanticBaseSettingsSource,
+ env_settings: PydanticBaseSettingsSource,
+ dotenv_settings: PydanticBaseSettingsSource,
+ file_secret_settings: PydanticBaseSettingsSource,
+ ) -> tuple[PydanticBaseSettingsSource, ...]:
+ return (
+ init_settings,
+ env_settings,
+ RemoteSettingsSourceFactory(settings_cls),
+ dotenv_settings,
+ file_secret_settings,
+ )
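+        # Note (per pydantic-settings): sources earlier in the returned tuple
+        # take precedence, so explicit init kwargs and process environment
+        # variables override remotely fetched settings, which in turn override
+        # values read from the .env file.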
diff --git a/api/configs/deploy/__init__.py b/api/configs/deploy/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..950936d3c6546211220177a37034e371d864db36
--- /dev/null
+++ b/api/configs/deploy/__init__.py
@@ -0,0 +1,28 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class DeploymentConfig(BaseSettings):
+ """
+ Configuration settings for application deployment
+ """
+
+ APPLICATION_NAME: str = Field(
+ description="Name of the application, used for identification and logging purposes",
+ default="langgenius/dify",
+ )
+
+ DEBUG: bool = Field(
+ description="Enable debug mode for additional logging and development features",
+ default=False,
+ )
+
+ EDITION: str = Field(
+ description="Deployment edition of the application (e.g., 'SELF_HOSTED', 'CLOUD')",
+ default="SELF_HOSTED",
+ )
+
+ DEPLOY_ENV: str = Field(
+ description="Deployment environment (e.g., 'PRODUCTION', 'DEVELOPMENT'), default to PRODUCTION",
+ default="PRODUCTION",
+ )
diff --git a/api/configs/enterprise/__init__.py b/api/configs/enterprise/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..eda6345e145a95689e0106d8b486a109822f334c
--- /dev/null
+++ b/api/configs/enterprise/__init__.py
@@ -0,0 +1,20 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class EnterpriseFeatureConfig(BaseSettings):
+ """
+ Configuration for enterprise-level features.
+ **Before using, please contact business@dify.ai by email to inquire about licensing matters.**
+ """
+
+ ENTERPRISE_ENABLED: bool = Field(
+ description="Enable or disable enterprise-level features."
+ "Before using, please contact business@dify.ai by email to inquire about licensing matters.",
+ default=False,
+ )
+
+ CAN_REPLACE_LOGO: bool = Field(
+ description="Allow customization of the enterprise logo.",
+ default=False,
+ )
diff --git a/api/configs/extra/__init__.py b/api/configs/extra/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4543b5389d1b92fb8149f74f2762cfa6e2fca37c
--- /dev/null
+++ b/api/configs/extra/__init__.py
@@ -0,0 +1,10 @@
+from configs.extra.notion_config import NotionConfig
+from configs.extra.sentry_config import SentryConfig
+
+
+class ExtraServiceConfig(
+    # place the configs in alphabetical order
+ NotionConfig,
+ SentryConfig,
+):
+ pass
diff --git a/api/configs/extra/notion_config.py b/api/configs/extra/notion_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9c4d7346399ad11087abfa6cc3370db590a6d4d
--- /dev/null
+++ b/api/configs/extra/notion_config.py
@@ -0,0 +1,36 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class NotionConfig(BaseSettings):
+ """
+ Configuration settings for Notion integration
+ """
+
+ NOTION_CLIENT_ID: Optional[str] = Field(
+ description="Client ID for Notion API authentication. Required for OAuth 2.0 flow.",
+ default=None,
+ )
+
+ NOTION_CLIENT_SECRET: Optional[str] = Field(
+ description="Client secret for Notion API authentication. Required for OAuth 2.0 flow.",
+ default=None,
+ )
+
+ NOTION_INTEGRATION_TYPE: Optional[str] = Field(
+ description="Type of Notion integration."
+ " Set to 'internal' for internal integrations, or None for public integrations.",
+ default=None,
+ )
+
+ NOTION_INTERNAL_SECRET: Optional[str] = Field(
+ description="Secret key for internal Notion integrations. Required when NOTION_INTEGRATION_TYPE is 'internal'.",
+ default=None,
+ )
+
+ NOTION_INTEGRATION_TOKEN: Optional[str] = Field(
+ description="Integration token for Notion API access. Used for direct API calls without OAuth flow.",
+ default=None,
+ )
diff --git a/api/configs/extra/sentry_config.py b/api/configs/extra/sentry_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..f76a6bdb95ca5b6eccf2db2e86a7bd765b5d0fc2
--- /dev/null
+++ b/api/configs/extra/sentry_config.py
@@ -0,0 +1,28 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeFloat
+from pydantic_settings import BaseSettings
+
+
+class SentryConfig(BaseSettings):
+ """
+ Configuration settings for Sentry error tracking and performance monitoring
+ """
+
+ SENTRY_DSN: Optional[str] = Field(
+ description="Sentry Data Source Name (DSN)."
+ " This is the unique identifier of your Sentry project, used to send events to the correct project.",
+ default=None,
+ )
+
+ SENTRY_TRACES_SAMPLE_RATE: NonNegativeFloat = Field(
+ description="Sample rate for Sentry performance monitoring traces."
+ " Value between 0.0 and 1.0, where 1.0 means 100% of traces are sent to Sentry.",
+ default=1.0,
+ )
+
+ SENTRY_PROFILES_SAMPLE_RATE: NonNegativeFloat = Field(
+ description="Sample rate for Sentry profiling."
+ " Value between 0.0 and 1.0, where 1.0 means 100% of profiles are sent to Sentry.",
+ default=1.0,
+ )
diff --git a/api/configs/feature/__init__.py b/api/configs/feature/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d791f51c4345170dbed5d01785a633dfb86f6e52
--- /dev/null
+++ b/api/configs/feature/__init__.py
@@ -0,0 +1,822 @@
+from typing import Annotated, Literal, Optional
+
+from pydantic import (
+ AliasChoices,
+ Field,
+ HttpUrl,
+ NegativeInt,
+ NonNegativeInt,
+ PositiveFloat,
+ PositiveInt,
+ computed_field,
+)
+from pydantic_settings import BaseSettings
+
+from configs.feature.hosted_service import HostedServiceConfig
+
+
+class SecurityConfig(BaseSettings):
+ """
+ Security-related configurations for the application
+ """
+
+ SECRET_KEY: str = Field(
+ description="Secret key for secure session cookie signing."
+ "Make sure you are changing this key for your deployment with a strong key."
+ "Generate a strong key using `openssl rand -base64 42` or set via the `SECRET_KEY` environment variable.",
+ default="",
+ )
+
+ RESET_PASSWORD_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
+ description="Duration in minutes for which a password reset token remains valid",
+ default=5,
+ )
+
+ LOGIN_DISABLED: bool = Field(
+ description="Whether to disable login checks",
+ default=False,
+ )
+
+ ADMIN_API_KEY_ENABLE: bool = Field(
+ description="Whether to enable admin api key for authentication",
+ default=False,
+ )
+
+ ADMIN_API_KEY: Optional[str] = Field(
+ description="admin api key for authentication",
+ default=None,
+ )
+
+
+class AppExecutionConfig(BaseSettings):
+ """
+ Configuration parameters for application execution
+ """
+
+ APP_MAX_EXECUTION_TIME: PositiveInt = Field(
+ description="Maximum allowed execution time for the application in seconds",
+ default=1200,
+ )
+ APP_MAX_ACTIVE_REQUESTS: NonNegativeInt = Field(
+ description="Maximum number of concurrent active requests per app (0 for unlimited)",
+ default=0,
+ )
+
+
+class CodeExecutionSandboxConfig(BaseSettings):
+ """
+ Configuration for the code execution sandbox environment
+ """
+
+ CODE_EXECUTION_ENDPOINT: HttpUrl = Field(
+ description="URL endpoint for the code execution service",
+ default="http://sandbox:8194",
+ )
+
+ CODE_EXECUTION_API_KEY: str = Field(
+ description="API key for accessing the code execution service",
+ default="dify-sandbox",
+ )
+
+ CODE_EXECUTION_CONNECT_TIMEOUT: Optional[float] = Field(
+ description="Connection timeout in seconds for code execution requests",
+ default=10.0,
+ )
+
+ CODE_EXECUTION_READ_TIMEOUT: Optional[float] = Field(
+ description="Read timeout in seconds for code execution requests",
+ default=60.0,
+ )
+
+ CODE_EXECUTION_WRITE_TIMEOUT: Optional[float] = Field(
+ description="Write timeout in seconds for code execution request",
+ default=10.0,
+ )
+
+ CODE_MAX_NUMBER: PositiveInt = Field(
+ description="Maximum allowed numeric value in code execution",
+ default=9223372036854775807,
+ )
+
+ CODE_MIN_NUMBER: NegativeInt = Field(
+ description="Minimum allowed numeric value in code execution",
+ default=-9223372036854775807,
+ )
+
+ CODE_MAX_DEPTH: PositiveInt = Field(
+ description="Maximum allowed depth for nested structures in code execution",
+ default=5,
+ )
+
+ CODE_MAX_PRECISION: PositiveInt = Field(
+ description="Maximum number of decimal places for floating-point numbers in code execution",
+ default=20,
+ )
+
+ CODE_MAX_STRING_LENGTH: PositiveInt = Field(
+ description="Maximum allowed length for strings in code execution",
+ default=80000,
+ )
+
+ CODE_MAX_STRING_ARRAY_LENGTH: PositiveInt = Field(
+ description="Maximum allowed length for string arrays in code execution",
+ default=30,
+ )
+
+ CODE_MAX_OBJECT_ARRAY_LENGTH: PositiveInt = Field(
+ description="Maximum allowed length for object arrays in code execution",
+ default=30,
+ )
+
+ CODE_MAX_NUMBER_ARRAY_LENGTH: PositiveInt = Field(
+ description="Maximum allowed length for numeric arrays in code execution",
+ default=1000,
+ )
+
+
+class EndpointConfig(BaseSettings):
+ """
+ Configuration for various application endpoints and URLs
+ """
+
+ CONSOLE_API_URL: str = Field(
+ description="Base URL for the console API,"
+ "used for login authentication callback or notion integration callbacks",
+ default="",
+ )
+
+ CONSOLE_WEB_URL: str = Field(
+ description="Base URL for the console web interface,used for frontend references and CORS configuration",
+ default="",
+ )
+
+ SERVICE_API_URL: str = Field(
+ description="Base URL for the service API, displayed to users for API access",
+ default="",
+ )
+
+ APP_WEB_URL: str = Field(
+ description="Base URL for the web application, used for frontend references",
+ default="",
+ )
+
+
+class FileAccessConfig(BaseSettings):
+ """
+ Configuration for file access and handling
+ """
+
+ FILES_URL: str = Field(
+ description="Base URL for file preview or download,"
+ " used for frontend display and multi-model inputs"
+ "Url is signed and has expiration time.",
+ validation_alias=AliasChoices("FILES_URL", "CONSOLE_API_URL"),
+ alias_priority=1,
+ default="",
+ )
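+    # Illustrative: if FILES_URL is unset, the AliasChoices fallback above
+    # reuses the CONSOLE_API_URL value instead.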
+
+ FILES_ACCESS_TIMEOUT: int = Field(
+ description="Expiration time in seconds for file access URLs",
+ default=300,
+ )
+
+
+class FileUploadConfig(BaseSettings):
+ """
+ Configuration for file upload limitations
+ """
+
+ UPLOAD_FILE_SIZE_LIMIT: NonNegativeInt = Field(
+ description="Maximum allowed file size for uploads in megabytes",
+ default=15,
+ )
+
+ UPLOAD_FILE_BATCH_LIMIT: NonNegativeInt = Field(
+ description="Maximum number of files allowed in a single upload batch",
+ default=5,
+ )
+
+ UPLOAD_IMAGE_FILE_SIZE_LIMIT: NonNegativeInt = Field(
+ description="Maximum allowed image file size for uploads in megabytes",
+ default=10,
+ )
+
+ UPLOAD_VIDEO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
+ description="video file size limit in Megabytes for uploading files",
+ default=100,
+ )
+
+ UPLOAD_AUDIO_FILE_SIZE_LIMIT: NonNegativeInt = Field(
+ description="audio file size limit in Megabytes for uploading files",
+ default=50,
+ )
+
+ BATCH_UPLOAD_LIMIT: NonNegativeInt = Field(
+ description="Maximum number of files allowed in a batch upload operation",
+ default=20,
+ )
+
+ WORKFLOW_FILE_UPLOAD_LIMIT: PositiveInt = Field(
+ description="Maximum number of files allowed in a workflow upload operation",
+ default=10,
+ )
+
+
+class HttpConfig(BaseSettings):
+ """
+ HTTP-related configurations for the application
+ """
+
+ API_COMPRESSION_ENABLED: bool = Field(
+ description="Enable or disable gzip compression for HTTP responses",
+ default=False,
+ )
+
+ inner_CONSOLE_CORS_ALLOW_ORIGINS: str = Field(
+ description="Comma-separated list of allowed origins for CORS in the console",
+ validation_alias=AliasChoices("CONSOLE_CORS_ALLOW_ORIGINS", "CONSOLE_WEB_URL"),
+ default="",
+ )
+
+ @computed_field
+ def CONSOLE_CORS_ALLOW_ORIGINS(self) -> list[str]:
+ return self.inner_CONSOLE_CORS_ALLOW_ORIGINS.split(",")
+
+ inner_WEB_API_CORS_ALLOW_ORIGINS: str = Field(
+ description="",
+ validation_alias=AliasChoices("WEB_API_CORS_ALLOW_ORIGINS"),
+ default="*",
+ )
+
+ @computed_field
+ def WEB_API_CORS_ALLOW_ORIGINS(self) -> list[str]:
+ return self.inner_WEB_API_CORS_ALLOW_ORIGINS.split(",")
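+    # Illustrative: WEB_API_CORS_ALLOW_ORIGINS="https://a.example,https://b.example"
+    # yields ["https://a.example", "https://b.example"]; the default "*" yields ["*"].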
+
+ HTTP_REQUEST_MAX_CONNECT_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=10, description="Maximum connection timeout in seconds for HTTP requests")
+ ] = 10
+
+ HTTP_REQUEST_MAX_READ_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=60, description="Maximum read timeout in seconds for HTTP requests")
+ ] = 60
+
+ HTTP_REQUEST_MAX_WRITE_TIMEOUT: Annotated[
+ PositiveInt, Field(ge=10, description="Maximum write timeout in seconds for HTTP requests")
+ ] = 20
+
+ HTTP_REQUEST_NODE_MAX_BINARY_SIZE: PositiveInt = Field(
+ description="Maximum allowed size in bytes for binary data in HTTP requests",
+ default=10 * 1024 * 1024,
+ )
+
+ HTTP_REQUEST_NODE_MAX_TEXT_SIZE: PositiveInt = Field(
+ description="Maximum allowed size in bytes for text data in HTTP requests",
+ default=1 * 1024 * 1024,
+ )
+
+ SSRF_DEFAULT_MAX_RETRIES: PositiveInt = Field(
+ description="Maximum number of retries for network requests (SSRF)",
+ default=3,
+ )
+
+ SSRF_PROXY_ALL_URL: Optional[str] = Field(
+ description="Proxy URL for HTTP or HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
+ default=None,
+ )
+
+ SSRF_PROXY_HTTP_URL: Optional[str] = Field(
+ description="Proxy URL for HTTP requests to prevent Server-Side Request Forgery (SSRF)",
+ default=None,
+ )
+
+ SSRF_PROXY_HTTPS_URL: Optional[str] = Field(
+ description="Proxy URL for HTTPS requests to prevent Server-Side Request Forgery (SSRF)",
+ default=None,
+ )
+
+ SSRF_DEFAULT_TIME_OUT: PositiveFloat = Field(
+ description="The default timeout period used for network requests (SSRF)",
+ default=5,
+ )
+
+ SSRF_DEFAULT_CONNECT_TIME_OUT: PositiveFloat = Field(
+ description="The default connect timeout period used for network requests (SSRF)",
+ default=5,
+ )
+
+ SSRF_DEFAULT_READ_TIME_OUT: PositiveFloat = Field(
+ description="The default read timeout period used for network requests (SSRF)",
+ default=5,
+ )
+
+ SSRF_DEFAULT_WRITE_TIME_OUT: PositiveFloat = Field(
+ description="The default write timeout period used for network requests (SSRF)",
+ default=5,
+ )
+
+ RESPECT_XFORWARD_HEADERS_ENABLED: bool = Field(
+ description="Enable handling of X-Forwarded-For, X-Forwarded-Proto, and X-Forwarded-Port headers"
+ " when the app is behind a single trusted reverse proxy.",
+ default=False,
+ )
+
+
+class InnerAPIConfig(BaseSettings):
+ """
+ Configuration for internal API functionality
+ """
+
+ INNER_API: bool = Field(
+ description="Enable or disable the internal API",
+ default=False,
+ )
+
+ INNER_API_KEY: Optional[str] = Field(
+ description="API key for accessing the internal API",
+ default=None,
+ )
+
+
+class LoggingConfig(BaseSettings):
+ """
+ Configuration for application logging
+ """
+
+ LOG_LEVEL: str = Field(
+ description="Logging level, default to INFO. Set to ERROR for production environments.",
+ default="INFO",
+ )
+
+ LOG_FILE: Optional[str] = Field(
+ description="File path for log output.",
+ default=None,
+ )
+
+ LOG_FILE_MAX_SIZE: PositiveInt = Field(
+ description="Maximum file size for file rotation retention, the unit is megabytes (MB)",
+ default=20,
+ )
+
+ LOG_FILE_BACKUP_COUNT: PositiveInt = Field(
+ description="Maximum file backup count file rotation retention",
+ default=5,
+ )
+
+ LOG_FORMAT: str = Field(
+ description="Format string for log messages",
+ default="%(asctime)s.%(msecs)03d %(levelname)s [%(threadName)s] [%(filename)s:%(lineno)d] - %(message)s",
+ )
+
+ LOG_DATEFORMAT: Optional[str] = Field(
+ description="Date format string for log timestamps",
+ default=None,
+ )
+
+ LOG_TZ: Optional[str] = Field(
+ description="Timezone for log timestamps (e.g., 'America/New_York')",
+ default="UTC",
+ )
+
+
+class ModelLoadBalanceConfig(BaseSettings):
+ """
+ Configuration for model load balancing
+ """
+
+ MODEL_LB_ENABLED: bool = Field(
+ description="Enable or disable load balancing for models",
+ default=False,
+ )
+
+
+class BillingConfig(BaseSettings):
+ """
+ Configuration for platform billing features
+ """
+
+ BILLING_ENABLED: bool = Field(
+ description="Enable or disable billing functionality",
+ default=False,
+ )
+
+
+class UpdateConfig(BaseSettings):
+ """
+ Configuration for application update checks
+ """
+
+ CHECK_UPDATE_URL: str = Field(
+ description="URL to check for application updates",
+ default="https://updates.dify.ai",
+ )
+
+
+class WorkflowConfig(BaseSettings):
+ """
+ Configuration for workflow execution
+ """
+
+ WORKFLOW_MAX_EXECUTION_STEPS: PositiveInt = Field(
+ description="Maximum number of steps allowed in a single workflow execution",
+ default=500,
+ )
+
+ WORKFLOW_MAX_EXECUTION_TIME: PositiveInt = Field(
+ description="Maximum execution time in seconds for a single workflow",
+ default=1200,
+ )
+
+ WORKFLOW_CALL_MAX_DEPTH: PositiveInt = Field(
+ description="Maximum allowed depth for nested workflow calls",
+ default=5,
+ )
+
+ WORKFLOW_PARALLEL_DEPTH_LIMIT: PositiveInt = Field(
+ description="Maximum allowed depth for nested parallel executions",
+ default=3,
+ )
+
+ MAX_VARIABLE_SIZE: PositiveInt = Field(
+ description="Maximum size in bytes for a single variable in workflows. Default to 200 KB.",
+ default=200 * 1024,
+ )
+
+
+class WorkflowNodeExecutionConfig(BaseSettings):
+ """
+ Configuration for workflow node execution
+ """
+
+ MAX_SUBMIT_COUNT: PositiveInt = Field(
+ description="Maximum number of submitted thread count in a ThreadPool for parallel node execution",
+ default=100,
+ )
+
+
+class AuthConfig(BaseSettings):
+ """
+ Configuration for authentication and OAuth
+ """
+
+ OAUTH_REDIRECT_PATH: str = Field(
+ description="Redirect path for OAuth authentication callbacks",
+ default="/console/api/oauth/authorize",
+ )
+
+ GITHUB_CLIENT_ID: Optional[str] = Field(
+ description="GitHub OAuth client ID",
+ default=None,
+ )
+
+ GITHUB_CLIENT_SECRET: Optional[str] = Field(
+ description="GitHub OAuth client secret",
+ default=None,
+ )
+
+ GOOGLE_CLIENT_ID: Optional[str] = Field(
+ description="Google OAuth client ID",
+ default=None,
+ )
+
+ GOOGLE_CLIENT_SECRET: Optional[str] = Field(
+ description="Google OAuth client secret",
+ default=None,
+ )
+
+ ACCESS_TOKEN_EXPIRE_MINUTES: PositiveInt = Field(
+ description="Expiration time for access tokens in minutes",
+ default=60,
+ )
+
+ REFRESH_TOKEN_EXPIRE_DAYS: PositiveFloat = Field(
+ description="Expiration time for refresh tokens in days",
+ default=30,
+ )
+
+ LOGIN_LOCKOUT_DURATION: PositiveInt = Field(
+ description="Time (in seconds) a user must wait before retrying login after exceeding the rate limit.",
+ default=86400,
+ )
+
+ FORGOT_PASSWORD_LOCKOUT_DURATION: PositiveInt = Field(
+ description="Time (in seconds) a user must wait before retrying password reset after exceeding the rate limit.",
+ default=86400,
+ )
+
+
+class ModerationConfig(BaseSettings):
+ """
+ Configuration for content moderation
+ """
+
+ MODERATION_BUFFER_SIZE: PositiveInt = Field(
+ description="Size of the buffer for content moderation processing",
+ default=300,
+ )
+
+
+class ToolConfig(BaseSettings):
+ """
+ Configuration for tool management
+ """
+
+ TOOL_ICON_CACHE_MAX_AGE: PositiveInt = Field(
+ description="Maximum age in seconds for caching tool icons",
+ default=3600,
+ )
+
+
+class MailConfig(BaseSettings):
+ """
+ Configuration for email services
+ """
+
+ MAIL_TYPE: Optional[str] = Field(
+ description="Email service provider type ('smtp' or 'resend'), default to None.",
+ default=None,
+ )
+
+ MAIL_DEFAULT_SEND_FROM: Optional[str] = Field(
+ description="Default email address to use as the sender",
+ default=None,
+ )
+
+ RESEND_API_KEY: Optional[str] = Field(
+ description="API key for Resend email service",
+ default=None,
+ )
+
+ RESEND_API_URL: Optional[str] = Field(
+ description="API URL for Resend email service",
+ default=None,
+ )
+
+ SMTP_SERVER: Optional[str] = Field(
+ description="SMTP server hostname",
+ default=None,
+ )
+
+ SMTP_PORT: Optional[int] = Field(
+ description="SMTP server port number",
+ default=465,
+ )
+
+ SMTP_USERNAME: Optional[str] = Field(
+ description="Username for SMTP authentication",
+ default=None,
+ )
+
+ SMTP_PASSWORD: Optional[str] = Field(
+ description="Password for SMTP authentication",
+ default=None,
+ )
+
+ SMTP_USE_TLS: bool = Field(
+ description="Enable TLS encryption for SMTP connections",
+ default=False,
+ )
+
+ SMTP_OPPORTUNISTIC_TLS: bool = Field(
+ description="Enable opportunistic TLS for SMTP connections",
+ default=False,
+ )
+
+ EMAIL_SEND_IP_LIMIT_PER_MINUTE: PositiveInt = Field(
+ description="Maximum number of emails allowed to be sent from the same IP address in a minute",
+ default=50,
+ )
+
+
+class RagEtlConfig(BaseSettings):
+ """
+ Configuration for RAG ETL processes
+ """
+
+ # TODO: This config is not only for rag etl, it is also for file upload, we should move it to file upload config
+ ETL_TYPE: str = Field(
+ description="RAG ETL type ('dify' or 'Unstructured'), default to 'dify'",
+ default="dify",
+ )
+
+ KEYWORD_DATA_SOURCE_TYPE: str = Field(
+ description="Data source type for keyword extraction"
+ " ('database' or other supported types), default to 'database'",
+ default="database",
+ )
+
+ UNSTRUCTURED_API_URL: Optional[str] = Field(
+ description="API URL for Unstructured.io service",
+ default=None,
+ )
+
+ UNSTRUCTURED_API_KEY: Optional[str] = Field(
+ description="API key for Unstructured.io service",
+ default="",
+ )
+
+ SCARF_NO_ANALYTICS: Optional[str] = Field(
+ description="This is about whether to disable Scarf analytics in Unstructured library.",
+ default="false",
+ )
+
+
+class DataSetConfig(BaseSettings):
+ """
+ Configuration for dataset management
+ """
+
+ PLAN_SANDBOX_CLEAN_DAY_SETTING: PositiveInt = Field(
+ description="Interval in days for dataset cleanup operations - plan: sandbox",
+ default=30,
+ )
+
+ PLAN_PRO_CLEAN_DAY_SETTING: PositiveInt = Field(
+ description="Interval in days for dataset cleanup operations - plan: pro and team",
+ default=7,
+ )
+
+ DATASET_OPERATOR_ENABLED: bool = Field(
+ description="Enable or disable dataset operator functionality",
+ default=False,
+ )
+
+ TIDB_SERVERLESS_NUMBER: PositiveInt = Field(
+ description="number of tidb serverless cluster",
+ default=500,
+ )
+
+ CREATE_TIDB_SERVICE_JOB_ENABLED: bool = Field(
+ description="Enable or disable create tidb service job",
+ default=False,
+ )
+
+ PLAN_SANDBOX_CLEAN_MESSAGE_DAY_SETTING: PositiveInt = Field(
+ description="Interval in days for message cleanup operations - plan: sandbox",
+ default=30,
+ )
+
+
+class WorkspaceConfig(BaseSettings):
+ """
+ Configuration for workspace management
+ """
+
+ INVITE_EXPIRY_HOURS: PositiveInt = Field(
+ description="Expiration time in hours for workspace invitation links",
+ default=72,
+ )
+
+
+class IndexingConfig(BaseSettings):
+ """
+ Configuration for indexing operations
+ """
+
+ INDEXING_MAX_SEGMENTATION_TOKENS_LENGTH: PositiveInt = Field(
+ description="Maximum token length for text segmentation during indexing",
+ default=4000,
+ )
+
+ CHILD_CHUNKS_PREVIEW_NUMBER: PositiveInt = Field(
+ description="Maximum number of child chunks to preview",
+ default=50,
+ )
+
+
+class MultiModalTransferConfig(BaseSettings):
+ MULTIMODAL_SEND_FORMAT: Literal["base64", "url"] = Field(
+ description="Format for sending files in multimodal contexts ('base64' or 'url'), default is base64",
+ default="base64",
+ )
+
+
+class CeleryBeatConfig(BaseSettings):
+ CELERY_BEAT_SCHEDULER_TIME: int = Field(
+ description="Interval in days for Celery Beat scheduler execution, default to 1 day",
+ default=1,
+ )
+
+
+class PositionConfig(BaseSettings):
+ POSITION_PROVIDER_PINS: str = Field(
+ description="Comma-separated list of pinned model providers",
+ default="",
+ )
+
+ POSITION_PROVIDER_INCLUDES: str = Field(
+ description="Comma-separated list of included model providers",
+ default="",
+ )
+
+ POSITION_PROVIDER_EXCLUDES: str = Field(
+ description="Comma-separated list of excluded model providers",
+ default="",
+ )
+
+ POSITION_TOOL_PINS: str = Field(
+ description="Comma-separated list of pinned tools",
+ default="",
+ )
+
+ POSITION_TOOL_INCLUDES: str = Field(
+ description="Comma-separated list of included tools",
+ default="",
+ )
+
+ POSITION_TOOL_EXCLUDES: str = Field(
+ description="Comma-separated list of excluded tools",
+ default="",
+ )
+
+ @property
+ def POSITION_PROVIDER_PINS_LIST(self) -> list[str]:
+ return [item.strip() for item in self.POSITION_PROVIDER_PINS.split(",") if item.strip() != ""]
+
+ @property
+ def POSITION_PROVIDER_INCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_PROVIDER_INCLUDES.split(",") if item.strip() != ""}
+
+ @property
+ def POSITION_PROVIDER_EXCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_PROVIDER_EXCLUDES.split(",") if item.strip() != ""}
+
+ @property
+ def POSITION_TOOL_PINS_LIST(self) -> list[str]:
+ return [item.strip() for item in self.POSITION_TOOL_PINS.split(",") if item.strip() != ""]
+
+ @property
+ def POSITION_TOOL_INCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_TOOL_INCLUDES.split(",") if item.strip() != ""}
+
+ @property
+ def POSITION_TOOL_EXCLUDES_SET(self) -> set[str]:
+ return {item.strip() for item in self.POSITION_TOOL_EXCLUDES.split(",") if item.strip() != ""}
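+
+    # Illustrative: POSITION_TOOL_PINS="weather, search" gives
+    # POSITION_TOOL_PINS_LIST == ["weather", "search"]; empty items and stray
+    # whitespace around commas are dropped by the parsing above.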
+
+
+class LoginConfig(BaseSettings):
+    ENABLE_EMAIL_CODE_LOGIN: bool = Field(
+        description="Whether to enable email code login",
+        default=False,
+    )
+    ENABLE_EMAIL_PASSWORD_LOGIN: bool = Field(
+        description="Whether to enable email and password login",
+        default=True,
+    )
+    ENABLE_SOCIAL_OAUTH_LOGIN: bool = Field(
+        description="Whether to enable GitHub/Google OAuth login",
+        default=False,
+    )
+    EMAIL_CODE_LOGIN_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
+        description="Expiry time in minutes for email code login tokens",
+        default=5,
+    )
+    ALLOW_REGISTER: bool = Field(
+        description="Whether to allow user registration",
+        default=False,
+    )
+    ALLOW_CREATE_WORKSPACE: bool = Field(
+        description="Whether to allow workspace creation",
+        default=False,
+    )
+
+
+class AccountConfig(BaseSettings):
+ ACCOUNT_DELETION_TOKEN_EXPIRY_MINUTES: PositiveInt = Field(
+ description="Duration in minutes for which a account deletion token remains valid",
+ default=5,
+ )
+
+
+class FeatureConfig(
+    # place the configs in alphabetical order
+ AppExecutionConfig,
+    AuthConfig,
+ BillingConfig,
+ CodeExecutionSandboxConfig,
+ DataSetConfig,
+ EndpointConfig,
+ FileAccessConfig,
+ FileUploadConfig,
+ HttpConfig,
+ InnerAPIConfig,
+ IndexingConfig,
+ LoggingConfig,
+ MailConfig,
+ ModelLoadBalanceConfig,
+ ModerationConfig,
+ MultiModalTransferConfig,
+ PositionConfig,
+ RagEtlConfig,
+ SecurityConfig,
+ ToolConfig,
+ UpdateConfig,
+ WorkflowConfig,
+ WorkflowNodeExecutionConfig,
+ WorkspaceConfig,
+ LoginConfig,
+ AccountConfig,
+ # hosted services config
+ HostedServiceConfig,
+ CeleryBeatConfig,
+):
+ pass
diff --git a/api/configs/feature/hosted_service/__init__.py b/api/configs/feature/hosted_service/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..71d06f4623fb1d62d65086f84711e4c047f36a77
--- /dev/null
+++ b/api/configs/feature/hosted_service/__init__.py
@@ -0,0 +1,239 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, computed_field
+from pydantic_settings import BaseSettings
+
+
+class HostedCreditConfig(BaseSettings):
+ HOSTED_MODEL_CREDIT_CONFIG: str = Field(
+ description="Model credit configuration in format 'model:credits,model:credits', e.g., 'gpt-4:20,gpt-4o:10'",
+ default="",
+ )
+
+ def get_model_credits(self, model_name: str) -> int:
+ """
+ Get credit value for a specific model name.
+ Returns 1 if model is not found in configuration (default credit).
+
+ :param model_name: The name of the model to search for
+ :return: The credit value for the model
+ """
+ if not self.HOSTED_MODEL_CREDIT_CONFIG:
+ return 1
+
+ try:
+ credit_map = dict(
+ item.strip().split(":", 1) for item in self.HOSTED_MODEL_CREDIT_CONFIG.split(",") if ":" in item
+ )
+
+ # Search for matching model pattern
+ for pattern, credit in credit_map.items():
+ if pattern.strip() == model_name:
+ return int(credit)
+ return 1 # Default quota if no match found
+ except (ValueError, AttributeError):
+ return 1 # Return default quota if parsing fails
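+
+    # Illustrative: with HOSTED_MODEL_CREDIT_CONFIG="gpt-4:20,gpt-4o:10",
+    # get_model_credits("gpt-4") returns 20, while an unlisted model name
+    # falls back to the default credit of 1.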
+
+
+class HostedOpenAiConfig(BaseSettings):
+ """
+ Configuration for hosted OpenAI service
+ """
+
+ HOSTED_OPENAI_API_KEY: Optional[str] = Field(
+ description="API key for hosted OpenAI service",
+ default=None,
+ )
+
+ HOSTED_OPENAI_API_BASE: Optional[str] = Field(
+ description="Base URL for hosted OpenAI API",
+ default=None,
+ )
+
+ HOSTED_OPENAI_API_ORGANIZATION: Optional[str] = Field(
+ description="Organization ID for hosted OpenAI service",
+ default=None,
+ )
+
+ HOSTED_OPENAI_TRIAL_ENABLED: bool = Field(
+ description="Enable trial access to hosted OpenAI service",
+ default=False,
+ )
+
+ HOSTED_OPENAI_TRIAL_MODELS: str = Field(
+ description="Comma-separated list of available models for trial access",
+ default="gpt-3.5-turbo,"
+ "gpt-3.5-turbo-1106,"
+ "gpt-3.5-turbo-instruct,"
+ "gpt-3.5-turbo-16k,"
+ "gpt-3.5-turbo-16k-0613,"
+ "gpt-3.5-turbo-0613,"
+ "gpt-3.5-turbo-0125,"
+ "text-davinci-003",
+ )
+
+ HOSTED_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
+ description="Quota limit for hosted OpenAI service usage",
+ default=200,
+ )
+
+ HOSTED_OPENAI_PAID_ENABLED: bool = Field(
+ description="Enable paid access to hosted OpenAI service",
+ default=False,
+ )
+
+ HOSTED_OPENAI_PAID_MODELS: str = Field(
+ description="Comma-separated list of available models for paid access",
+ default="gpt-4,"
+ "gpt-4-turbo-preview,"
+ "gpt-4-turbo-2024-04-09,"
+ "gpt-4-1106-preview,"
+ "gpt-4-0125-preview,"
+ "gpt-3.5-turbo,"
+ "gpt-3.5-turbo-16k,"
+ "gpt-3.5-turbo-16k-0613,"
+ "gpt-3.5-turbo-1106,"
+ "gpt-3.5-turbo-0613,"
+ "gpt-3.5-turbo-0125,"
+ "gpt-3.5-turbo-instruct,"
+ "text-davinci-003",
+ )
+
+
+class HostedAzureOpenAiConfig(BaseSettings):
+ """
+ Configuration for hosted Azure OpenAI service
+ """
+
+ HOSTED_AZURE_OPENAI_ENABLED: bool = Field(
+ description="Enable hosted Azure OpenAI service",
+ default=False,
+ )
+
+ HOSTED_AZURE_OPENAI_API_KEY: Optional[str] = Field(
+ description="API key for hosted Azure OpenAI service",
+ default=None,
+ )
+
+ HOSTED_AZURE_OPENAI_API_BASE: Optional[str] = Field(
+ description="Base URL for hosted Azure OpenAI API",
+ default=None,
+ )
+
+ HOSTED_AZURE_OPENAI_QUOTA_LIMIT: NonNegativeInt = Field(
+ description="Quota limit for hosted Azure OpenAI service usage",
+ default=200,
+ )
+
+
+class HostedAnthropicConfig(BaseSettings):
+ """
+ Configuration for hosted Anthropic service
+ """
+
+ HOSTED_ANTHROPIC_API_BASE: Optional[str] = Field(
+ description="Base URL for hosted Anthropic API",
+ default=None,
+ )
+
+ HOSTED_ANTHROPIC_API_KEY: Optional[str] = Field(
+ description="API key for hosted Anthropic service",
+ default=None,
+ )
+
+ HOSTED_ANTHROPIC_TRIAL_ENABLED: bool = Field(
+ description="Enable trial access to hosted Anthropic service",
+ default=False,
+ )
+
+ HOSTED_ANTHROPIC_QUOTA_LIMIT: NonNegativeInt = Field(
+ description="Quota limit for hosted Anthropic service usage",
+ default=600000,
+ )
+
+ HOSTED_ANTHROPIC_PAID_ENABLED: bool = Field(
+ description="Enable paid access to hosted Anthropic service",
+ default=False,
+ )
+
+
+class HostedMinmaxConfig(BaseSettings):
+ """
+    Configuration for hosted MiniMax service
+ """
+
+ HOSTED_MINIMAX_ENABLED: bool = Field(
+ description="Enable hosted Minmax service",
+ default=False,
+ )
+
+
+class HostedSparkConfig(BaseSettings):
+ """
+ Configuration for hosted Spark service
+ """
+
+ HOSTED_SPARK_ENABLED: bool = Field(
+ description="Enable hosted Spark service",
+ default=False,
+ )
+
+
+class HostedZhipuAIConfig(BaseSettings):
+ """
+ Configuration for hosted ZhipuAI service
+ """
+
+ HOSTED_ZHIPUAI_ENABLED: bool = Field(
+ description="Enable hosted ZhipuAI service",
+ default=False,
+ )
+
+
+class HostedModerationConfig(BaseSettings):
+ """
+ Configuration for hosted Moderation service
+ """
+
+ HOSTED_MODERATION_ENABLED: bool = Field(
+ description="Enable hosted Moderation service",
+ default=False,
+ )
+
+ HOSTED_MODERATION_PROVIDERS: str = Field(
+ description="Comma-separated list of moderation providers",
+ default="",
+ )
+
+
+class HostedFetchAppTemplateConfig(BaseSettings):
+ """
+ Configuration for fetching app templates
+ """
+
+ HOSTED_FETCH_APP_TEMPLATES_MODE: str = Field(
+ description="Mode for fetching app templates: remote, db, or builtin default to remote,",
+ default="remote",
+ )
+
+ HOSTED_FETCH_APP_TEMPLATES_REMOTE_DOMAIN: str = Field(
+ description="Domain for fetching remote app templates",
+ default="https://tmpl.dify.ai",
+ )
+
+
+class HostedServiceConfig(
+    # place the configs in alphabetical order
+ HostedAnthropicConfig,
+ HostedAzureOpenAiConfig,
+ HostedFetchAppTemplateConfig,
+ HostedMinmaxConfig,
+ HostedOpenAiConfig,
+ HostedSparkConfig,
+ HostedZhipuAIConfig,
+ # moderation
+ HostedModerationConfig,
+ # credit config
+ HostedCreditConfig,
+):
+ pass
diff --git a/api/configs/middleware/__init__.py b/api/configs/middleware/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f6a44eaa471e6283fc396ec646c11788fca6afbf
--- /dev/null
+++ b/api/configs/middleware/__init__.py
@@ -0,0 +1,279 @@
+from typing import Any, Literal, Optional
+from urllib.parse import quote_plus
+
+from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt, computed_field
+from pydantic_settings import BaseSettings
+
+from .cache.redis_config import RedisConfig
+from .storage.aliyun_oss_storage_config import AliyunOSSStorageConfig
+from .storage.amazon_s3_storage_config import S3StorageConfig
+from .storage.azure_blob_storage_config import AzureBlobStorageConfig
+from .storage.baidu_obs_storage_config import BaiduOBSStorageConfig
+from .storage.google_cloud_storage_config import GoogleCloudStorageConfig
+from .storage.huawei_obs_storage_config import HuaweiCloudOBSStorageConfig
+from .storage.oci_storage_config import OCIStorageConfig
+from .storage.opendal_storage_config import OpenDALStorageConfig
+from .storage.supabase_storage_config import SupabaseStorageConfig
+from .storage.tencent_cos_storage_config import TencentCloudCOSStorageConfig
+from .storage.volcengine_tos_storage_config import VolcengineTOSStorageConfig
+from .vdb.analyticdb_config import AnalyticdbConfig
+from .vdb.baidu_vector_config import BaiduVectorDBConfig
+from .vdb.chroma_config import ChromaConfig
+from .vdb.couchbase_config import CouchbaseConfig
+from .vdb.elasticsearch_config import ElasticsearchConfig
+from .vdb.lindorm_config import LindormConfig
+from .vdb.milvus_config import MilvusConfig
+from .vdb.myscale_config import MyScaleConfig
+from .vdb.oceanbase_config import OceanBaseVectorConfig
+from .vdb.opensearch_config import OpenSearchConfig
+from .vdb.oracle_config import OracleConfig
+from .vdb.pgvector_config import PGVectorConfig
+from .vdb.pgvectors_config import PGVectoRSConfig
+from .vdb.qdrant_config import QdrantConfig
+from .vdb.relyt_config import RelytConfig
+from .vdb.tencent_vector_config import TencentVectorDBConfig
+from .vdb.tidb_on_qdrant_config import TidbOnQdrantConfig
+from .vdb.tidb_vector_config import TiDBVectorConfig
+from .vdb.upstash_config import UpstashConfig
+from .vdb.vikingdb_config import VikingDBConfig
+from .vdb.weaviate_config import WeaviateConfig
+
+
+class StorageConfig(BaseSettings):
+ STORAGE_TYPE: Literal[
+ "opendal",
+ "s3",
+ "aliyun-oss",
+ "azure-blob",
+ "baidu-obs",
+ "google-storage",
+ "huawei-obs",
+ "oci-storage",
+ "tencent-cos",
+ "volcengine-tos",
+ "supabase",
+ "local",
+ ] = Field(
+ description="Type of storage to use."
+ " Options: 'opendal', '(deprecated) local', 's3', 'aliyun-oss', 'azure-blob', 'baidu-obs', 'google-storage', "
+ "'huawei-obs', 'oci-storage', 'tencent-cos', 'volcengine-tos', 'supabase'. Default is 'opendal'.",
+ default="opendal",
+ )
+
+ STORAGE_LOCAL_PATH: str = Field(
+ description="Path for local storage when STORAGE_TYPE is set to 'local'.",
+ default="storage",
+ deprecated=True,
+ )
+
+
+class VectorStoreConfig(BaseSettings):
+ VECTOR_STORE: Optional[str] = Field(
+ description="Type of vector store to use for efficient similarity search."
+ " Set to None if not using a vector store.",
+ default=None,
+ )
+
+ VECTOR_STORE_WHITELIST_ENABLE: Optional[bool] = Field(
+ description="Enable whitelist for vector store.",
+ default=False,
+ )
+
+
+class KeywordStoreConfig(BaseSettings):
+ KEYWORD_STORE: str = Field(
+ description="Method for keyword extraction and storage."
+ " Default is 'jieba', a Chinese text segmentation library.",
+ default="jieba",
+ )
+
+
+class DatabaseConfig(BaseSettings):
+ DB_HOST: str = Field(
+ description="Hostname or IP address of the database server.",
+ default="localhost",
+ )
+
+ DB_PORT: PositiveInt = Field(
+ description="Port number for database connection.",
+ default=5432,
+ )
+
+ DB_USERNAME: str = Field(
+ description="Username for database authentication.",
+ default="postgres",
+ )
+
+ DB_PASSWORD: str = Field(
+ description="Password for database authentication.",
+ default="",
+ )
+
+ DB_DATABASE: str = Field(
+ description="Name of the database to connect to.",
+ default="dify",
+ )
+
+ DB_CHARSET: str = Field(
+ description="Character set for database connection.",
+ default="",
+ )
+
+ DB_EXTRAS: str = Field(
+ description="Additional database connection parameters. Example: 'keepalives_idle=60&keepalives=1'",
+ default="",
+ )
+
+ SQLALCHEMY_DATABASE_URI_SCHEME: str = Field(
+ description="Database URI scheme for SQLAlchemy connection.",
+ default="postgresql",
+ )
+
+ @computed_field
+ def SQLALCHEMY_DATABASE_URI(self) -> str:
+ db_extras = (
+ f"{self.DB_EXTRAS}&client_encoding={self.DB_CHARSET}" if self.DB_CHARSET else self.DB_EXTRAS
+ ).strip("&")
+ db_extras = f"?{db_extras}" if db_extras else ""
+ return (
+ f"{self.SQLALCHEMY_DATABASE_URI_SCHEME}://"
+ f"{quote_plus(self.DB_USERNAME)}:{quote_plus(self.DB_PASSWORD)}@{self.DB_HOST}:{self.DB_PORT}/{self.DB_DATABASE}"
+ f"{db_extras}"
+ )
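+
+    # Illustrative: with the defaults above plus DB_PASSWORD="difyai123456" and
+    # DB_EXTRAS="keepalives_idle=60", this computes
+    # "postgresql://postgres:difyai123456@localhost:5432/dify?keepalives_idle=60".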
+
+ SQLALCHEMY_POOL_SIZE: NonNegativeInt = Field(
+ description="Maximum number of database connections in the pool.",
+ default=30,
+ )
+
+ SQLALCHEMY_MAX_OVERFLOW: NonNegativeInt = Field(
+ description="Maximum number of connections that can be created beyond the pool_size.",
+ default=10,
+ )
+
+ SQLALCHEMY_POOL_RECYCLE: NonNegativeInt = Field(
+ description="Number of seconds after which a connection is automatically recycled.",
+ default=3600,
+ )
+
+ SQLALCHEMY_POOL_PRE_PING: bool = Field(
+ description="If True, enables connection pool pre-ping feature to check connections.",
+ default=False,
+ )
+
+ SQLALCHEMY_ECHO: bool | str = Field(
+ description="If True, SQLAlchemy will log all SQL statements.",
+ default=False,
+ )
+
+ @computed_field
+ def SQLALCHEMY_ENGINE_OPTIONS(self) -> dict[str, Any]:
+ return {
+ "pool_size": self.SQLALCHEMY_POOL_SIZE,
+ "max_overflow": self.SQLALCHEMY_MAX_OVERFLOW,
+ "pool_recycle": self.SQLALCHEMY_POOL_RECYCLE,
+ "pool_pre_ping": self.SQLALCHEMY_POOL_PRE_PING,
+ "connect_args": {"options": "-c timezone=UTC"},
+ }
+
+
+class CeleryConfig(DatabaseConfig):
+ CELERY_BACKEND: str = Field(
+ description="Backend for Celery task results. Options: 'database', 'redis'.",
+ default="database",
+ )
+
+ CELERY_BROKER_URL: Optional[str] = Field(
+ description="URL of the message broker for Celery tasks.",
+ default=None,
+ )
+
+ CELERY_USE_SENTINEL: Optional[bool] = Field(
+ description="Whether to use Redis Sentinel for high availability.",
+ default=False,
+ )
+
+ CELERY_SENTINEL_MASTER_NAME: Optional[str] = Field(
+ description="Name of the Redis Sentinel master.",
+ default=None,
+ )
+
+ CELERY_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
+ description="Timeout for Redis Sentinel socket operations in seconds.",
+ default=0.1,
+ )
+
+ @computed_field
+ def CELERY_RESULT_BACKEND(self) -> str | None:
+ return (
+ "db+{}".format(self.SQLALCHEMY_DATABASE_URI)
+ if self.CELERY_BACKEND == "database"
+ else self.CELERY_BROKER_URL
+ )
+
+ @property
+ def BROKER_USE_SSL(self) -> bool:
+ return self.CELERY_BROKER_URL.startswith("rediss://") if self.CELERY_BROKER_URL else False
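+
+    # Illustrative: CELERY_BROKER_URL="rediss://:password@redis:6379/1" enables
+    # broker SSL, while a plain "redis://" URL (or no broker URL) leaves it off.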
+
+
+class InternalTestConfig(BaseSettings):
+ """
+ Configuration settings for Internal Test
+ """
+
+ AWS_SECRET_ACCESS_KEY: Optional[str] = Field(
+ description="Internal test AWS secret access key",
+ default=None,
+ )
+
+ AWS_ACCESS_KEY_ID: Optional[str] = Field(
+ description="Internal test AWS access key ID",
+ default=None,
+ )
+
+
+class MiddlewareConfig(
+    # place the configs in alphabetical order
+ CeleryConfig,
+ DatabaseConfig,
+ KeywordStoreConfig,
+ RedisConfig,
+ # configs of storage and storage providers
+ StorageConfig,
+ AliyunOSSStorageConfig,
+ AzureBlobStorageConfig,
+ BaiduOBSStorageConfig,
+ GoogleCloudStorageConfig,
+ HuaweiCloudOBSStorageConfig,
+ OCIStorageConfig,
+ OpenDALStorageConfig,
+ S3StorageConfig,
+ SupabaseStorageConfig,
+ TencentCloudCOSStorageConfig,
+ VolcengineTOSStorageConfig,
+ # configs of vdb and vdb providers
+ VectorStoreConfig,
+ AnalyticdbConfig,
+ ChromaConfig,
+ MilvusConfig,
+ MyScaleConfig,
+ OpenSearchConfig,
+ OracleConfig,
+ PGVectorConfig,
+ PGVectoRSConfig,
+ QdrantConfig,
+ RelytConfig,
+ TencentVectorDBConfig,
+ TiDBVectorConfig,
+ WeaviateConfig,
+ ElasticsearchConfig,
+ CouchbaseConfig,
+ InternalTestConfig,
+ VikingDBConfig,
+ UpstashConfig,
+ TidbOnQdrantConfig,
+ LindormConfig,
+ OceanBaseVectorConfig,
+ BaiduVectorDBConfig,
+):
+ pass
diff --git a/api/configs/middleware/cache/__init__.py b/api/configs/middleware/cache/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/api/configs/middleware/cache/redis_config.py b/api/configs/middleware/cache/redis_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..2e98c31ec356df51540e2a43cd261acb016ad3dc
--- /dev/null
+++ b/api/configs/middleware/cache/redis_config.py
@@ -0,0 +1,85 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveFloat, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class RedisConfig(BaseSettings):
+ """
+ Configuration settings for Redis connection
+ """
+
+ REDIS_HOST: str = Field(
+ description="Hostname or IP address of the Redis server",
+ default="localhost",
+ )
+
+ REDIS_PORT: PositiveInt = Field(
+ description="Port number on which the Redis server is listening",
+ default=6379,
+ )
+
+ REDIS_USERNAME: Optional[str] = Field(
+ description="Username for Redis authentication (if required)",
+ default=None,
+ )
+
+ REDIS_PASSWORD: Optional[str] = Field(
+ description="Password for Redis authentication (if required)",
+ default=None,
+ )
+
+ REDIS_DB: NonNegativeInt = Field(
+ description="Redis database number to use (0-15)",
+ default=0,
+ )
+
+ REDIS_USE_SSL: bool = Field(
+ description="Enable SSL/TLS for the Redis connection",
+ default=False,
+ )
+
+ REDIS_USE_SENTINEL: Optional[bool] = Field(
+ description="Enable Redis Sentinel mode for high availability",
+ default=False,
+ )
+
+ REDIS_SENTINELS: Optional[str] = Field(
+ description="Comma-separated list of Redis Sentinel nodes (host:port)",
+ default=None,
+ )
+
+ REDIS_SENTINEL_SERVICE_NAME: Optional[str] = Field(
+ description="Name of the Redis Sentinel service to monitor",
+ default=None,
+ )
+
+ REDIS_SENTINEL_USERNAME: Optional[str] = Field(
+ description="Username for Redis Sentinel authentication (if required)",
+ default=None,
+ )
+
+ REDIS_SENTINEL_PASSWORD: Optional[str] = Field(
+ description="Password for Redis Sentinel authentication (if required)",
+ default=None,
+ )
+
+ REDIS_SENTINEL_SOCKET_TIMEOUT: Optional[PositiveFloat] = Field(
+ description="Socket timeout in seconds for Redis Sentinel connections",
+ default=0.1,
+ )
+
+ REDIS_USE_CLUSTERS: bool = Field(
+ description="Enable Redis Clusters mode for high availability",
+ default=False,
+ )
+
+ REDIS_CLUSTERS: Optional[str] = Field(
+ description="Comma-separated list of Redis Clusters nodes (host:port)",
+ default=None,
+ )
+
+ REDIS_CLUSTERS_PASSWORD: Optional[str] = Field(
+ description="Password for Redis Clusters authentication (if required)",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/aliyun_oss_storage_config.py b/api/configs/middleware/storage/aliyun_oss_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..07eb527170b2ea4be4f83a9713c7f8a52791fd4d
--- /dev/null
+++ b/api/configs/middleware/storage/aliyun_oss_storage_config.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class AliyunOSSStorageConfig(BaseSettings):
+ """
+ Configuration settings for Aliyun Object Storage Service (OSS)
+ """
+
+ ALIYUN_OSS_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Aliyun OSS bucket to store and retrieve objects",
+ default=None,
+ )
+
+ ALIYUN_OSS_ACCESS_KEY: Optional[str] = Field(
+ description="Access key ID for authenticating with Aliyun OSS",
+ default=None,
+ )
+
+ ALIYUN_OSS_SECRET_KEY: Optional[str] = Field(
+ description="Secret access key for authenticating with Aliyun OSS",
+ default=None,
+ )
+
+ ALIYUN_OSS_ENDPOINT: Optional[str] = Field(
+ description="URL of the Aliyun OSS endpoint for your chosen region",
+ default=None,
+ )
+
+ ALIYUN_OSS_REGION: Optional[str] = Field(
+ description="Aliyun OSS region where your bucket is located (e.g., 'oss-cn-hangzhou')",
+ default=None,
+ )
+
+ ALIYUN_OSS_AUTH_VERSION: Optional[str] = Field(
+ description="Version of the authentication protocol to use with Aliyun OSS (e.g., 'v4')",
+ default=None,
+ )
+
+ ALIYUN_OSS_PATH: Optional[str] = Field(
+ description="Base path within the bucket to store objects (e.g., 'my-app-data/')",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/amazon_s3_storage_config.py b/api/configs/middleware/storage/amazon_s3_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..f2d94b12ffa979abb0dc2b759fee3184657cac12
--- /dev/null
+++ b/api/configs/middleware/storage/amazon_s3_storage_config.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class S3StorageConfig(BaseSettings):
+ """
+ Configuration settings for S3-compatible object storage
+ """
+
+ S3_ENDPOINT: Optional[str] = Field(
+ description="URL of the S3-compatible storage endpoint (e.g., 'https://s3.amazonaws.com')",
+ default=None,
+ )
+
+ S3_REGION: Optional[str] = Field(
+ description="Region where the S3 bucket is located (e.g., 'us-east-1')",
+ default=None,
+ )
+
+ S3_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the S3 bucket to store and retrieve objects",
+ default=None,
+ )
+
+ S3_ACCESS_KEY: Optional[str] = Field(
+ description="Access key ID for authenticating with the S3 service",
+ default=None,
+ )
+
+ S3_SECRET_KEY: Optional[str] = Field(
+ description="Secret access key for authenticating with the S3 service",
+ default=None,
+ )
+
+ S3_ADDRESS_STYLE: str = Field(
+ description="S3 addressing style: 'auto', 'path', or 'virtual'",
+ default="auto",
+ )
+
+ S3_USE_AWS_MANAGED_IAM: bool = Field(
+ description="Use AWS managed IAM roles for authentication instead of access/secret keys",
+ default=False,
+ )
diff --git a/api/configs/middleware/storage/azure_blob_storage_config.py b/api/configs/middleware/storage/azure_blob_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b7ab5247a9d4dd7bac86bf799fc0eba01aa90799
--- /dev/null
+++ b/api/configs/middleware/storage/azure_blob_storage_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class AzureBlobStorageConfig(BaseSettings):
+ """
+ Configuration settings for Azure Blob Storage
+ """
+
+ AZURE_BLOB_ACCOUNT_NAME: Optional[str] = Field(
+ description="Name of the Azure Storage account (e.g., 'mystorageaccount')",
+ default=None,
+ )
+
+ AZURE_BLOB_ACCOUNT_KEY: Optional[str] = Field(
+ description="Access key for authenticating with the Azure Storage account",
+ default=None,
+ )
+
+ AZURE_BLOB_CONTAINER_NAME: Optional[str] = Field(
+ description="Name of the Azure Blob container to store and retrieve objects",
+ default=None,
+ )
+
+ AZURE_BLOB_ACCOUNT_URL: Optional[str] = Field(
+ description="URL of the Azure Blob storage endpoint (e.g., 'https://mystorageaccount.blob.core.windows.net')",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/baidu_obs_storage_config.py b/api/configs/middleware/storage/baidu_obs_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e7913b0acc337c1a70cede88ec3563c0464da8be
--- /dev/null
+++ b/api/configs/middleware/storage/baidu_obs_storage_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class BaiduOBSStorageConfig(BaseSettings):
+ """
+ Configuration settings for Baidu Object Storage Service (OBS)
+ """
+
+ BAIDU_OBS_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Baidu OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
+ default=None,
+ )
+
+ BAIDU_OBS_ACCESS_KEY: Optional[str] = Field(
+ description="Access Key ID for authenticating with Baidu OBS",
+ default=None,
+ )
+
+ BAIDU_OBS_SECRET_KEY: Optional[str] = Field(
+ description="Secret Access Key for authenticating with Baidu OBS",
+ default=None,
+ )
+
+ BAIDU_OBS_ENDPOINT: Optional[str] = Field(
+ description="URL of the Baidu OSS endpoint for your chosen region (e.g., 'https://.bj.bcebos.com')",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/google_cloud_storage_config.py b/api/configs/middleware/storage/google_cloud_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5d763d7f5c615783bb543f19f7a7bca006bdca0
--- /dev/null
+++ b/api/configs/middleware/storage/google_cloud_storage_config.py
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class GoogleCloudStorageConfig(BaseSettings):
+ """
+ Configuration settings for Google Cloud Storage
+ """
+
+ GOOGLE_STORAGE_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Google Cloud Storage bucket to store and retrieve objects (e.g., 'my-gcs-bucket')",
+ default=None,
+ )
+
+ GOOGLE_STORAGE_SERVICE_ACCOUNT_JSON_BASE64: Optional[str] = Field(
+ description="Base64-encoded JSON key file for Google Cloud service account authentication",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/huawei_obs_storage_config.py b/api/configs/middleware/storage/huawei_obs_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..be983b5187d2716e4aae0b49212a863c48dc1286
--- /dev/null
+++ b/api/configs/middleware/storage/huawei_obs_storage_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class HuaweiCloudOBSStorageConfig(BaseSettings):
+ """
+ Configuration settings for Huawei Cloud Object Storage Service (OBS)
+ """
+
+ HUAWEI_OBS_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Huawei Cloud OBS bucket to store and retrieve objects (e.g., 'my-obs-bucket')",
+ default=None,
+ )
+
+ HUAWEI_OBS_ACCESS_KEY: Optional[str] = Field(
+ description="Access Key ID for authenticating with Huawei Cloud OBS",
+ default=None,
+ )
+
+ HUAWEI_OBS_SECRET_KEY: Optional[str] = Field(
+ description="Secret Access Key for authenticating with Huawei Cloud OBS",
+ default=None,
+ )
+
+ HUAWEI_OBS_SERVER: Optional[str] = Field(
+ description="Endpoint URL for Huawei Cloud OBS (e.g., 'https://obs.cn-north-4.myhuaweicloud.com')",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/oci_storage_config.py b/api/configs/middleware/storage/oci_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..edc245bcac59bbbca26b4a6fec3e7cab68cfd890
--- /dev/null
+++ b/api/configs/middleware/storage/oci_storage_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class OCIStorageConfig(BaseSettings):
+ """
+ Configuration settings for Oracle Cloud Infrastructure (OCI) Object Storage
+ """
+
+ OCI_ENDPOINT: Optional[str] = Field(
+ description="URL of the OCI Object Storage endpoint (e.g., 'https://objectstorage.us-phoenix-1.oraclecloud.com')",
+ default=None,
+ )
+
+ OCI_REGION: Optional[str] = Field(
+ description="OCI region where the bucket is located (e.g., 'us-phoenix-1')",
+ default=None,
+ )
+
+ OCI_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the OCI Object Storage bucket to store and retrieve objects (e.g., 'my-oci-bucket')",
+ default=None,
+ )
+
+ OCI_ACCESS_KEY: Optional[str] = Field(
+ description="Access key (also known as API key) for authenticating with OCI Object Storage",
+ default=None,
+ )
+
+ OCI_SECRET_KEY: Optional[str] = Field(
+ description="Secret key associated with the access key for authenticating with OCI Object Storage",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/opendal_storage_config.py b/api/configs/middleware/storage/opendal_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef38070e53bb7c23707db81da7aea26c0c7896ea
--- /dev/null
+++ b/api/configs/middleware/storage/opendal_storage_config.py
@@ -0,0 +1,9 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class OpenDALStorageConfig(BaseSettings):
+ OPENDAL_SCHEME: str = Field(
+ default="fs",
+ description="OpenDAL scheme.",
+ )
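
`OPENDAL_SCHEME` names the OpenDAL backend the storage layer should build. A minimal sketch using the OpenDAL Python binding (assuming the `opendal` package is installed; the root directory is illustrative):

```python
import opendal

# "fs" is OpenDAL's local-filesystem backend; "root" scopes all keys to a directory.
op = opendal.Operator("fs", root="/tmp/dify-storage")
op.write("hello.txt", b"hello")
print(bytes(op.read("hello.txt")))  # -> b"hello"
```
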
diff --git a/api/configs/middleware/storage/supabase_storage_config.py b/api/configs/middleware/storage/supabase_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..dcf7c20cf9e05738379a130befa66da8578f0574
--- /dev/null
+++ b/api/configs/middleware/storage/supabase_storage_config.py
@@ -0,0 +1,25 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class SupabaseStorageConfig(BaseSettings):
+ """
+ Configuration settings for Supabase Object Storage Service
+ """
+
+ SUPABASE_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Supabase bucket to store and retrieve objects (e.g., 'dify-bucket')",
+ default=None,
+ )
+
+ SUPABASE_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with Supabase",
+ default=None,
+ )
+
+ SUPABASE_URL: Optional[str] = Field(
+ description="URL of the Supabase project (e.g., 'https://xyzcompany.supabase.co')",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/tencent_cos_storage_config.py b/api/configs/middleware/storage/tencent_cos_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..255c4e8938e0fb220cccb546a49036fa15f2b806
--- /dev/null
+++ b/api/configs/middleware/storage/tencent_cos_storage_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class TencentCloudCOSStorageConfig(BaseSettings):
+ """
+ Configuration settings for Tencent Cloud Object Storage (COS)
+ """
+
+ TENCENT_COS_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Tencent Cloud COS bucket to store and retrieve objects",
+ default=None,
+ )
+
+ TENCENT_COS_REGION: Optional[str] = Field(
+ description="Tencent Cloud region where the COS bucket is located (e.g., 'ap-guangzhou')",
+ default=None,
+ )
+
+ TENCENT_COS_SECRET_ID: Optional[str] = Field(
+ description="SecretId for authenticating with Tencent Cloud COS (part of API credentials)",
+ default=None,
+ )
+
+ TENCENT_COS_SECRET_KEY: Optional[str] = Field(
+ description="SecretKey for authenticating with Tencent Cloud COS (part of API credentials)",
+ default=None,
+ )
+
+ TENCENT_COS_SCHEME: Optional[str] = Field(
+ description="Protocol scheme for COS requests: 'https' (recommended) or 'http'",
+ default=None,
+ )
diff --git a/api/configs/middleware/storage/volcengine_tos_storage_config.py b/api/configs/middleware/storage/volcengine_tos_storage_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..06c3ae4d3e63f8396f6a1d1866a63e967db25870
--- /dev/null
+++ b/api/configs/middleware/storage/volcengine_tos_storage_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class VolcengineTOSStorageConfig(BaseSettings):
+ """
+ Configuration settings for Volcengine Tinder Object Storage (TOS)
+ """
+
+ VOLCENGINE_TOS_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Volcengine TOS bucket to store and retrieve objects (e.g., 'my-tos-bucket')",
+ default=None,
+ )
+
+ VOLCENGINE_TOS_ACCESS_KEY: Optional[str] = Field(
+ description="Access Key ID for authenticating with Volcengine TOS",
+ default=None,
+ )
+
+ VOLCENGINE_TOS_SECRET_KEY: Optional[str] = Field(
+ description="Secret Access Key for authenticating with Volcengine TOS",
+ default=None,
+ )
+
+ VOLCENGINE_TOS_ENDPOINT: Optional[str] = Field(
+ description="URL of the Volcengine TOS endpoint (e.g., 'https://tos-cn-beijing.volces.com')",
+ default=None,
+ )
+
+ VOLCENGINE_TOS_REGION: Optional[str] = Field(
+ description="Volcengine region where the TOS bucket is located (e.g., 'cn-beijing')",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/analyticdb_config.py b/api/configs/middleware/vdb/analyticdb_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..cb8dc7d724fff9f052dcb1e24f9e4b3b3778c986
--- /dev/null
+++ b/api/configs/middleware/vdb/analyticdb_config.py
@@ -0,0 +1,51 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class AnalyticdbConfig(BaseSettings):
+ """
+ Configuration for connecting to Alibaba Cloud AnalyticDB for PostgreSQL.
+ Refer to the following documentation for details on obtaining credentials:
+ https://www.alibabacloud.com/help/en/analyticdb-for-postgresql/getting-started/create-an-instance-instances-with-vector-engine-optimization-enabled
+ """
+
+ ANALYTICDB_KEY_ID: Optional[str] = Field(
+ default=None, description="The Access Key ID provided by Alibaba Cloud for API authentication."
+ )
+ ANALYTICDB_KEY_SECRET: Optional[str] = Field(
+ default=None, description="The Secret Access Key corresponding to the Access Key ID for secure API access."
+ )
+ ANALYTICDB_REGION_ID: Optional[str] = Field(
+ default=None,
+ description="The region where the AnalyticDB instance is deployed (e.g., 'cn-hangzhou', 'ap-southeast-1').",
+ )
+ ANALYTICDB_INSTANCE_ID: Optional[str] = Field(
+ default=None,
+ description="The unique identifier of the AnalyticDB instance you want to connect to.",
+ )
+ ANALYTICDB_ACCOUNT: Optional[str] = Field(
+ default=None,
+ description="The account name used to log in to the AnalyticDB instance"
+ " (usually the initial account created with the instance).",
+ )
+ ANALYTICDB_PASSWORD: Optional[str] = Field(
+ default=None, description="The password associated with the AnalyticDB account for database authentication."
+ )
+ ANALYTICDB_NAMESPACE: Optional[str] = Field(
+ default=None, description="The namespace within AnalyticDB for schema isolation (if using namespace feature)."
+ )
+ ANALYTICDB_NAMESPACE_PASSWORD: Optional[str] = Field(
+ default=None,
+ description="The password for accessing the specified namespace within the AnalyticDB instance"
+ " (if namespace feature is enabled).",
+ )
+ ANALYTICDB_HOST: Optional[str] = Field(
+ default=None, description="The host of the AnalyticDB instance you want to connect to."
+ )
+ ANALYTICDB_PORT: PositiveInt = Field(
+ default=5432, description="The port of the AnalyticDB instance you want to connect to."
+ )
+ ANALYTICDB_MIN_CONNECTION: PositiveInt = Field(default=1, description="Min connection of the AnalyticDB database.")
+ ANALYTICDB_MAX_CONNECTION: PositiveInt = Field(default=5, description="Max connection of the AnalyticDB database.")
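
AnalyticDB for PostgreSQL speaks the PostgreSQL wire protocol, so these fields map directly onto a standard connection pool. A sketch with `psycopg2` (an assumption; the database name is illustrative):

```python
from psycopg2 import pool

from configs.middleware.vdb.analyticdb_config import AnalyticdbConfig

config = AnalyticdbConfig()  # reads ANALYTICDB_* from the environment

connection_pool = pool.SimpleConnectionPool(
    minconn=config.ANALYTICDB_MIN_CONNECTION,
    maxconn=config.ANALYTICDB_MAX_CONNECTION,
    host=config.ANALYTICDB_HOST,
    port=config.ANALYTICDB_PORT,
    user=config.ANALYTICDB_ACCOUNT,
    password=config.ANALYTICDB_PASSWORD,
    dbname="dify",  # hypothetical database name
)
conn = connection_pool.getconn()
try:
    with conn.cursor() as cur:
        cur.execute("SELECT version()")
        print(cur.fetchone())
finally:
    connection_pool.putconn(conn)
```
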
diff --git a/api/configs/middleware/vdb/baidu_vector_config.py b/api/configs/middleware/vdb/baidu_vector_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..44742c2e2f434999640e227619b5eb146000e6b1
--- /dev/null
+++ b/api/configs/middleware/vdb/baidu_vector_config.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class BaiduVectorDBConfig(BaseSettings):
+ """
+ Configuration settings for Baidu Vector Database
+ """
+
+ BAIDU_VECTOR_DB_ENDPOINT: Optional[str] = Field(
+ description="URL of the Baidu Vector Database service (e.g., 'http://vdb.bj.baidubce.com')",
+ default=None,
+ )
+
+ BAIDU_VECTOR_DB_CONNECTION_TIMEOUT_MS: PositiveInt = Field(
+ description="Timeout in milliseconds for Baidu Vector Database operations (default is 30000 milliseconds)",
+ default=30000,
+ )
+
+ BAIDU_VECTOR_DB_ACCOUNT: Optional[str] = Field(
+ description="Account for authenticating with the Baidu Vector Database",
+ default=None,
+ )
+
+ BAIDU_VECTOR_DB_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with the Baidu Vector Database service",
+ default=None,
+ )
+
+ BAIDU_VECTOR_DB_DATABASE: Optional[str] = Field(
+ description="Name of the specific Baidu Vector Database to connect to",
+ default=None,
+ )
+
+ BAIDU_VECTOR_DB_SHARD: PositiveInt = Field(
+ description="Number of shards for the Baidu Vector Database (default is 1)",
+ default=1,
+ )
+
+ BAIDU_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
+ description="Number of replicas for the Baidu Vector Database (default is 3)",
+ default=3,
+ )
diff --git a/api/configs/middleware/vdb/chroma_config.py b/api/configs/middleware/vdb/chroma_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..e83a9902dee903d6f2efd66366ba6b841094315d
--- /dev/null
+++ b/api/configs/middleware/vdb/chroma_config.py
@@ -0,0 +1,40 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class ChromaConfig(BaseSettings):
+ """
+ Configuration settings for Chroma vector database
+ """
+
+ CHROMA_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the Chroma server (e.g., 'localhost' or '192.168.1.100')",
+ default=None,
+ )
+
+ CHROMA_PORT: PositiveInt = Field(
+ description="Port number on which the Chroma server is listening (default is 8000)",
+ default=8000,
+ )
+
+ CHROMA_TENANT: Optional[str] = Field(
+ description="Tenant identifier for multi-tenancy support in Chroma",
+ default=None,
+ )
+
+ CHROMA_DATABASE: Optional[str] = Field(
+ description="Name of the Chroma database to connect to",
+ default=None,
+ )
+
+ CHROMA_AUTH_PROVIDER: Optional[str] = Field(
+ description="Authentication provider for Chroma (e.g., 'basic', 'token', or a custom provider)",
+ default=None,
+ )
+
+ CHROMA_AUTH_CREDENTIALS: Optional[str] = Field(
+ description="Authentication credentials for Chroma (format depends on the auth provider)",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/couchbase_config.py b/api/configs/middleware/vdb/couchbase_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b81cbf895956accdf788c8896fd45596edb95a73
--- /dev/null
+++ b/api/configs/middleware/vdb/couchbase_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class CouchbaseConfig(BaseSettings):
+ """
+ Configuration settings for Couchbase
+ """
+
+ COUCHBASE_CONNECTION_STRING: Optional[str] = Field(
+ description="Connection string for the Couchbase server (e.g., 'couchbase://localhost')",
+ default=None,
+ )
+
+ COUCHBASE_USER: Optional[str] = Field(
+ description="Username for authenticating with Couchbase",
+ default=None,
+ )
+
+ COUCHBASE_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with Couchbase",
+ default=None,
+ )
+
+ COUCHBASE_BUCKET_NAME: Optional[str] = Field(
+ description="Name of the Couchbase bucket to store and retrieve objects",
+ default=None,
+ )
+
+ COUCHBASE_SCOPE_NAME: Optional[str] = Field(
+ description="Name of the Couchbase scope within the bucket",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/elasticsearch_config.py b/api/configs/middleware/vdb/elasticsearch_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..df8182985dc193ef5e46f4f965962a52518a732f
--- /dev/null
+++ b/api/configs/middleware/vdb/elasticsearch_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class ElasticsearchConfig(BaseSettings):
+ """
+ Configuration settings for Elasticsearch
+ """
+
+ ELASTICSEARCH_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the Elasticsearch server (e.g., 'localhost' or '192.168.1.100')",
+ default="127.0.0.1",
+ )
+
+ ELASTICSEARCH_PORT: PositiveInt = Field(
+ description="Port number on which the Elasticsearch server is listening (default is 9200)",
+ default=9200,
+ )
+
+ ELASTICSEARCH_USERNAME: Optional[str] = Field(
+ description="Username for authenticating with Elasticsearch (default is 'elastic')",
+ default="elastic",
+ )
+
+ ELASTICSEARCH_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with Elasticsearch (default is 'elastic')",
+ default="elastic",
+ )
diff --git a/api/configs/middleware/vdb/lindorm_config.py b/api/configs/middleware/vdb/lindorm_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..95e1d1cfca4b807bc5020fb5819177ec7858777f
--- /dev/null
+++ b/api/configs/middleware/vdb/lindorm_config.py
@@ -0,0 +1,34 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class LindormConfig(BaseSettings):
+ """
+ Configuration settings for Lindorm
+ """
+
+ LINDORM_URL: Optional[str] = Field(
+ description="URL of the Lindorm instance",
+ default=None,
+ )
+ LINDORM_USERNAME: Optional[str] = Field(
+ description="Username for authenticating with Lindorm",
+ default=None,
+ )
+ LINDORM_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with Lindorm",
+ default=None,
+ )
+ DEFAULT_INDEX_TYPE: Optional[str] = Field(
+ description="Lindorm vector index type; 'hnsw' and 'flat' are available in Dify",
+ default="hnsw",
+ )
+ DEFAULT_DISTANCE_TYPE: Optional[str] = Field(
+ description="Vector distance type; supported values are 'l2', 'cosinesimil', and 'innerproduct'", default="l2"
+ )
+ USING_UGC_INDEX: Optional[bool] = Field(
+ description="Whether to use a UGC index, which stores indexes of the same type in a single index"
+ " while still allowing them to be retrieved separately.",
+ default=False,
+ )
diff --git a/api/configs/middleware/vdb/milvus_config.py b/api/configs/middleware/vdb/milvus_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebdf8857b962b769390cfc2a555835a417a847e2
--- /dev/null
+++ b/api/configs/middleware/vdb/milvus_config.py
@@ -0,0 +1,41 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class MilvusConfig(BaseSettings):
+ """
+ Configuration settings for Milvus vector database
+ """
+
+ MILVUS_URI: Optional[str] = Field(
+ description="URI for connecting to the Milvus server (e.g., 'http://localhost:19530' or 'https://milvus-instance.example.com:19530')",
+ default="http://127.0.0.1:19530",
+ )
+
+ MILVUS_TOKEN: Optional[str] = Field(
+ description="Authentication token for Milvus, if token-based authentication is enabled",
+ default=None,
+ )
+
+ MILVUS_USER: Optional[str] = Field(
+ description="Username for authenticating with Milvus, if username/password authentication is enabled",
+ default=None,
+ )
+
+ MILVUS_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with Milvus, if username/password authentication is enabled",
+ default=None,
+ )
+
+ MILVUS_DATABASE: str = Field(
+ description="Name of the Milvus database to connect to (default is 'default')",
+ default="default",
+ )
+
+ MILVUS_ENABLE_HYBRID_SEARCH: bool = Field(
+ description="Enable hybrid search features (requires Milvus >= 2.5.0). Set to false for compatibility with "
+ "older versions",
+ default=True,
+ )
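
A sketch of how these fields would feed `pymilvus` (an assumption; whether Dify wires them exactly this way is not shown here):

```python
from pymilvus import MilvusClient

from configs.middleware.vdb.milvus_config import MilvusConfig

config = MilvusConfig()  # reads MILVUS_* from the environment

client = MilvusClient(
    uri=config.MILVUS_URI,
    token=config.MILVUS_TOKEN or "",
    user=config.MILVUS_USER or "",
    password=config.MILVUS_PASSWORD or "",
    db_name=config.MILVUS_DATABASE,
)
print(client.list_collections())
```
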
diff --git a/api/configs/middleware/vdb/myscale_config.py b/api/configs/middleware/vdb/myscale_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5bf98b3aab25fc3706ae809076de757439f3783
--- /dev/null
+++ b/api/configs/middleware/vdb/myscale_config.py
@@ -0,0 +1,38 @@
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class MyScaleConfig(BaseSettings):
+ """
+ Configuration settings for MyScale vector database
+ """
+
+ MYSCALE_HOST: str = Field(
+ description="Hostname or IP address of the MyScale server (e.g., 'localhost' or 'myscale.example.com')",
+ default="localhost",
+ )
+
+ MYSCALE_PORT: PositiveInt = Field(
+ description="Port number on which the MyScale server is listening (default is 8123)",
+ default=8123,
+ )
+
+ MYSCALE_USER: str = Field(
+ description="Username for authenticating with MyScale (default is 'default')",
+ default="default",
+ )
+
+ MYSCALE_PASSWORD: str = Field(
+ description="Password for authenticating with MyScale (default is an empty string)",
+ default="",
+ )
+
+ MYSCALE_DATABASE: str = Field(
+ description="Name of the MyScale database to connect to (default is 'default')",
+ default="default",
+ )
+
+ MYSCALE_FTS_PARAMS: str = Field(
+ description="Additional parameters for MyScale Full Text Search index)",
+ default="",
+ )
diff --git a/api/configs/middleware/vdb/oceanbase_config.py b/api/configs/middleware/vdb/oceanbase_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..87427af960202daaf3a9588412e07a62037a604b
--- /dev/null
+++ b/api/configs/middleware/vdb/oceanbase_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class OceanBaseVectorConfig(BaseSettings):
+ """
+ Configuration settings for OceanBase Vector database
+ """
+
+ OCEANBASE_VECTOR_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the OceanBase Vector server (e.g. 'localhost')",
+ default=None,
+ )
+
+ OCEANBASE_VECTOR_PORT: Optional[PositiveInt] = Field(
+ description="Port number on which the OceanBase Vector server is listening (default is 2881)",
+ default=2881,
+ )
+
+ OCEANBASE_VECTOR_USER: Optional[str] = Field(
+ description="Username for authenticating with the OceanBase Vector database",
+ default=None,
+ )
+
+ OCEANBASE_VECTOR_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the OceanBase Vector database",
+ default=None,
+ )
+
+ OCEANBASE_VECTOR_DATABASE: Optional[str] = Field(
+ description="Name of the OceanBase Vector database to connect to",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/opensearch_config.py b/api/configs/middleware/vdb/opensearch_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..81dde4c04d472ef4d7cef0a4bc3d2909595a68e1
--- /dev/null
+++ b/api/configs/middleware/vdb/opensearch_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class OpenSearchConfig(BaseSettings):
+ """
+ Configuration settings for OpenSearch
+ """
+
+ OPENSEARCH_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the OpenSearch server (e.g., 'localhost' or 'opensearch.example.com')",
+ default=None,
+ )
+
+ OPENSEARCH_PORT: PositiveInt = Field(
+ description="Port number on which the OpenSearch server is listening (default is 9200)",
+ default=9200,
+ )
+
+ OPENSEARCH_USER: Optional[str] = Field(
+ description="Username for authenticating with OpenSearch",
+ default=None,
+ )
+
+ OPENSEARCH_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with OpenSearch",
+ default=None,
+ )
+
+ OPENSEARCH_SECURE: bool = Field(
+ description="Whether to use SSL/TLS encrypted connection for OpenSearch (True for HTTPS, False for HTTP)",
+ default=False,
+ )
diff --git a/api/configs/middleware/vdb/oracle_config.py b/api/configs/middleware/vdb/oracle_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..5d2cf67ba37b34840b053d03d34a5c3b4af7f26f
--- /dev/null
+++ b/api/configs/middleware/vdb/oracle_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class OracleConfig(BaseSettings):
+ """
+ Configuration settings for Oracle database
+ """
+
+ ORACLE_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the Oracle database server (e.g., 'localhost' or 'oracle.example.com')",
+ default=None,
+ )
+
+ ORACLE_PORT: PositiveInt = Field(
+ description="Port number on which the Oracle database server is listening (default is 1521)",
+ default=1521,
+ )
+
+ ORACLE_USER: Optional[str] = Field(
+ description="Username for authenticating with the Oracle database",
+ default=None,
+ )
+
+ ORACLE_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the Oracle database",
+ default=None,
+ )
+
+ ORACLE_DATABASE: Optional[str] = Field(
+ description="Name of the Oracle database or service to connect to (e.g., 'ORCL' or 'pdborcl')",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/pgvector_config.py b/api/configs/middleware/vdb/pgvector_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..4561a9a7ca9626eb11ccb743d456362ee523a894
--- /dev/null
+++ b/api/configs/middleware/vdb/pgvector_config.py
@@ -0,0 +1,45 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class PGVectorConfig(BaseSettings):
+ """
+ Configuration settings for PGVector (PostgreSQL with vector extension)
+ """
+
+ PGVECTOR_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the PostgreSQL server with PGVector extension (e.g., 'localhost')",
+ default=None,
+ )
+
+ PGVECTOR_PORT: PositiveInt = Field(
+ description="Port number on which the PostgreSQL server is listening (default is 5433)",
+ default=5433,
+ )
+
+ PGVECTOR_USER: Optional[str] = Field(
+ description="Username for authenticating with the PostgreSQL database",
+ default=None,
+ )
+
+ PGVECTOR_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the PostgreSQL database",
+ default=None,
+ )
+
+ PGVECTOR_DATABASE: Optional[str] = Field(
+ description="Name of the PostgreSQL database to connect to",
+ default=None,
+ )
+
+ PGVECTOR_MIN_CONNECTION: PositiveInt = Field(
+ description="Min connection of the PostgreSQL database",
+ default=1,
+ )
+
+ PGVECTOR_MAX_CONNECTION: PositiveInt = Field(
+ description="Max connection of the PostgreSQL database",
+ default=5,
+ )
diff --git a/api/configs/middleware/vdb/pgvectors_config.py b/api/configs/middleware/vdb/pgvectors_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..fa3bca5bb75bc541785b458c13b95d81a7513285
--- /dev/null
+++ b/api/configs/middleware/vdb/pgvectors_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class PGVectoRSConfig(BaseSettings):
+ """
+ Configuration settings for PGVecto.RS (Rust-based vector extension for PostgreSQL)
+ """
+
+ PGVECTO_RS_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the PostgreSQL server with PGVecto.RS extension (e.g., 'localhost')",
+ default=None,
+ )
+
+ PGVECTO_RS_PORT: PositiveInt = Field(
+ description="Port number on which the PostgreSQL server with PGVecto.RS is listening (default is 5431)",
+ default=5431,
+ )
+
+ PGVECTO_RS_USER: Optional[str] = Field(
+ description="Username for authenticating with the PostgreSQL database using PGVecto.RS",
+ default=None,
+ )
+
+ PGVECTO_RS_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the PostgreSQL database using PGVecto.RS",
+ default=None,
+ )
+
+ PGVECTO_RS_DATABASE: Optional[str] = Field(
+ description="Name of the PostgreSQL database with PGVecto.RS extension to connect to",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/qdrant_config.py b/api/configs/middleware/vdb/qdrant_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..b70f6246523c57dced1d92bcf18eff3ceb8528f9
--- /dev/null
+++ b/api/configs/middleware/vdb/qdrant_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class QdrantConfig(BaseSettings):
+ """
+ Configuration settings for Qdrant vector database
+ """
+
+ QDRANT_URL: Optional[str] = Field(
+ description="URL of the Qdrant server (e.g., 'http://localhost:6333' or 'https://qdrant.example.com')",
+ default=None,
+ )
+
+ QDRANT_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with the Qdrant server",
+ default=None,
+ )
+
+ QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
+ description="Timeout in seconds for Qdrant client operations (default is 20 seconds)",
+ default=20,
+ )
+
+ QDRANT_GRPC_ENABLED: bool = Field(
+ description="Whether to enable gRPC support for Qdrant connection (True for gRPC, False for HTTP)",
+ default=False,
+ )
+
+ QDRANT_GRPC_PORT: PositiveInt = Field(
+ description="Port number for gRPC connection to Qdrant server (default is 6334)",
+ default=6334,
+ )
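
A sketch with the `qdrant-client` package (an assumption), showing how the timeout and gRPC settings would be passed through:

```python
from qdrant_client import QdrantClient

from configs.middleware.vdb.qdrant_config import QdrantConfig

config = QdrantConfig()  # reads QDRANT_* from the environment

client = QdrantClient(
    url=config.QDRANT_URL,
    api_key=config.QDRANT_API_KEY,
    timeout=config.QDRANT_CLIENT_TIMEOUT,
    prefer_grpc=config.QDRANT_GRPC_ENABLED,
    grpc_port=config.QDRANT_GRPC_PORT,
)
print(client.get_collections())
```
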
diff --git a/api/configs/middleware/vdb/relyt_config.py b/api/configs/middleware/vdb/relyt_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ffbea7b19bb8f8f3e06a43fe754059bf2e6a2f4
--- /dev/null
+++ b/api/configs/middleware/vdb/relyt_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class RelytConfig(BaseSettings):
+ """
+ Configuration settings for Relyt database
+ """
+
+ RELYT_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the Relyt server (e.g., 'localhost' or 'relyt.example.com')",
+ default=None,
+ )
+
+ RELYT_PORT: PositiveInt = Field(
+ description="Port number on which the Relyt server is listening (default is 9200)",
+ default=9200,
+ )
+
+ RELYT_USER: Optional[str] = Field(
+ description="Username for authenticating with the Relyt database",
+ default=None,
+ )
+
+ RELYT_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the Relyt database",
+ default=None,
+ )
+
+ RELYT_DATABASE: Optional[str] = Field(
+ description="Name of the Relyt database to connect to (default is 'default')",
+ default="default",
+ )
diff --git a/api/configs/middleware/vdb/tencent_vector_config.py b/api/configs/middleware/vdb/tencent_vector_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..9cf4d07f6fe66042c50b690e99ae264ae328c83d
--- /dev/null
+++ b/api/configs/middleware/vdb/tencent_vector_config.py
@@ -0,0 +1,50 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class TencentVectorDBConfig(BaseSettings):
+ """
+ Configuration settings for Tencent Vector Database
+ """
+
+ TENCENT_VECTOR_DB_URL: Optional[str] = Field(
+ description="URL of the Tencent Vector Database service (e.g., 'https://vectordb.tencentcloudapi.com')",
+ default=None,
+ )
+
+ TENCENT_VECTOR_DB_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with the Tencent Vector Database service",
+ default=None,
+ )
+
+ TENCENT_VECTOR_DB_TIMEOUT: PositiveInt = Field(
+ description="Timeout in seconds for Tencent Vector Database operations (default is 30 seconds)",
+ default=30,
+ )
+
+ TENCENT_VECTOR_DB_USERNAME: Optional[str] = Field(
+ description="Username for authenticating with the Tencent Vector Database (if required)",
+ default=None,
+ )
+
+ TENCENT_VECTOR_DB_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the Tencent Vector Database (if required)",
+ default=None,
+ )
+
+ TENCENT_VECTOR_DB_SHARD: PositiveInt = Field(
+ description="Number of shards for the Tencent Vector Database (default is 1)",
+ default=1,
+ )
+
+ TENCENT_VECTOR_DB_REPLICAS: NonNegativeInt = Field(
+ description="Number of replicas for the Tencent Vector Database (default is 2)",
+ default=2,
+ )
+
+ TENCENT_VECTOR_DB_DATABASE: Optional[str] = Field(
+ description="Name of the specific Tencent Vector Database to connect to",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/tidb_on_qdrant_config.py b/api/configs/middleware/vdb/tidb_on_qdrant_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..d2625af2644785d10308285fa7ac87e314655035
--- /dev/null
+++ b/api/configs/middleware/vdb/tidb_on_qdrant_config.py
@@ -0,0 +1,70 @@
+from typing import Optional
+
+from pydantic import Field, NonNegativeInt, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class TidbOnQdrantConfig(BaseSettings):
+ """
+ Configuration settings for TiDB on Qdrant
+ """
+
+ TIDB_ON_QDRANT_URL: Optional[str] = Field(
+ description="URL of the TiDB on Qdrant server",
+ default=None,
+ )
+
+ TIDB_ON_QDRANT_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with TiDB on Qdrant",
+ default=None,
+ )
+
+ TIDB_ON_QDRANT_CLIENT_TIMEOUT: NonNegativeInt = Field(
+ description="Timeout in seconds for TiDB on Qdrant client operations",
+ default=20,
+ )
+
+ TIDB_ON_QDRANT_GRPC_ENABLED: bool = Field(
+ description="Whether to enable gRPC support for the TiDB on Qdrant connection",
+ default=False,
+ )
+
+ TIDB_ON_QDRANT_GRPC_PORT: PositiveInt = Field(
+ description="Port number for gRPC connection to TiDB on Qdrant (default is 6334)",
+ default=6334,
+ )
+
+ TIDB_PUBLIC_KEY: Optional[str] = Field(
+ description="Public key of the TiDB Cloud account",
+ default=None,
+ )
+
+ TIDB_PRIVATE_KEY: Optional[str] = Field(
+ description="Private key of the TiDB Cloud account",
+ default=None,
+ )
+
+ TIDB_API_URL: Optional[str] = Field(
+ description="URL of the TiDB Cloud API",
+ default=None,
+ )
+
+ TIDB_IAM_API_URL: Optional[str] = Field(
+ description="URL of the TiDB Cloud IAM API",
+ default=None,
+ )
+
+ TIDB_REGION: Optional[str] = Field(
+ description="Region of the TiDB serverless cluster",
+ default="regions/aws-us-east-1",
+ )
+
+ TIDB_PROJECT_ID: Optional[str] = Field(
+ description="Project ID of the TiDB Cloud project",
+ default=None,
+ )
+
+ TIDB_SPEND_LIMIT: Optional[int] = Field(
+ description="Spending limit for the TiDB serverless cluster",
+ default=100,
+ )
diff --git a/api/configs/middleware/vdb/tidb_vector_config.py b/api/configs/middleware/vdb/tidb_vector_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..bc68be69d86ad7cd442fd8dab7fa28ce871c569e
--- /dev/null
+++ b/api/configs/middleware/vdb/tidb_vector_config.py
@@ -0,0 +1,35 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class TiDBVectorConfig(BaseSettings):
+ """
+ Configuration settings for TiDB Vector database
+ """
+
+ TIDB_VECTOR_HOST: Optional[str] = Field(
+ description="Hostname or IP address of the TiDB Vector server (e.g., 'localhost' or 'tidb.example.com')",
+ default=None,
+ )
+
+ TIDB_VECTOR_PORT: Optional[PositiveInt] = Field(
+ description="Port number on which the TiDB Vector server is listening (default is 4000)",
+ default=4000,
+ )
+
+ TIDB_VECTOR_USER: Optional[str] = Field(
+ description="Username for authenticating with the TiDB Vector database",
+ default=None,
+ )
+
+ TIDB_VECTOR_PASSWORD: Optional[str] = Field(
+ description="Password for authenticating with the TiDB Vector database",
+ default=None,
+ )
+
+ TIDB_VECTOR_DATABASE: Optional[str] = Field(
+ description="Name of the TiDB Vector database to connect to",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/upstash_config.py b/api/configs/middleware/vdb/upstash_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..412c56374ad41dd9d153bf63152bb1e1257d6632
--- /dev/null
+++ b/api/configs/middleware/vdb/upstash_config.py
@@ -0,0 +1,20 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class UpstashConfig(BaseSettings):
+ """
+ Configuration settings for Upstash vector database
+ """
+
+ UPSTASH_VECTOR_URL: Optional[str] = Field(
+ description="URL of the Upstash Vector server (e.g., 'https://vector.upstash.io')",
+ default=None,
+ )
+
+ UPSTASH_VECTOR_TOKEN: Optional[str] = Field(
+ description="Token for authenticating with the Upstash Vector server",
+ default=None,
+ )
diff --git a/api/configs/middleware/vdb/vikingdb_config.py b/api/configs/middleware/vdb/vikingdb_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..aba49ff6702ed80434cd67649949daa754465554
--- /dev/null
+++ b/api/configs/middleware/vdb/vikingdb_config.py
@@ -0,0 +1,50 @@
+from typing import Optional
+
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class VikingDBConfig(BaseSettings):
+ """
+ Configuration for connecting to Volcengine VikingDB.
+ Refer to the following documentation for details on obtaining credentials:
+ https://www.volcengine.com/docs/6291/65568
+ """
+
+ VIKINGDB_ACCESS_KEY: Optional[str] = Field(
+ description="The Access Key provided by Volcengine VikingDB for API authentication.",
+ default=None,
+ )
+
+ VIKINGDB_SECRET_KEY: Optional[str] = Field(
+ description="The Secret Key provided by Volcengine VikingDB for API authentication.",
+ default=None,
+ )
+
+ VIKINGDB_REGION: str = Field(
+ description="The region of the Volcengine VikingDB service (e.g., 'cn-shanghai', 'cn-beijing').",
+ default="cn-shanghai",
+ )
+
+ VIKINGDB_HOST: str = Field(
+ description="The host of the Volcengine VikingDB service "
+ "(e.g., 'api-vikingdb.volces.com', 'api-vikingdb.mlp.cn-shanghai.volces.com').",
+ default="api-vikingdb.mlp.cn-shanghai.volces.com",
+ )
+
+ VIKINGDB_SCHEME: str = Field(
+ description="The scheme of the Volcengine VikingDB service (e.g., 'http', 'https').",
+ default="http",
+ )
+
+ VIKINGDB_CONNECTION_TIMEOUT: int = Field(
+ description="The connection timeout of the Volcengine VikingDB service.",
+ default=30,
+ )
+
+ VIKINGDB_SOCKET_TIMEOUT: int = Field(
+ description="The socket timeout of the Volcengine VikingDB service.",
+ default=30,
+ )
diff --git a/api/configs/middleware/vdb/weaviate_config.py b/api/configs/middleware/vdb/weaviate_config.py
new file mode 100644
index 0000000000000000000000000000000000000000..25000e8bde290762a4d52f04d1accbdf02097f2c
--- /dev/null
+++ b/api/configs/middleware/vdb/weaviate_config.py
@@ -0,0 +1,30 @@
+from typing import Optional
+
+from pydantic import Field, PositiveInt
+from pydantic_settings import BaseSettings
+
+
+class WeaviateConfig(BaseSettings):
+ """
+ Configuration settings for Weaviate vector database
+ """
+
+ WEAVIATE_ENDPOINT: Optional[str] = Field(
+ description="URL of the Weaviate server (e.g., 'http://localhost:8080' or 'https://weaviate.example.com')",
+ default=None,
+ )
+
+ WEAVIATE_API_KEY: Optional[str] = Field(
+ description="API key for authenticating with the Weaviate server",
+ default=None,
+ )
+
+ WEAVIATE_GRPC_ENABLED: bool = Field(
+ description="Whether to enable gRPC for Weaviate connection (True for gRPC, False for HTTP)",
+ default=True,
+ )
+
+ WEAVIATE_BATCH_SIZE: PositiveInt = Field(
+ description="Number of objects to be processed in a single batch operation (default is 100)",
+ default=100,
+ )
diff --git a/api/configs/packaging/__init__.py b/api/configs/packaging/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..fd3d6afb3a6feb14af0f61614f9e9598ddf3d739
--- /dev/null
+++ b/api/configs/packaging/__init__.py
@@ -0,0 +1,18 @@
+from pydantic import Field
+from pydantic_settings import BaseSettings
+
+
+class PackagingInfo(BaseSettings):
+ """
+ Packaging build information
+ """
+
+ CURRENT_VERSION: str = Field(
+ description="Dify version",
+ default="0.15.3",
+ )
+
+ COMMIT_SHA: str = Field(
+ description="SHA-1 checksum of the git commit used to build the app",
+ default="",
+ )
diff --git a/api/configs/remote_settings_sources/__init__.py b/api/configs/remote_settings_sources/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f3878d13b65c8684afff5b15134ada35717110a
--- /dev/null
+++ b/api/configs/remote_settings_sources/__init__.py
@@ -0,0 +1,17 @@
+from typing import Optional
+
+from pydantic import Field
+
+from .apollo import ApolloSettingsSourceInfo
+from .base import RemoteSettingsSource
+from .enums import RemoteSettingsSourceName
+
+
+class RemoteSettingsSourceConfig(ApolloSettingsSourceInfo):
+ REMOTE_SETTINGS_SOURCE_NAME: RemoteSettingsSourceName | str = Field(
+ description="name of remote config source",
+ default="",
+ )
+
+
+__all__ = ["RemoteSettingsSource", "RemoteSettingsSourceConfig", "RemoteSettingsSourceName"]
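
The remote source itself is chosen through environment variables, so enabling Apollo is purely configuration. A sketch (all values hypothetical):

```python
import os

# Point Dify at an Apollo config service before the settings object is built.
os.environ["REMOTE_SETTINGS_SOURCE_NAME"] = "apollo"
os.environ["APOLLO_APP_ID"] = "dify"
os.environ["APOLLO_CLUSTER"] = "default"
os.environ["APOLLO_CONFIG_URL"] = "http://apollo.example.com:8080"
os.environ["APOLLO_NAMESPACE"] = "application"

from configs import dify_config  # settings sources are resolved when this is built
```
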
diff --git a/api/configs/remote_settings_sources/apollo/__init__.py b/api/configs/remote_settings_sources/apollo/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f02f7dc9ff625802931507163b34445cc8f5260d
--- /dev/null
+++ b/api/configs/remote_settings_sources/apollo/__init__.py
@@ -0,0 +1,55 @@
+from collections.abc import Mapping
+from typing import Any, Optional
+
+from pydantic import Field
+from pydantic.fields import FieldInfo
+from pydantic_settings import BaseSettings
+
+from configs.remote_settings_sources.base import RemoteSettingsSource
+
+from .client import ApolloClient
+
+
+class ApolloSettingsSourceInfo(BaseSettings):
+ """
+ Packaging build information
+ """
+
+ APOLLO_APP_ID: Optional[str] = Field(
+ description="apollo app_id",
+ default=None,
+ )
+
+ APOLLO_CLUSTER: Optional[str] = Field(
+ description="apollo cluster",
+ default=None,
+ )
+
+ APOLLO_CONFIG_URL: Optional[str] = Field(
+ description="apollo config url",
+ default=None,
+ )
+
+ APOLLO_NAMESPACE: Optional[str] = Field(
+ description="apollo namespace",
+ default=None,
+ )
+
+
+class ApolloSettingsSource(RemoteSettingsSource):
+ def __init__(self, configs: Mapping[str, Any]):
+ self.client = ApolloClient(
+ app_id=configs["APOLLO_APP_ID"],
+ cluster=configs["APOLLO_CLUSTER"],
+ config_url=configs["APOLLO_CONFIG_URL"],
+ start_hot_update=False,
+ _notification_map={configs["APOLLO_NAMESPACE"]: -1},
+ )
+ self.namespace = configs["APOLLO_NAMESPACE"]
+ self.remote_configs = self.client.get_all_dicts(self.namespace)
+
+ def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+ if not isinstance(self.remote_configs, dict):
+ raise ValueError(f"remote configs is not dict, but {type(self.remote_configs)}")
+ field_value = self.remote_configs.get(field_name)
+ return field_value, field_name, False
diff --git a/api/configs/remote_settings_sources/apollo/client.py b/api/configs/remote_settings_sources/apollo/client.py
new file mode 100644
index 0000000000000000000000000000000000000000..03c64ea00f018544c225fe743c8066b25b52b113
--- /dev/null
+++ b/api/configs/remote_settings_sources/apollo/client.py
@@ -0,0 +1,304 @@
+import hashlib
+import json
+import logging
+import os
+import threading
+import time
+from collections.abc import Mapping
+from pathlib import Path
+
+from .python_3x import http_request, makedirs_wrapper
+from .utils import (
+ CONFIGURATIONS,
+ NAMESPACE_NAME,
+ NOTIFICATION_ID,
+ get_value_from_dict,
+ init_ip,
+ no_key_cache_key,
+ signature,
+ url_encode_wrapper,
+)
+
+logger = logging.getLogger(__name__)
+
+
+class ApolloClient:
+ def __init__(
+ self,
+ config_url,
+ app_id,
+ cluster="default",
+ secret="",
+ start_hot_update=True,
+ change_listener=None,
+ _notification_map=None,
+ ):
+ # Core routing parameters
+ self.config_url = config_url
+ self.cluster = cluster
+ self.app_id = app_id
+
+ # Non-core parameters
+ self.ip = init_ip()
+ self.secret = secret
+
+ # Check the parameter variables
+
+ # Private control variables
+ self._cycle_time = 5
+ self._stopping = False
+ self._cache = {}
+ self._no_key = {}
+ self._hash = {}
+ self._pull_timeout = 75
+ self._cache_file_path = os.path.expanduser("~") + "/.dify/config/remote-settings/apollo/cache/"
+ self._long_poll_thread = None
+ self._change_listener = change_listener # "add" "delete" "update"
+ if _notification_map is None:
+ _notification_map = {"application": -1}
+ self._notification_map = _notification_map
+ self.last_release_key = None
+ # Private startup method
+ self._path_checker()
+ if start_hot_update:
+ self._start_hot_update()
+
+ # start the heartbeat thread
+ heartbeat = threading.Thread(target=self._heart_beat)
+ heartbeat.daemon = True
+ heartbeat.start()
+
+ def get_json_from_net(self, namespace="application"):
+ url = "{}/configs/{}/{}/{}?releaseKey={}&ip={}".format(
+ self.config_url, self.app_id, self.cluster, namespace, "", self.ip
+ )
+ try:
+ code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
+ if code == 200:
+ if not body:
+ logger.error(f"get_json_from_net load configs failed, body is {body}")
+ return None
+ data = json.loads(body)
+ data = data["configurations"]
+ return_data = {CONFIGURATIONS: data}
+ return return_data
+ else:
+ return None
+ except Exception:
+ logger.exception("an error occurred in get_json_from_net")
+ return None
+
+ def get_value(self, key, default_val=None, namespace="application"):
+ try:
+ # read memory configuration
+ namespace_cache = self._cache.get(namespace)
+ val = get_value_from_dict(namespace_cache, key)
+ if val is not None:
+ return val
+
+ no_key = no_key_cache_key(namespace, key)
+ if no_key in self._no_key:
+ return default_val
+
+ # read the network configuration
+ namespace_data = self.get_json_from_net(namespace)
+ val = get_value_from_dict(namespace_data, key)
+ if val is not None:
+ self._update_cache_and_file(namespace_data, namespace)
+ return val
+
+ # read the file configuration
+ namespace_cache = self._get_local_cache(namespace)
+ val = get_value_from_dict(namespace_cache, key)
+ if val is not None:
+ self._update_cache_and_file(namespace_cache, namespace)
+ return val
+
+ # If all of them are not obtained, the default value is returned
+ # and the local cache is set to None
+ self._set_local_cache_none(namespace, key)
+ return default_val
+ except Exception:
+ logger.exception("get_value has error, [key is %s], [namespace is %s]", key, namespace)
+ return default_val
+
+ # Record that a key is absent from a namespace without caching the default value,
+ # so every call stays correct in real time: if the default value were cached here,
+ # two calls passing different default values could get a wrong result back.
+ def _set_local_cache_none(self, namespace, key):
+ no_key = no_key_cache_key(namespace, key)
+ self._no_key[no_key] = key
+
+ def _start_hot_update(self):
+ self._long_poll_thread = threading.Thread(target=self._listener)
+ # Run as a daemon thread so it exits automatically
+ # when the main thread terminates.
+ self._long_poll_thread.daemon = True
+ self._long_poll_thread.start()
+
+ def stop(self):
+ self._stopping = True
+ logger.info("Stopping listener...")
+
+ # Invoke the registered change listener; log and swallow any exception it raises
+ def _call_listener(self, namespace, old_kv, new_kv):
+ if self._change_listener is None:
+ return
+ if old_kv is None:
+ old_kv = {}
+ if new_kv is None:
+ new_kv = {}
+ try:
+ for key in old_kv:
+ new_value = new_kv.get(key)
+ old_value = old_kv.get(key)
+ if new_value is None:
+ # If the new value is None, the key (and its value) was deleted.
+ self._change_listener("delete", namespace, key, old_value)
+ continue
+ if new_value != old_value:
+ self._change_listener("update", namespace, key, new_value)
+ continue
+ for key in new_kv:
+ new_value = new_kv.get(key)
+ old_value = old_kv.get(key)
+ if old_value is None:
+ self._change_listener("add", namespace, key, new_value)
+ except BaseException as e:
+ logger.warning(str(e))
+
+ def _path_checker(self):
+ if not os.path.isdir(self._cache_file_path):
+ makedirs_wrapper(self._cache_file_path)
+
+ # update the local cache and file cache
+ def _update_cache_and_file(self, namespace_data, namespace="application"):
+ # update the local cache
+ self._cache[namespace] = namespace_data
+ # update the file cache
+ new_string = json.dumps(namespace_data)
+ new_hash = hashlib.md5(new_string.encode("utf-8")).hexdigest()
+ if self._hash.get(namespace) == new_hash:
+ pass
+ else:
+ file_path = Path(self._cache_file_path) / f"{self.app_id}_configuration_{namespace}.txt"
+ file_path.write_text(new_string)
+ self._hash[namespace] = new_hash
+
+ # get the configuration from the local file
+ def _get_local_cache(self, namespace="application"):
+ cache_file_path = os.path.join(self._cache_file_path, f"{self.app_id}_configuration_{namespace}.txt")
+ if os.path.isfile(cache_file_path):
+ with open(cache_file_path) as f:
+ result = json.loads(f.readline())
+ return result
+ return {}
+
+ def _long_poll(self):
+ notifications = []
+ for key in self._cache:
+ namespace_data = self._cache[key]
+ notification_id = -1
+ if NOTIFICATION_ID in namespace_data:
+ notification_id = self._cache[key][NOTIFICATION_ID]
+ notifications.append({NAMESPACE_NAME: key, NOTIFICATION_ID: notification_id})
+ try:
+ # nothing to poll for yet; return immediately
+ if len(notifications) == 0:
+ return
+ url = "{}/notifications/v2".format(self.config_url)
+ params = {
+ "appId": self.app_id,
+ "cluster": self.cluster,
+ "notifications": json.dumps(notifications, ensure_ascii=False),
+ }
+ param_str = url_encode_wrapper(params)
+ url = url + "?" + param_str
+ code, body = http_request(url, self._pull_timeout, headers=self._sign_headers(url))
+ http_code = code
+ if http_code == 304:
+ logger.debug("No change, loop...")
+ return
+ if http_code == 200:
+ if not body:
+ logger.error(f"_long_poll load configs failed,body is {body}")
+ return
+ data = json.loads(body)
+ for entry in data:
+ namespace = entry[NAMESPACE_NAME]
+ n_id = entry[NOTIFICATION_ID]
+ logger.info("%s has changes: notificationId=%d", namespace, n_id)
+ self._get_net_and_set_local(namespace, n_id, call_change=True)
+ return
+ else:
+ logger.warning("Sleep...")
+ except Exception as e:
+ logger.warning(str(e))
+
+ def _get_net_and_set_local(self, namespace, n_id, call_change=False):
+ namespace_data = self.get_json_from_net(namespace)
+ if not namespace_data:
+ return
+ namespace_data[NOTIFICATION_ID] = n_id
+ old_namespace = self._cache.get(namespace)
+ self._update_cache_and_file(namespace_data, namespace)
+ if self._change_listener is not None and call_change and old_namespace:
+ old_kv = old_namespace.get(CONFIGURATIONS)
+ new_kv = namespace_data.get(CONFIGURATIONS)
+ self._call_listener(namespace, old_kv, new_kv)
+
+ def _listener(self):
+ logger.info("start long_poll")
+ while not self._stopping:
+ self._long_poll()
+ time.sleep(self._cycle_time)
+ logger.info("stopped, long_poll")
+
+ # Attach the Apollo access-key signature headers when a secret is configured
+ def _sign_headers(self, url: str) -> Mapping[str, str]:
+ headers: dict[str, str] = {}
+ if self.secret == "":
+ return headers
+ uri = url[len(self.config_url) : len(url)]
+ time_unix_now = str(int(round(time.time() * 1000)))
+ headers["Authorization"] = "Apollo " + self.app_id + ":" + signature(time_unix_now, uri, self.secret)
+ headers["Timestamp"] = time_unix_now
+ return headers
+
+ def _heart_beat(self):
+ while not self._stopping:
+ for namespace in self._notification_map:
+ self._do_heart_beat(namespace)
+ time.sleep(60 * 10) # every 10 minutes
+
+ def _do_heart_beat(self, namespace):
+ url = "{}/configs/{}/{}/{}?ip={}".format(self.config_url, self.app_id, self.cluster, namespace, self.ip)
+ try:
+ code, body = http_request(url, timeout=3, headers=self._sign_headers(url))
+ if code == 200:
+ if not body:
+ logger.error(f"_do_heart_beat load configs failed,body is {body}")
+ return None
+ data = json.loads(body)
+ if self.last_release_key == data["releaseKey"]:
+ return None
+ self.last_release_key = data["releaseKey"]
+ data = data["configurations"]
+ self._update_cache_and_file(data, namespace)
+ else:
+ return None
+ except Exception:
+ logger.exception("an error occurred in _do_heart_beat")
+ return None
+
+ def get_all_dicts(self, namespace):
+ namespace_data = self._cache.get(namespace)
+ if namespace_data is None:
+ net_namespace_data = self.get_json_from_net(namespace)
+ if not net_namespace_data:
+ return namespace_data
+ namespace_data = net_namespace_data.get(CONFIGURATIONS)
+ if namespace_data:
+ self._update_cache_and_file(namespace_data, namespace)
+ return namespace_data
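
A usage sketch of the client on its own (all values hypothetical), showing the lookup order implemented by `get_value`: in-memory cache, then the config service, then the on-disk cache, then the caller's default:

```python
from configs.remote_settings_sources.apollo.client import ApolloClient

client = ApolloClient(
    config_url="http://apollo.example.com:8080",  # hypothetical address
    app_id="dify",
    cluster="default",
    start_hot_update=False,  # skip the long-poll thread for a one-off read
)

log_level = client.get_value("LOG_LEVEL", default_val="INFO", namespace="application")
print(log_level)
```
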
diff --git a/api/configs/remote_settings_sources/apollo/python_3x.py b/api/configs/remote_settings_sources/apollo/python_3x.py
new file mode 100644
index 0000000000000000000000000000000000000000..6a5f381991220647ce1527db0cce8e92c9e83ce8
--- /dev/null
+++ b/api/configs/remote_settings_sources/apollo/python_3x.py
@@ -0,0 +1,41 @@
+import logging
+import os
+import ssl
+import urllib.request
+from urllib import parse
+from urllib.error import HTTPError
+
+# Create a deliberately relaxed SSL context: hostname checking and certificate verification are disabled
+ssl_context = ssl.create_default_context()
+ssl_context.set_ciphers("HIGH:!DH:!aNULL")
+ssl_context.check_hostname = False
+ssl_context.verify_mode = ssl.CERT_NONE
+
+# Create an opener object and pass in a custom SSL context
+opener = urllib.request.build_opener(urllib.request.HTTPSHandler(context=ssl_context))
+
+urllib.request.install_opener(opener)
+
+logger = logging.getLogger(__name__)
+
+
+def http_request(url, timeout, headers=None):
+ try:
+ request = urllib.request.Request(url, headers=headers or {})
+ res = urllib.request.urlopen(request, timeout=timeout)
+ body = res.read().decode("utf-8")
+ return res.code, body
+ except HTTPError as e:
+ if e.code == 304:
+ logger.warning("http_request error, code is 304, maybe you should check secret")
+ return 304, None
+ logger.warning("http_request error, code is %d, msg is %s", e.code, e.msg)
+ raise e
+
+
+def url_encode(params):
+ return parse.urlencode(params)
+
+
+def makedirs_wrapper(path):
+ os.makedirs(path, exist_ok=True)
diff --git a/api/configs/remote_settings_sources/apollo/utils.py b/api/configs/remote_settings_sources/apollo/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..6136112e03d18e2dba2c0334e1cfd048b06a699f
--- /dev/null
+++ b/api/configs/remote_settings_sources/apollo/utils.py
@@ -0,0 +1,51 @@
+import hashlib
+import socket
+
+from .python_3x import url_encode
+
+# define constants
+CONFIGURATIONS = "configurations"
+NOTIFICATION_ID = "notificationId"
+NAMESPACE_NAME = "namespaceName"
+
+
+# Sign the timestamp and URI with the secret (HMAC-SHA1, base64-encoded)
+def signature(timestamp, uri, secret):
+ import base64
+ import hmac
+
+ string_to_sign = timestamp + "\n" + uri
+ hmac_code = hmac.new(secret.encode(), string_to_sign.encode(), hashlib.sha1).digest()
+ return base64.b64encode(hmac_code).decode()
+
+
+def url_encode_wrapper(params):
+ return url_encode(params)
+
+
+def no_key_cache_key(namespace, key):
+ return "{}{}{}".format(namespace, len(namespace), key)
+
+
+# Return the value for the key from the namespace cache, or None if it is absent
+def get_value_from_dict(namespace_cache, key):
+ if namespace_cache:
+ kv_data = namespace_cache.get(CONFIGURATIONS)
+ if kv_data is None:
+ return None
+ if key in kv_data:
+ return kv_data[key]
+ return None
+
+
+def init_ip():
+ ip = ""
+ s = None
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ s.connect(("8.8.8.8", 53))
+ ip = s.getsockname()[0]
+ finally:
+ if s:
+ s.close()
+ return ip
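
A sketch of how `signature` pairs with a timestamp to form the Authorization header the client attaches (values hypothetical; the header format mirrors `_sign_headers` in client.py):

```python
import time

from configs.remote_settings_sources.apollo.utils import signature

secret = "my-apollo-secret"  # hypothetical secret
uri = "/configs/dify/default/application?releaseKey=&ip=10.0.0.1"
timestamp = str(int(round(time.time() * 1000)))

headers = {
    "Authorization": "Apollo dify:" + signature(timestamp, uri, secret),  # "<app_id>:<signature>"
    "Timestamp": timestamp,
}
print(headers)
```
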
diff --git a/api/configs/remote_settings_sources/base.py b/api/configs/remote_settings_sources/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..a96ffdfb4bc7df40ddf5ee0272a52e051110d1da
--- /dev/null
+++ b/api/configs/remote_settings_sources/base.py
@@ -0,0 +1,15 @@
+from collections.abc import Mapping
+from typing import Any
+
+from pydantic.fields import FieldInfo
+
+
+class RemoteSettingsSource:
+ def __init__(self, configs: Mapping[str, Any]):
+ pass
+
+ def get_field_value(self, field: FieldInfo, field_name: str) -> tuple[Any, str, bool]:
+ raise NotImplementedError
+
+ def prepare_field_value(self, field_name: str, field: FieldInfo, value: Any, value_is_complex: bool) -> Any:
+ return value
diff --git a/api/configs/remote_settings_sources/enums.py b/api/configs/remote_settings_sources/enums.py
new file mode 100644
index 0000000000000000000000000000000000000000..3081f2950ff7075e8a710e245e4c52ddcdec8e5f
--- /dev/null
+++ b/api/configs/remote_settings_sources/enums.py
@@ -0,0 +1,5 @@
+from enum import StrEnum
+
+
+class RemoteSettingsSourceName(StrEnum):
+ APOLLO = "apollo"
diff --git a/api/constants/__init__.py b/api/constants/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4500ef4306fc2a744d8a3f6be1572a64a2e77d27
--- /dev/null
+++ b/api/constants/__init__.py
@@ -0,0 +1,24 @@
+from configs import dify_config
+
+HIDDEN_VALUE = "[__HIDDEN__]"
+UUID_NIL = "00000000-0000-0000-0000-000000000000"
+
+IMAGE_EXTENSIONS = ["jpg", "jpeg", "png", "webp", "gif", "svg"]
+IMAGE_EXTENSIONS.extend([ext.upper() for ext in IMAGE_EXTENSIONS])
+
+VIDEO_EXTENSIONS = ["mp4", "mov", "mpeg", "mpga"]
+VIDEO_EXTENSIONS.extend([ext.upper() for ext in VIDEO_EXTENSIONS])
+
+AUDIO_EXTENSIONS = ["mp3", "m4a", "wav", "webm", "amr"]
+AUDIO_EXTENSIONS.extend([ext.upper() for ext in AUDIO_EXTENSIONS])
+
+
+if dify_config.ETL_TYPE == "Unstructured":
+ DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls"]
+ DOCUMENT_EXTENSIONS.extend(("docx", "csv", "eml", "msg", "pptx", "xml", "epub"))
+ if dify_config.UNSTRUCTURED_API_URL:
+ DOCUMENT_EXTENSIONS.append("ppt")
+ DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
+else:
+ DOCUMENT_EXTENSIONS = ["txt", "markdown", "md", "mdx", "pdf", "html", "htm", "xlsx", "xls", "docx", "csv"]
+ DOCUMENT_EXTENSIONS.extend([ext.upper() for ext in DOCUMENT_EXTENSIONS])
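
Because each list also contains the upper-case variants, a plain membership test covers the common casings; a mixed-case name like `Report.Pdf` would still need explicit normalization. A small sketch (the helper is illustrative, not part of this module, and the import assumes a configured `api/` environment):

```python
from pathlib import Path

from constants import DOCUMENT_EXTENSIONS


def is_supported_document(filename: str) -> bool:
    # Path.suffix includes the leading dot, e.g. ".pdf" -> "pdf"
    return Path(filename).suffix.removeprefix(".") in DOCUMENT_EXTENSIONS


print(is_supported_document("report.pdf"))  # True
print(is_supported_document("REPORT.PDF"))  # True (upper-case variants are in the list)
print(is_supported_document("photo.tiff"))  # False
```
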
diff --git a/api/constants/languages.py b/api/constants/languages.py
new file mode 100644
index 0000000000000000000000000000000000000000..1157ec430708f366009a043f29d7786899aa52c5
--- /dev/null
+++ b/api/constants/languages.py
@@ -0,0 +1,32 @@
+language_timezone_mapping = {
+ "en-US": "America/New_York",
+ "zh-Hans": "Asia/Shanghai",
+ "zh-Hant": "Asia/Taipei",
+ "pt-BR": "America/Sao_Paulo",
+ "es-ES": "Europe/Madrid",
+ "fr-FR": "Europe/Paris",
+ "de-DE": "Europe/Berlin",
+ "ja-JP": "Asia/Tokyo",
+ "ko-KR": "Asia/Seoul",
+ "ru-RU": "Europe/Moscow",
+ "it-IT": "Europe/Rome",
+ "uk-UA": "Europe/Kyiv",
+ "vi-VN": "Asia/Ho_Chi_Minh",
+ "ro-RO": "Europe/Bucharest",
+ "pl-PL": "Europe/Warsaw",
+ "hi-IN": "Asia/Kolkata",
+ "tr-TR": "Europe/Istanbul",
+ "fa-IR": "Asia/Tehran",
+ "sl-SI": "Europe/Ljubljana",
+ "th-TH": "Asia/Bangkok",
+}
+
+languages = list(language_timezone_mapping.keys())
+
+
+def supported_language(lang):
+ if lang in languages:
+ return lang
+
+ error = "{lang} is not a valid language.".format(lang=lang)
+ raise ValueError(error)
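
Usage sketch: `supported_language` validates and returns the tag, which then keys the timezone mapping:

```python
from constants.languages import language_timezone_mapping, supported_language

lang = supported_language("ja-JP")      # returns "ja-JP" unchanged
print(language_timezone_mapping[lang])  # -> Asia/Tokyo

supported_language("xx-XX")             # raises ValueError: xx-XX is not a valid language.
```
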
diff --git a/api/constants/model_template.py b/api/constants/model_template.py
new file mode 100644
index 0000000000000000000000000000000000000000..c26d8c018610d0ed7c8e33f4f22a551e37f5e1dd
--- /dev/null
+++ b/api/constants/model_template.py
@@ -0,0 +1,84 @@
+import json
+from collections.abc import Mapping
+
+from models.model import AppMode
+
+default_app_templates: Mapping[AppMode, Mapping] = {
+ # workflow default mode
+ AppMode.WORKFLOW: {
+ "app": {
+ "mode": AppMode.WORKFLOW.value,
+ "enable_site": True,
+ "enable_api": True,
+ }
+ },
+ # completion default mode
+ AppMode.COMPLETION: {
+ "app": {
+ "mode": AppMode.COMPLETION.value,
+ "enable_site": True,
+ "enable_api": True,
+ },
+ "model_config": {
+ "model": {
+ "provider": "openai",
+ "name": "gpt-4o",
+ "mode": "chat",
+ "completion_params": {},
+ },
+ "user_input_form": json.dumps(
+ [
+ {
+ "paragraph": {
+ "label": "Query",
+ "variable": "query",
+ "required": True,
+ "default": "",
+ },
+ },
+ ]
+ ),
+ "pre_prompt": "{{query}}",
+ },
+ },
+ # chat default mode
+ AppMode.CHAT: {
+ "app": {
+ "mode": AppMode.CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
+ },
+ "model_config": {
+ "model": {
+ "provider": "openai",
+ "name": "gpt-4o",
+ "mode": "chat",
+ "completion_params": {},
+ },
+ },
+ },
+ # advanced-chat default mode
+ AppMode.ADVANCED_CHAT: {
+ "app": {
+ "mode": AppMode.ADVANCED_CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
+ },
+ },
+ # agent-chat default mode
+ AppMode.AGENT_CHAT: {
+ "app": {
+ "mode": AppMode.AGENT_CHAT.value,
+ "enable_site": True,
+ "enable_api": True,
+ },
+ "model_config": {
+ "model": {
+ "provider": "openai",
+ "name": "gpt-4o",
+ "mode": "chat",
+ "completion_params": {},
+ },
+ },
+ },
+}
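
Note that only the model-backed modes (COMPLETION, CHAT, AGENT_CHAT) ship a `model_config`; the workflow-based modes define just the `app` block, so consumers should treat `model_config` as optional. A minimal consumption sketch, assuming the project imports used by the module itself (the helper name is hypothetical):

```python
from collections.abc import Mapping

from constants.model_template import default_app_templates
from models.model import AppMode  # same project import as the module above


def template_for(mode: AppMode) -> tuple[Mapping, Mapping | None]:
    """Hypothetical helper: split a template into app defaults and model config."""
    template = default_app_templates[mode]
    # "app" exists for every mode; "model_config" only for model-backed ones.
    return template["app"], template.get("model_config")


app_defaults, model_config = template_for(AppMode.WORKFLOW)
assert model_config is None  # workflow modes carry no default model config
```
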
diff --git a/api/constants/recommended_apps.json b/api/constants/recommended_apps.json
new file mode 100644
index 0000000000000000000000000000000000000000..3779fb0180ede4a21f9cd41a22c02e70acdad0c6
--- /dev/null
+++ b/api/constants/recommended_apps.json
@@ -0,0 +1,580 @@
+{
+ "recommended_apps": {
+ "en-US": {
+ "categories": [
+ "Agent",
+ "Workflow",
+ "HR",
+ "Programming",
+ "Writing",
+ "Assistant"
+ ],
+ "recommended_apps": [
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
+ "mode": "chat",
+ "name": "Website Generator"
+ },
+ "app_id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
+ "category": "Programming",
+ "copyright": null,
+ "description": null,
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤑",
+ "icon_background": "#E4FBCC",
+ "id": "a23b57fa-85da-49c0-a571-3aff375976c1",
+ "mode": "agent-chat",
+ "name": "Investment Analysis Report Copilot"
+ },
+ "app_id": "a23b57fa-85da-49c0-a571-3aff375976c1",
+ "category": "Agent",
+ "copyright": "Dify.AI",
+ "description": "Welcome to your personalized Investment Analysis Copilot service, where we delve into the depths of stock analysis to provide you with comprehensive insights. \n",
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
+ "mode": "advanced-chat",
+ "name": "Workflow Planning Assistant "
+ },
+ "app_id": "f3303a7d-a81c-404e-b401-1f8711c998c1",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "An assistant that helps you plan and select the right node for a workflow (V0.6.0). ",
+ "is_listed": true,
+ "position": 4,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "e9d92058-7d20-4904-892f-75d90bef7587",
+ "mode": "advanced-chat",
+ "name": "Automated Email Reply "
+ },
+ "app_id": "e9d92058-7d20-4904-892f-75d90bef7587",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Reply emails using Gmail API. It will automatically retrieve email in your inbox and create a response in Gmail. \nConfigure your Gmail API in Google Cloud Console. ",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
+ "mode": "workflow",
+ "name": "Book Translation "
+ },
+ "app_id": "98b87f88-bd22-4d86-8b74-86beba5e0ed4",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "A workflow designed to translate a full book up to 15000 tokens per run. Uses Code node to separate text into chunks and Iteration to translate each chunk. ",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
+ "mode": "chat",
+ "name": "Python bug fixer"
+ },
+ "app_id": "cae337e6-aec5-4c7b-beca-d6f1a808bd5e",
+ "category": "Programming",
+ "copyright": null,
+ "description": null,
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
+ "mode": "chat",
+ "name": "Code Interpreter"
+ },
+ "app_id": "d077d587-b072-4f2c-b631-69ed1e7cdc0f",
+ "category": "Programming",
+ "copyright": "Copyright 2023 Dify",
+ "description": "Code interpreter, clarifying the syntax and semantics of the code.",
+ "is_listed": true,
+ "position": 13,
+ "privacy_policy": "https://dify.ai"
+ },
+ {
+ "app": {
+ "icon": "🎨",
+ "icon_background": "#E4FBCC",
+ "id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
+ "mode": "agent-chat",
+ "name": "SVG Logo Design "
+ },
+ "app_id": "73fbb5f1-c15d-4d74-9cc8-46d9db9b2cca",
+ "category": "Agent",
+ "copyright": "Dify.AI",
+ "description": "Hello, I am your creative partner in bringing ideas to vivid life! I can assist you in creating stunning designs by leveraging abilities of DALL·E 3. ",
+ "is_listed": true,
+ "position": 6,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "5efb98d7-176b-419c-b6ef-50767391ab62",
+ "mode": "advanced-chat",
+ "name": "Long Story Generator (Iteration) "
+ },
+ "app_id": "5efb98d7-176b-419c-b6ef-50767391ab62",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "A workflow demonstrating how to use Iteration node to generate long article that is longer than the context length of LLMs. ",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "f00c4531-6551-45ee-808f-1d7903099515",
+ "mode": "workflow",
+ "name": "Text Summarization Workflow"
+ },
+ "app_id": "f00c4531-6551-45ee-808f-1d7903099515",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Based on users' choice, retrieve external knowledge to more accurately summarize articles.",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🔢",
+ "icon_background": "#E4FBCC",
+ "id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
+ "mode": "agent-chat",
+ "name": "YouTube Channel Data Analysis"
+ },
+ "app_id": "be591209-2ca8-410f-8f3b-ca0e530dd638",
+ "category": "Agent",
+ "copyright": "Dify.AI",
+ "description": "I am a YouTube Channel Data Analysis Copilot, I am here to provide expert data analysis tailored to your needs. ",
+ "is_listed": true,
+ "position": 6,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
+ "mode": "chat",
+ "name": "Article Grading Bot"
+ },
+ "app_id": "a747f7b4-c48b-40d6-b313-5e628232c05f",
+ "category": "Writing",
+ "copyright": null,
+ "description": "Assess the quality of articles and text based on user defined criteria. ",
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
+ "mode": "workflow",
+ "name": "SEO Blog Generator"
+ },
+ "app_id": "18f3bd03-524d-4d7a-8374-b30dbe7c69d5",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Workflow for retrieving information from the internet, followed by segmented generation of SEO blogs.",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": null,
+ "id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
+ "mode": "completion",
+ "name": "SQL Creator"
+ },
+ "app_id": "050ef42e-3e0c-40c1-a6b6-a64f2c49d744",
+ "category": "Programming",
+ "copyright": "Copyright 2023 Dify",
+ "description": "Write SQL from natural language by pasting in your schema with the request.Please describe your query requirements in natural language and select the target database type.",
+ "is_listed": true,
+ "position": 13,
+ "privacy_policy": "https://dify.ai"
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "f06bf86b-d50c-4895-a942-35112dbe4189",
+ "mode": "workflow",
+ "name": "Sentiment Analysis "
+ },
+ "app_id": "f06bf86b-d50c-4895-a942-35112dbe4189",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Batch sentiment analysis of text, followed by JSON output of sentiment classification along with scores.",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
+ "mode": "chat",
+ "name": "Strategic Consulting Expert"
+ },
+ "app_id": "7e8ca1ae-02f2-4b5f-979e-62d19133bee2",
+ "category": "Assistant",
+ "copyright": "Copyright 2023 Dify",
+ "description": "I can answer your questions related to strategic marketing.",
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": "https://dify.ai"
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": null,
+ "id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
+ "mode": "completion",
+ "name": "Code Converter"
+ },
+ "app_id": "4006c4b2-0735-4f37-8dbb-fb1a8c5bd87a",
+ "category": "Programming",
+ "copyright": "Copyright 2023 Dify",
+ "description": "This is an application that provides the ability to convert code snippets in multiple programming languages. You can input the code you wish to convert, select the target programming language, and get the desired output.",
+ "is_listed": true,
+ "position": 10,
+ "privacy_policy": "https://dify.ai"
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
+ "mode": "advanced-chat",
+ "name": "Question Classifier + Knowledge + Chatbot "
+ },
+ "app_id": "d9f6b733-e35d-4a40-9f38-ca7bbfa009f7",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Basic Workflow Template, a chatbot capable of identifying intents alongside with a knowledge base.",
+ "is_listed": true,
+ "position": 4,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": null,
+ "id": "127efead-8944-4e20-ba9d-12402eb345e0",
+ "mode": "chat",
+ "name": "AI Front-end interviewer"
+ },
+ "app_id": "127efead-8944-4e20-ba9d-12402eb345e0",
+ "category": "HR",
+ "copyright": "Copyright 2023 Dify",
+ "description": "A simulated front-end interviewer that tests the skill level of front-end development through questioning.",
+ "is_listed": true,
+ "position": 19,
+ "privacy_policy": "https://dify.ai"
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
+ "mode": "advanced-chat",
+ "name": "Knowledge Retrieval + Chatbot "
+ },
+ "app_id": "e9870913-dd01-4710-9f06-15d4180ca1ce",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Basic Workflow Template, A chatbot with a knowledge base. ",
+ "is_listed": true,
+ "position": 4,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
+ "mode": "workflow",
+ "name": "Email Assistant Workflow "
+ },
+ "app_id": "dd5b6353-ae9b-4bce-be6a-a681a12cf709",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "A multifunctional email assistant capable of summarizing, replying, composing, proofreading, and checking grammar.",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ },
+ {
+ "app": {
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
+ "mode": "workflow",
+ "name": "Customer Review Analysis Workflow "
+ },
+ "app_id": "9c0cd31f-4b62-4005-adf5-e3888d08654a",
+ "category": "Workflow",
+ "copyright": null,
+ "description": "Utilize LLM (Large Language Models) to classify customer reviews and forward them to the internal system.",
+ "is_listed": true,
+ "position": 5,
+ "privacy_policy": null
+ }
+ ]
+ },
+ "zh-Hans": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "zh-Hant": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "pt-BR": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "es-ES": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "fr-FR": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "de-DE": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "ja-JP": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "ko-KR": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "ru-RU": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "it-IT": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "uk-UA": {
+ "categories": [],
+ "recommended_apps": []
+ },
+ "vi-VN": {
+ "categories": [],
+ "recommended_apps": []
+ }
+ },
+ "app_details": {
+ "b53545b1-79ea-4da3-b31a-c39391c6f041": {
+ "export_data": "app:\n icon: \"\\U0001F916\"\n icon_background: '#FFEAD5'\n mode: chat\n name: Website Generator\nmodel_config:\n agent_mode:\n enabled: false\n max_iteration: 5\n strategy: function_call\n tools: []\n annotation_reply:\n enabled: false\n chat_prompt_config: {}\n completion_prompt_config: {}\n dataset_configs:\n datasets:\n datasets: []\n retrieval_model: single\n dataset_query_variable: ''\n external_data_tools: []\n file_upload:\n image:\n detail: high\n enabled: false\n number_limits: 3\n transfer_methods:\n - remote_url\n - local_file\n model:\n completion_params:\n frequency_penalty: 0\n max_tokens: 512\n presence_penalty: 0\n stop: []\n temperature: 0\n top_p: 1\n mode: chat\n name: gpt-3.5-turbo-0125\n provider: openai\n more_like_this:\n enabled: false\n opening_statement: ''\n pre_prompt: Your task is to create a one-page website based on the given specifications,\n delivered as an HTML file with embedded JavaScript and CSS. The website should\n incorporate a variety of engaging and interactive design features, such as drop-down\n menus, dynamic text and content, clickable buttons, and more. Ensure that the\n design is visually appealing, responsive, and user-friendly. The HTML, CSS, and\n JavaScript code should be well-structured, efficiently organized, and properly\n commented for readability and maintainability.\n prompt_type: simple\n retriever_resource:\n enabled: false\n sensitive_word_avoidance:\n configs: []\n enabled: false\n type: ''\n speech_to_text:\n enabled: false\n suggested_questions: []\n suggested_questions_after_answer:\n enabled: false\n text_to_speech:\n enabled: false\n language: ''\n voice: ''\n user_input_form: []\n",
+ "icon": "🤖",
+ "icon_background": "#FFEAD5",
+ "id": "b53545b1-79ea-4da3-b31a-c39391c6f041",
+ "mode": "chat",
+ "name": "Website Generator"
+ },
+ "a23b57fa-85da-49c0-a571-3aff375976c1": {
+ "export_data": "app:\n icon: \"\\U0001F911\"\n icon_background: '#E4FBCC'\n mode: agent-chat\n name: Investment Analysis Report Copilot\nmodel_config:\n agent_mode:\n enabled: true\n max_iteration: 5\n strategy: function_call\n tools:\n - enabled: true\n isDeleted: false\n notAuthor: false\n provider_id: yahoo\n provider_name: yahoo\n provider_type: builtin\n tool_label: Analytics\n tool_name: yahoo_finance_analytics\n tool_parameters:\n end_date: ''\n start_date: ''\n symbol: ''\n - enabled: true\n isDeleted: false\n notAuthor: false\n provider_id: yahoo\n provider_name: yahoo\n provider_type: builtin\n tool_label: News\n tool_name: yahoo_finance_news\n tool_parameters:\n symbol: ''\n - enabled: true\n isDeleted: false\n notAuthor: false\n provider_id: yahoo\n provider_name: yahoo\n provider_type: builtin\n tool_label: Ticker\n tool_name: yahoo_finance_ticker\n tool_parameters:\n symbol: ''\n annotation_reply:\n enabled: false\n chat_prompt_config: {}\n completion_prompt_config: {}\n dataset_configs:\n datasets:\n datasets: []\n retrieval_model: single\n dataset_query_variable: ''\n external_data_tools: []\n file_upload:\n image:\n detail: high\n enabled: false\n number_limits: 3\n transfer_methods:\n - remote_url\n - local_file\n model:\n completion_params:\n frequency_penalty: 0.5\n max_tokens: 4096\n presence_penalty: 0.5\n stop: []\n temperature: 0.2\n top_p: 0.75\n mode: chat\n name: gpt-4-1106-preview\n provider: openai\n more_like_this:\n enabled: false\n opening_statement: 'Welcome to your personalized Investment Analysis Copilot service,\n where we delve into the depths of stock analysis to provide you with comprehensive\n insights. To begin our journey into the financial world, try to ask:\n\n '\n pre_prompt: \"# Job Description: Data Analysis Copilot\\n## Character\\nMy primary\\\n \\ goal is to provide user with expert data analysis advice. Using extensive and\\\n \\ detailed data. Tell me the stock (with ticket symbol) you want to analyze. I\\\n \\ will do all fundamental, technical, market sentiment, and Marco economical analysis\\\n \\ for the stock as an expert. \\n\\n## Skills \\n### Skill 1: Search for stock information\\\n \\ using 'Ticker' from Yahoo Finance \\n### Skill 2: Search for recent news using\\\n \\ 'News' for the target company. \\n### Skill 3: Search for financial figures and\\\n \\ analytics using 'Analytics' for the target company\\n\\n## Workflow\\nAsks the\\\n \\ user which stocks with ticker name need to be analyzed and then performs the\\\n \\ following analysis in sequence. \\n**Part I: Fundamental analysis: financial\\\n \\ reporting analysis\\n*Objective 1: In-depth analysis of the financial situation\\\n \\ of the target company.\\n*Steps:\\n1. Identify the object of analysis:\\n\\n\\n\\n2. Access to financial\\\n \\ reports \\n