"""
Unified Celery configuration for the Lin application.
This centralizes all Celery configuration to avoid conflicts.
"""
import os
from celery import Celery
from celery.schedules import crontab
from backend.config import Config

# Create Celery instance
celery_app = Celery('lin_app')

# Configure Celery with broker and result backend
celery_app.conf.broker_url = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
celery_app.conf.result_backend = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/0')
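
# The broker and result backend above default to a local Redis instance and can be
# overridden through the environment. A minimal sketch, assuming a deployment where
# Redis is reachable under the hostname "redis" (hypothetical values):
#
#   export CELERY_BROKER_URL=redis://redis:6379/1
#   export CELERY_RESULT_BACKEND=redis://redis:6379/1
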
# Additional Celery configuration
celery_app.conf.update(
    # Task serialization
    task_serializer='json',
    accept_content=['json'],
    result_serializer='json',
    timezone='UTC',
    enable_utc=True,
    # Task routing
    task_routes={
        'backend.celery_tasks.content_tasks.generate_content_task': {'queue': 'content'},
        'backend.celery_tasks.content_tasks.publish_post_task': {'queue': 'publish'},
        'backend.celery_tasks.schedule_loader.load_schedules_task': {'queue': 'scheduler'},
    },
    # Worker configuration
    worker_prefetch_multiplier=1,
    task_acks_late=True,
    worker_max_tasks_per_child=100,
    # Beat schedule configuration (beat_scheduler is not set, so it falls back to
    # the default 'celery.beat:PersistentScheduler')
    beat_schedule={
        # This task runs every 5 minutes to load schedules from the database
        'load-schedules': {
            'task': 'backend.celery_tasks.schedule_loader.load_schedules_task',
            'schedule': crontab(minute='*/5'),
        },
    },
    # Task result expiration
    result_expires=3600,  # 1 hour
    # Task time limits
    task_soft_time_limit=300,  # 5 minutes
    task_time_limit=600,  # 10 minutes
    # Rate limiting (task_annotations maps task names to attribute overrides)
    task_annotations={
        'backend.celery_tasks.content_tasks.generate_content_task': {'rate_limit': '10/h'},
        'backend.celery_tasks.content_tasks.publish_post_task': {'rate_limit': '30/h'},
    },
    # Error handling
    task_reject_on_worker_lost=True,
    worker_disable_rate_limits=False,
    # Redis result backend transport options; visibility_timeout is given in
    # seconds (assumed value; tune for the deployment)
    result_backend_transport_options={'visibility_timeout': 3600},
    # Broker connection resilience
    broker_connection_retry=True,
    broker_connection_max_retries=3,
)
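
# A minimal usage sketch, assuming this module is importable as backend.celery_app
# (the module path is an assumption, not confirmed by the project layout): one
# worker consuming the queues declared in task_routes, plus a beat process that
# drives the beat_schedule above.
#
#   celery -A backend.celery_app:celery_app worker -Q content,publish,scheduler --loglevel=info
#   celery -A backend.celery_app:celery_app beat --loglevel=info
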
# Import tasks at the bottom so they register against the configured app
from backend import celery_tasks  # noqa: E402,F401

__all__ = ['celery_app']
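
# A minimal sketch (an assumed pattern, not taken from the actual task modules) of
# how a task in backend.celery_tasks.content_tasks would bind to this shared app;
# the explicit name= must match the keys used in task_routes/task_annotations above:
#
#   from backend.celery_app import celery_app
#
#   @celery_app.task(name='backend.celery_tasks.content_tasks.generate_content_task', bind=True)
#   def generate_content_task(self, *args, **kwargs):
#       ...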