Edwin Salguero committed
Commit · 6cd35fc
Parent(s): 63da54f

Add comprehensive Docker integration with multi-environment support, monitoring, and deployment tools

Files changed:
- .dockerignore +88 -0
- Dockerfile +51 -0
- README.md +295 -216
- docker-compose.dev.yml +73 -0
- docker-compose.prod.yml +102 -0
- docker-compose.yml +81 -0
- docker-entrypoint.sh +77 -0
- scripts/docker-build.sh +162 -0
.dockerignore
ADDED
@@ -0,0 +1,88 @@
+# Git
+.git
+.gitignore
+.gitattributes
+
+# Python
+__pycache__
+*.pyc
+*.pyo
+*.pyd
+.Python
+env
+pip-log.txt
+pip-delete-this-directory.txt
+.tox
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.log
+.git
+.mypy_cache
+.pytest_cache
+.hypothesis
+
+# Virtual environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# IDEs
+.vscode/
+.idea/
+*.swp
+*.swo
+*~
+
+# OS
+.DS_Store
+.DS_Store?
+._*
+.Spotlight-V100
+.Trashes
+ehthumbs.db
+Thumbs.db
+
+# Project specific
+logs/*
+!logs/.gitkeep
+data/*
+!data/.gitkeep
+models/*
+!models/.gitkeep
+*.csv
+*.json
+*.pkl
+*.h5
+*.pth
+
+# Documentation
+docs/
+*.md
+!README.md
+
+# Tests
+tests/
+pytest.ini
+
+# Docker
+Dockerfile
+docker-compose.yml
+.dockerignore
+
+# Jupyter
+.ipynb_checkpoints
+*.ipynb
+
+# Temporary files
+tmp/
+temp/
+*.tmp
+*.temp
Dockerfile
ADDED
@@ -0,0 +1,51 @@
+# Use Python 3.11 slim image for smaller size
+FROM python:3.11-slim
+
+# Set environment variables
+ENV PYTHONUNBUFFERED=1
+ENV PYTHONDONTWRITEBYTECODE=1
+ENV DEBIAN_FRONTEND=noninteractive
+
+# Set work directory
+WORKDIR /app
+
+# Install system dependencies
+RUN apt-get update && apt-get install -y \
+    gcc \
+    g++ \
+    libffi-dev \
+    libssl-dev \
+    curl \
+    wget \
+    git \
+    && rm -rf /var/lib/apt/lists/*
+
+# Copy requirements first for better caching
+COPY requirements.txt .
+
+# Install Python dependencies
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir -r requirements.txt && \
+    pip install --no-cache-dir jupyter jupyterlab
+
+# Copy the entire project
+COPY . .
+
+# Create necessary directories
+RUN mkdir -p logs data models
+
+# Set permissions
+RUN chmod +x demo.py finrl_demo.py docker-entrypoint.sh
+
+# Expose port for potential web interface
+EXPOSE 8000 8888
+
+# Set entrypoint
+ENTRYPOINT ["/app/docker-entrypoint.sh"]
+
+# Health check
+HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
+    CMD python -c "import sys; sys.exit(0)" || exit 1
+
+# Default command
+CMD ["python", "-m", "agentic_ai_system.main"]
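The image exposes port 8000 for the trading system and 8888 for Jupyter, and every start goes through `docker-entrypoint.sh` before the default `CMD`. A minimal sketch of building and running it with both ports published (assuming the `algorithmic-trading` tag used by the README and build script below):

```bash
# Build the image from the Dockerfile above
docker build -t algorithmic-trading .

# Run with the default command (python -m agentic_ai_system.main)
docker run --rm -p 8000:8000 -p 8888:8888 algorithmic-trading
```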
README.md
CHANGED
@@ -25,7 +25,7 @@ paperswithcode_id: null
# Algorithmic Trading System

-A comprehensive algorithmic trading system with synthetic data generation, comprehensive logging, extensive testing capabilities,

## Features

@@ -44,6 +44,16 @@ A comprehensive algorithmic trading system with synthetic data generation, compr
- **TensorBoard Integration**: Training progress visualization and monitoring
- **Comprehensive Evaluation**: Performance metrics including Sharpe ratio and total returns

### Synthetic Data Generation
- **Realistic Market Data**: Generate OHLCV data using geometric Brownian motion
- **Multiple Frequencies**: Support for 1min, 5min, 1H, and 1D data

@@ -65,6 +75,25 @@ A comprehensive algorithmic trading system with synthetic data generation, compr
## Installation

1. Clone the repository:
```bash
git clone https://github.com/ParallelLLC/algorithmic_trading.git

@@ -76,6 +105,128 @@ cd algorithmic_trading
pip install -r requirements.txt
```

## Configuration

The system is configured via `config.yaml`:

@@ -121,6 +272,23 @@ logging:
  enable_file: true
  max_file_size_mb: 10
  backup_count: 5
```

## Usage

@@ -172,6 +340,15 @@ pytest --cov=agentic_ai_system --cov-report=html
pytest tests/test_synthetic_data_generator.py
```

## System Architecture

### Components

@@ -182,6 +359,7 @@ pytest tests/test_synthetic_data_generator.py
4. **ExecutionAgent**: Executes trading orders with broker simulation
5. **Orchestrator**: Coordinates the entire trading workflow
6. **LoggerConfig**: Manages comprehensive logging throughout the system

### Data Flow

@@ -189,266 +367,167 @@
Synthetic Data Generator → Data Ingestion → Strategy Agent → Execution Agent
                                   ↓
                            Logging System
```

-### Features
-- **Geometric Brownian Motion**: Realistic price movement simulation
-- **OHLCV Data**: Complete market data with open, high, low, close, and volume
-- **Market Scenarios**: Different market conditions for testing
-- **Configurable Parameters**: Adjustable volatility, trend, and noise levels
-### Usage Examples
-```python
-from agentic_ai_system.synthetic_data_generator import SyntheticDataGenerator
-# Generate tick data
-tick_data = generator.generate_tick_data(
-    symbol='AAPL',
-    duration_minutes=60,
-    tick_interval_ms=1000
-)
-# Generate market scenarios
-crash_data = generator.generate_market_scenarios('crash')
-volatile_data = generator.generate_market_scenarios('volatile')
```

-### Log Files
-- `logs/trading_system.log`: General system logs
-- `logs/trading.log`: Trading-specific logs
-- `logs/performance.log`: Performance metrics
-- `logs/errors.log`: Error logs
-- **CRITICAL**: Critical system failures
-setup_logging(config)
-logger.warning("High volatility detected")
-logger.error("Order execution failed", exc_info=True)
```

-### Supported Algorithms
-- **PPO (Proximal Policy Optimization)**: Stable policy gradient method
-- **A2C (Advantage Actor-Critic)**: Actor-critic method with advantage estimation
-- **DDPG (Deep Deterministic Policy Gradient)**: Continuous action space algorithm
-- **TD3 (Twin Delayed DDPG)**: Improved version of DDPG with twin critics
-- **Reward Function**: Portfolio return-based rewards
-- **Transaction Costs**: Realistic trading fees and slippage
-- **Position Limits**: Maximum position constraints
-    batch_size=64,
-    total_timesteps=100000
-)
-agent = FinRLAgent(config)
-training_result = agent.train(
-    data=market_data,
-    total_timesteps=100000,
-    eval_freq=10000
-)
-print(f"Total Return: {evaluation['total_return']:.2%}")
```

-# Create agent from config file
-agent = create_finrl_agent_from_config('config.yaml')
-results = agent.evaluate(test_data)
```

```bash
-# 2. Train a FinRL agent
-# 3. Evaluate performance
-# 4. Generate trading predictions
-# 5. Create visualization plots
```

-FinRL settings can be configured in `config.yaml`:
```yaml
-  tensorboard_log: 'logs/finrl_tensorboard'
-  training:
-    total_timesteps: 100000
-    eval_freq: 10000
-    save_best_model: true
-    model_save_path: 'models/finrl_best/'
-  inference:
-    use_trained_model: false
-    model_path: 'models/finrl_best/best_model'
```

-# Continue training
-agent.train(more_data, total_timesteps=50000)
-```
-### Performance Monitoring
-- **TensorBoard Integration**: Monitor training progress
-- **Evaluation Metrics**: Total return, Sharpe ratio, portfolio value
-- **Trading Statistics**: Buy/sell signal analysis
-- **Visualization**: Price charts with trading signals
-### Advanced Features
-- **Multi-timeframe Support**: Train on different data frequencies
-- **Feature Engineering**: Automatic technical indicator calculation
-- **Risk Management**: Built-in position and drawdown limits
-- **Backtesting**: Comprehensive backtesting capabilities
-- **Hyperparameter Tuning**: Easy configuration for different algorithms
-## Testing
-### Test Structure
-```
-tests/
-├── __init__.py
-├── test_synthetic_data_generator.py
-├── test_strategy_agent.py
-├── test_execution_agent.py
-├── test_data_ingestion.py
-├── test_integration.py
-├── test_finrl_agent.py
```

-- **Unit Tests**: Test individual components in isolation
-- **Integration Tests**: Test complete workflows
-- **Performance Tests**: Test system performance and scalability
-- **Error Handling Tests**: Test error conditions and edge cases
-- **Slow RL Tests**: RL agent training tests are marked as `@pytest.mark.slow` and use minimal timesteps for speed. These are skipped by default unless explicitly run.

```bash
-# Run slow RL tests (FinRL agent training)
-pytest -m slow
```

-The system includes comprehensive performance monitoring:
-- **Execution Time Tracking**: Monitor workflow execution times
-- **Trade Statistics**: Track successful vs failed trades
-- **Performance Metrics**: Calculate returns and drawdowns
-- **Resource Usage**: Monitor memory and CPU usage
-## Error Handling
-The system includes robust error handling:
-- **Graceful Degradation**: System continues operation despite component failures
-- **Error Logging**: Comprehensive error logging with stack traces
-- **Fallback Mechanisms**: Automatic fallback to synthetic data when CSV files are missing
-- **Validation**: Data validation at multiple levels
-2. Create a feature branch
-3. Add tests for new functionality
-4. Ensure all tests pass
-5. Submit a pull request
# Algorithmic Trading System

+A comprehensive algorithmic trading system with synthetic data generation, comprehensive logging, extensive testing capabilities, FinRL reinforcement learning integration, and full Docker support.

## Features

- **TensorBoard Integration**: Training progress visualization and monitoring
- **Comprehensive Evaluation**: Performance metrics including Sharpe ratio and total returns

+### Docker Integration
+- **Multi-Environment Support**: Development, production, and testing environments
+- **Container Orchestration**: Docker Compose for easy service management
+- **Monitoring Stack**: Prometheus and Grafana for system monitoring
+- **Development Tools**: Jupyter Lab integration for interactive development
+- **Automated Testing**: Containerized test execution with coverage reporting
+- **Resource Management**: CPU and memory limits for production deployment
+- **Health Checks**: Built-in health monitoring for all services
+- **Backup Services**: Automated backup and data persistence
+
### Synthetic Data Generation
- **Realistic Market Data**: Generate OHLCV data using geometric Brownian motion
- **Multiple Frequencies**: Support for 1min, 5min, 1H, and 1D data

## Installation

+### Option 1: Docker (Recommended)
+
+1. Clone the repository:
+```bash
+git clone https://github.com/ParallelLLC/algorithmic_trading.git
+cd algorithmic_trading
+```
+
+2. Build and run with Docker:
+```bash
+# Build the image
+docker build -t algorithmic-trading .
+
+# Run the trading system
+docker run -p 8000:8000 algorithmic-trading
+```
+
+### Option 2: Local Installation
+
1. Clone the repository:
```bash
git clone https://github.com/ParallelLLC/algorithmic_trading.git

pip install -r requirements.txt
```

+## Docker Usage
+
+### Quick Start
+
+```bash
+# Build and start development environment
+./scripts/docker-build.sh dev
+
+# Build and start production environment
+./scripts/docker-build.sh prod
+
+# Run tests in Docker
+./scripts/docker-build.sh test
+
+# Stop all containers
+./scripts/docker-build.sh stop
+```
+
+### Development Environment
+
+```bash
+# Start development environment with Jupyter Lab
+docker-compose -f docker-compose.dev.yml up -d
+
+# Access services:
+# - Jupyter Lab: http://localhost:8888
+# - Trading System: http://localhost:8000
+# - TensorBoard: http://localhost:6006
+```
+
+### Production Environment
+
+```bash
+# Start production environment with monitoring
+docker-compose -f docker-compose.prod.yml up -d
+
+# Access services:
+# - Trading System: http://localhost:8000
+# - Grafana: http://localhost:3000 (admin/admin)
+# - Prometheus: http://localhost:9090
+```
+
+### Custom Commands
+
+```bash
+# Run a specific command in the container
+./scripts/docker-build.sh run 'python demo.py'
+
+# Run FinRL training
+./scripts/docker-build.sh run 'python finrl_demo.py'
+
+# Run backtesting
+./scripts/docker-build.sh run 'python -m agentic_ai_system.main --mode backtest'
+
+# Show logs
+./scripts/docker-build.sh logs trading-system
+```
+
+### Docker Compose Services
+
+#### Development (`docker-compose.dev.yml`)
+- **trading-dev**: Jupyter Lab environment with hot reload
+- **finrl-training-dev**: FinRL training with TensorBoard
+- **testing**: Automated test execution
+- **linting**: Code quality checks
+
+#### Production (`docker-compose.prod.yml`)
+- **trading-system**: Main trading system with resource limits
+- **monitoring**: Prometheus metrics collection
+- **grafana**: Data visualization dashboard
+- **backup**: Automated backup service
+
+#### Standard (`docker-compose.yml`)
+- **trading-system**: Basic trading system
+- **finrl-training**: FinRL training service
+- **backtesting**: Backtesting service
+- **development**: Development environment
+
+### Docker Features
+
+#### Health Checks
+All services include health checks to ensure system reliability:
+```yaml
+healthcheck:
+  test: ["CMD", "python", "-c", "import sys; sys.exit(0)"]
+  interval: 30s
+  timeout: 10s
+  retries: 3
+  start_period: 40s
+```
+
+#### Resource Management
+Production services include resource limits:
+```yaml
+deploy:
+  resources:
+    limits:
+      memory: 2G
+      cpus: '1.0'
+    reservations:
+      memory: 512M
+      cpus: '0.5'
+```
+
+#### Volume Management
+Persistent data storage with named volumes:
+- `trading_data`: Market data and configuration
+- `trading_logs`: System logs
+- `trading_models`: Trained models
+- `prometheus_data`: Monitoring metrics
+- `grafana_data`: Dashboard configurations
+
+#### Logging
+Structured logging with rotation:
+```yaml
+logging:
+  driver: "json-file"
+  options:
+    max-size: "10m"
+    max-file: "3"
+```
+
## Configuration

The system is configured via `config.yaml`:

  enable_file: true
  max_file_size_mb: 10
  backup_count: 5
+
+# FinRL configuration
+finrl:
+  algorithm: 'PPO'
+  learning_rate: 0.0003
+  batch_size: 64
+  buffer_size: 1000000
+  gamma: 0.99
+  tensorboard_log: 'logs/finrl_tensorboard'
+  training:
+    total_timesteps: 100000
+    eval_freq: 10000
+    save_best_model: true
+    model_save_path: 'models/finrl_best/'
+  inference:
+    use_trained_model: false
+    model_path: 'models/finrl_best/best_model'
```

## Usage

pytest tests/test_synthetic_data_generator.py
```

+### Docker Testing
+```bash
+# Run all tests in Docker
+./scripts/docker-build.sh test
+
+# Run tests with coverage
+docker run --rm -v $(pwd):/app algorithmic-trading:latest pytest --cov=agentic_ai_system --cov-report=html
+```
+
## System Architecture

### Components

4. **ExecutionAgent**: Executes trading orders with broker simulation
5. **Orchestrator**: Coordinates the entire trading workflow
6. **LoggerConfig**: Manages comprehensive logging throughout the system
+7. **FinRLAgent**: Reinforcement learning agent for advanced trading strategies

### Data Flow

Synthetic Data Generator → Data Ingestion → Strategy Agent → Execution Agent
                                   ↓
                            Logging System
+                                  ↓
+                       FinRL Agent (Optional)
```

+### Docker Architecture
+
+```
+┌─────────────────┐    ┌─────────────────┐    ┌─────────────────┐
+│   Development   │    │   Production    │    │   Monitoring    │
+│   Environment   │    │   Environment   │    │     Stack       │
+├─────────────────┤    ├─────────────────┤    ├─────────────────┤
+│ • Jupyter Lab   │    │ • Trading Sys   │    │ • Prometheus    │
+│ • Hot Reload    │    │ • Resource Mgmt │    │ • Grafana       │
+│ • TensorBoard   │    │ • Health Checks │    │ • Metrics       │
+│ • Testing       │    │ • Logging       │    │ • Dashboards    │
+└─────────────────┘    └─────────────────┘    └─────────────────┘
```
+## Monitoring and Observability
+
+### Prometheus Metrics
+- Trading performance metrics
+- System resource usage
+- Error rates and response times
+- Custom business metrics
+
+### Grafana Dashboards
+- Real-time trading performance
+- System health monitoring
+- Historical data analysis
+- Alert management
+
+### Health Checks
+- Service availability monitoring
+- Dependency health verification
+- Automatic restart on failure
+- Performance degradation detection
+
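The production compose file mounts `./monitoring/prometheus.yml` and Grafana provisioning directories, but those files are not part of this commit. A minimal sketch of what they might contain so the monitoring stack starts with one scrape target and a Prometheus datasource; the file paths match the volume mounts in docker-compose.prod.yml, while the scrape interval and the assumption that the app exposes metrics on port 8000 are illustrative only:

```yaml
# monitoring/prometheus.yml (sketch, not included in this commit)
global:
  scrape_interval: 15s

scrape_configs:
  - job_name: 'trading-system'
    static_configs:
      - targets: ['trading-system:8000']  # assumes the trading system serves metrics on port 8000

# monitoring/grafana/datasources/prometheus.yml (sketch, not included in this commit)
apiVersion: 1
datasources:
  - name: Prometheus
    type: prometheus
    url: http://monitoring:9090   # "monitoring" is the Prometheus service name in docker-compose.prod.yml
    access: proxy
    isDefault: true
```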
+## Deployment
+
+### Local Development
+```bash
+# Start development environment
+./scripts/docker-build.sh dev
+
+# Access Jupyter Lab
+open http://localhost:8888
+```
+
+### Production Deployment
+```bash
+# Deploy to production
+./scripts/docker-build.sh prod
+
+# Monitor system health
+open http://localhost:3000  # Grafana
+open http://localhost:9090  # Prometheus
+```
+
+### Cloud Deployment
+The Docker setup is compatible with:
+- **AWS ECS/Fargate**: For serverless container deployment
+- **Google Cloud Run**: For scalable containerized applications
+- **Azure Container Instances**: For managed container deployment
+- **Kubernetes**: For orchestrated container management
+
+### Environment Variables
+```bash
+# Development
+LOG_LEVEL=DEBUG
+PYTHONDONTWRITEBYTECODE=1
+
+# Production
+LOG_LEVEL=INFO
+PYTHONUNBUFFERED=1
+```
+
+## Troubleshooting
+
+### Common Docker Issues
+
+#### Build Failures
+```bash
+# Clean build cache
+docker system prune -a
+
+# Rebuild without cache
+docker build --no-cache -t algorithmic-trading .
```

+#### Container Startup Issues
+```bash
+# Check container logs
+docker logs algorithmic-trading
+
+# Check container status
+docker ps -a
```

+#### Volume Mount Issues
```bash
+# Check volume permissions
+docker run --rm -v $(pwd):/app algorithmic-trading:latest ls -la /app
+
+# Fix volume permissions
+chmod -R 755 data logs models
```

+### Performance Optimization
+
+#### Resource Tuning
```yaml
+# Adjust resource limits in docker-compose.prod.yml
+deploy:
+  resources:
+    limits:
+      memory: 4G   # Increase for heavy workloads
+      cpus: '2.0'  # Increase for CPU-intensive tasks
```

+#### Logging Optimization
+```yaml
+# Reduce log verbosity in production
+logging:
+  driver: "json-file"
+  options:
+    max-size: "5m"  # Smaller log files
+    max-file: "2"   # Fewer log files
```

+## Contributing
+
+1. Fork the repository
+2. Create a feature branch
+3. Add tests for new functionality
+4. Ensure all tests pass (including Docker tests)
+5. Submit a pull request
+
+### Development Workflow
```bash
+# Start development environment
+./scripts/docker-build.sh dev
+
+# Make changes and test
+./scripts/docker-build.sh test
+
+# Run linting
+docker-compose -f docker-compose.dev.yml run linting
+
+# Commit and push
+git add .
+git commit -m "Add new feature"
+git push origin feature-branch
```

+## License
+
+This project is licensed under the Apache License, Version 2.0 - see the LICENSE file for details.
+
+## About
+
+A comprehensive, production-ready algorithmic trading system with real-time market data streaming, multi-symbol trading, advanced technical analysis, robust risk management capabilities, and full Docker containerization support.
+
+[Medium Article](https://medium.com/@edwinsalguero/data-pipeline-design-in-an-algorithmic-trading-system-ac0d8109c4b9)
docker-compose.dev.yml
ADDED
@@ -0,0 +1,73 @@
+version: '3.8'
+
+services:
+  # Development environment with hot reload
+  trading-dev:
+    build: .
+    container_name: trading-dev
+    ports:
+      - "8888:8888"  # Jupyter Lab
+      - "8000:8000"  # Trading system
+      - "6006:6006"  # TensorBoard
+    volumes:
+      - .:/app
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=DEBUG
+      - PYTHONDONTWRITEBYTECODE=1
+    command: ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token=''"]
+    restart: unless-stopped
+    stdin_open: true
+    tty: true
+
+  # FinRL training with TensorBoard
+  finrl-training-dev:
+    build: .
+    container_name: finrl-training-dev
+    ports:
+      - "6006:6006"  # TensorBoard
+    volumes:
+      - .:/app
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=DEBUG
+    command: ["python", "finrl_demo.py"]
+    restart: "no"
+
+  # Testing service
+  testing:
+    build: .
+    container_name: trading-testing
+    volumes:
+      - .:/app
+      - ./data:/app/data
+      - ./logs:/app/logs
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=DEBUG
+    command: ["pytest", "-v", "--cov=agentic_ai_system", "--cov-report=html"]
+    restart: "no"
+
+  # Linting and code quality
+  linting:
+    build: .
+    container_name: trading-lint
+    volumes:
+      - .:/app
+    environment:
+      - PYTHONPATH=/app
+    command: ["sh", "-c", "pip install flake8 black isort mypy && flake8 agentic_ai_system && black --check agentic_ai_system && isort --check-only agentic_ai_system"]
+    restart: "no"
+
+volumes:
+  data:
+  logs:
+  models:
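The one-shot services in this development stack (testing, linting) can also be run individually instead of bringing up the whole stack; a minimal sketch using the service names defined above:

```bash
# Run the containerized test suite on demand
docker-compose -f docker-compose.dev.yml run --rm testing

# Run the code-quality checks on demand
docker-compose -f docker-compose.dev.yml run --rm linting
```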
docker-compose.prod.yml
ADDED
@@ -0,0 +1,102 @@
+version: '3.8'
+
+services:
+  # Production trading system
+  trading-system:
+    build: .
+    container_name: algorithmic-trading-prod
+    ports:
+      - "8000:8000"
+    volumes:
+      - trading_data:/app/data
+      - trading_logs:/app/logs
+      - trading_models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=INFO
+      - PYTHONUNBUFFERED=1
+    command: ["python", "-m", "agentic_ai_system.main", "--mode", "live"]
+    restart: unless-stopped
+    healthcheck:
+      test: ["CMD", "python", "-c", "import sys; sys.exit(0)"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+      start_period: 40s
+    deploy:
+      resources:
+        limits:
+          memory: 2G
+          cpus: '1.0'
+        reservations:
+          memory: 512M
+          cpus: '0.5'
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m"
+        max-file: "3"
+
+  # Monitoring service
+  monitoring:
+    image: prom/prometheus:latest
+    container_name: trading-monitoring
+    ports:
+      - "9090:9090"
+    volumes:
+      - ./monitoring/prometheus.yml:/etc/prometheus/prometheus.yml:ro
+      - prometheus_data:/prometheus
+    command:
+      - '--config.file=/etc/prometheus/prometheus.yml'
+      - '--storage.tsdb.path=/prometheus'
+      - '--web.console.libraries=/etc/prometheus/console_libraries'
+      - '--web.console.templates=/etc/prometheus/consoles'
+      - '--storage.tsdb.retention.time=200h'
+      - '--web.enable-lifecycle'
+    restart: unless-stopped
+
+  # Grafana for visualization
+  grafana:
+    image: grafana/grafana:latest
+    container_name: trading-grafana
+    ports:
+      - "3000:3000"
+    volumes:
+      - grafana_data:/var/lib/grafana
+      - ./monitoring/grafana/dashboards:/etc/grafana/provisioning/dashboards:ro
+      - ./monitoring/grafana/datasources:/etc/grafana/provisioning/datasources:ro
+    environment:
+      - GF_SECURITY_ADMIN_PASSWORD=admin
+      - GF_USERS_ALLOW_SIGN_UP=false
+    restart: unless-stopped
+    depends_on:
+      - monitoring
+
+  # Backup service
+  backup:
+    build: .
+    container_name: trading-backup
+    volumes:
+      - trading_data:/app/data:ro
+      - trading_logs:/app/logs:ro
+      - trading_models:/app/models:ro
+      - backup_data:/backup
+    environment:
+      - PYTHONPATH=/app
+    command: ["python", "-c", "import shutil; import os; from datetime import datetime; timestamp = datetime.now().strftime('%Y%m%d_%H%M%S'); shutil.make_archive(f'/backup/trading_backup_{timestamp}', 'zip', '/app/data'); shutil.make_archive(f'/backup/models_backup_{timestamp}', 'zip', '/app/models'); print('Backup completed')"]
+    restart: "no"
+
+volumes:
+  trading_data:
+    driver: local
+  trading_logs:
+    driver: local
+  trading_models:
+    driver: local
+  prometheus_data:
+    driver: local
+  grafana_data:
+    driver: local
+  backup_data:
+    driver: local
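The backup service above is declared with `restart: "no"`, so it is intended as a one-shot job rather than a long-running container; a minimal sketch of invoking it:

```bash
# Create timestamped zip archives of /app/data and /app/models into the backup_data volume
docker-compose -f docker-compose.prod.yml run --rm backup
```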
docker-compose.yml
ADDED
@@ -0,0 +1,81 @@
+version: '3.8'
+
+services:
+  # Main trading system
+  trading-system:
+    build: .
+    container_name: algorithmic-trading
+    ports:
+      - "8000:8000"
+    volumes:
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=INFO
+    command: ["python", "-m", "agentic_ai_system.main", "--mode", "live", "--duration", "300"]
+    restart: unless-stopped
+    healthcheck:
+      test: ["CMD", "python", "-c", "import sys; sys.exit(0)"]
+      interval: 30s
+      timeout: 10s
+      retries: 3
+      start_period: 40s
+
+  # FinRL training service
+  finrl-training:
+    build: .
+    container_name: finrl-training
+    volumes:
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=INFO
+    command: ["python", "finrl_demo.py"]
+    restart: "no"
+    depends_on:
+      - trading-system
+
+  # Backtesting service
+  backtesting:
+    build: .
+    container_name: backtesting
+    volumes:
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./config.yaml:/app/config.yaml:ro
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=INFO
+    command: ["python", "-m", "agentic_ai_system.main", "--mode", "backtest", "--start-date", "2024-01-01", "--end-date", "2024-12-31"]
+    restart: "no"
+    depends_on:
+      - trading-system
+
+  # Development service with Jupyter
+  development:
+    build: .
+    container_name: trading-dev
+    ports:
+      - "8888:8888"
+    volumes:
+      - ./data:/app/data
+      - ./logs:/app/logs
+      - ./models:/app/models
+      - ./config.yaml:/app/config.yaml:ro
+      - .:/app
+    environment:
+      - PYTHONPATH=/app
+      - LOG_LEVEL=DEBUG
+    command: ["jupyter", "lab", "--ip=0.0.0.0", "--port=8888", "--no-browser", "--allow-root", "--NotebookApp.token=''"]
+    restart: unless-stopped
+
+volumes:
+  data:
+  logs:
+  models:
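This default compose file mixes a long-running service with one-shot jobs, so services can be started selectively; a minimal sketch using the service names defined above:

```bash
# Start only the main trading system
docker-compose up -d trading-system

# Run the backtesting job (its depends_on also brings up trading-system)
docker-compose run --rm backtesting

# Tear everything down
docker-compose down
```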
docker-entrypoint.sh
ADDED
@@ -0,0 +1,77 @@
+#!/bin/bash
+set -e
+
+# Function to wait for dependencies
+wait_for_dependencies() {
+    echo "Waiting for dependencies to be ready..."
+    sleep 5
+}
+
+# Function to initialize directories
+init_directories() {
+    echo "Initializing directories..."
+    mkdir -p /app/data
+    mkdir -p /app/logs
+    mkdir -p /app/models
+    chmod 755 /app/data /app/logs /app/models
+}
+
+# Function to generate synthetic data if needed
+generate_data_if_needed() {
+    if [ ! -f "/app/data/synthetic_market_data.csv" ]; then
+        echo "Generating synthetic market data..."
+        python -c "
+from agentic_ai_system.synthetic_data_generator import SyntheticDataGenerator
+import yaml
+
+with open('/app/config.yaml', 'r') as f:
+    config = yaml.safe_load(f)
+
+generator = SyntheticDataGenerator(config)
+data = generator.generate_ohlcv_data(
+    symbol='AAPL',
+    start_date='2024-01-01',
+    end_date='2024-12-31',
+    frequency='1min'
+)
+data.to_csv('/app/data/synthetic_market_data.csv', index=True)
+print('Synthetic data generated successfully')
+"
+    else
+        echo "Synthetic data already exists"
+    fi
+}
+
+# Function to run health check
+health_check() {
+    echo "Running health check..."
+    python -c "
+import sys
+from agentic_ai_system.logger_config import setup_logging
+try:
+    setup_logging({})
+    print('Health check passed')
+except Exception as e:
+    print(f'Health check failed: {e}')
+    sys.exit(1)
+"
+}
+
+# Main execution
+main() {
+    echo "Starting Algorithmic Trading System..."
+
+    # Initialize
+    init_directories
+    wait_for_dependencies
+    generate_data_if_needed
+    health_check
+
+    echo "System initialized successfully"
+
+    # Execute the main command
+    exec "$@"
+}
+
+# Run main function with all arguments
+main "$@"
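Because the script ends with `exec "$@"`, whatever command is passed to the container replaces the entrypoint shell after the directory setup, data generation, and health check have run. A minimal sketch (assuming the `algorithmic-trading` image tag):

```bash
# Initialization runs first, then the given command takes over the container process
docker run --rm -it algorithmic-trading bash
docker run --rm algorithmic-trading pytest -v
```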
scripts/docker-build.sh
ADDED
@@ -0,0 +1,162 @@
+#!/bin/bash
+
+# Colors for output
+RED='\033[0;31m'
+GREEN='\033[0;32m'
+YELLOW='\033[1;33m'
+BLUE='\033[0;34m'
+NC='\033[0m' # No Color
+
+# Function to print colored output
+print_status() {
+    echo -e "${BLUE}[INFO]${NC} $1"
+}
+
+print_success() {
+    echo -e "${GREEN}[SUCCESS]${NC} $1"
+}
+
+print_warning() {
+    echo -e "${YELLOW}[WARNING]${NC} $1"
+}
+
+print_error() {
+    echo -e "${RED}[ERROR]${NC} $1"
+}
+
+# Function to build the Docker image
+build_image() {
+    print_status "Building Docker image..."
+    docker build -t algorithmic-trading:latest .
+    if [ $? -eq 0 ]; then
+        print_success "Docker image built successfully"
+    else
+        print_error "Failed to build Docker image"
+        exit 1
+    fi
+}
+
+# Function to run tests in Docker
+run_tests() {
+    print_status "Running tests in Docker..."
+    docker run --rm -v $(pwd):/app algorithmic-trading:latest pytest -v
+    if [ $? -eq 0 ]; then
+        print_success "Tests passed"
+    else
+        print_error "Tests failed"
+        exit 1
+    fi
+}
+
+# Function to start development environment
+start_dev() {
+    print_status "Starting development environment..."
+    docker-compose -f docker-compose.dev.yml up -d
+    print_success "Development environment started"
+    print_status "Jupyter Lab available at: http://localhost:8888"
+    print_status "Trading system available at: http://localhost:8000"
+    print_status "TensorBoard available at: http://localhost:6006"
+}
+
+# Function to start production environment
+start_prod() {
+    print_status "Starting production environment..."
+    docker-compose -f docker-compose.prod.yml up -d
+    print_success "Production environment started"
+    print_status "Trading system available at: http://localhost:8000"
+    print_status "Grafana available at: http://localhost:3000 (admin/admin)"
+    print_status "Prometheus available at: http://localhost:9090"
+}
+
+# Function to stop all containers
+stop_all() {
+    print_status "Stopping all containers..."
+    docker-compose -f docker-compose.yml down
+    docker-compose -f docker-compose.dev.yml down
+    docker-compose -f docker-compose.prod.yml down
+    print_success "All containers stopped"
+}
+
+# Function to clean up Docker resources
+cleanup() {
+    print_status "Cleaning up Docker resources..."
+    docker system prune -f
+    docker volume prune -f
+    print_success "Cleanup completed"
+}
+
+# Function to show logs
+show_logs() {
+    local service=${1:-trading-system}
+    print_status "Showing logs for $service..."
+    docker-compose logs -f $service
+}
+
+# Function to run a specific command in the container
+run_command() {
+    local command="$1"
+    print_status "Running command: $command"
+    docker run --rm -v $(pwd):/app algorithmic-trading:latest $command
+}
+
+# Function to show help
+show_help() {
+    echo "Usage: $0 [COMMAND]"
+    echo ""
+    echo "Commands:"
+    echo "  build       Build the Docker image"
+    echo "  test        Run tests in Docker"
+    echo "  dev         Start development environment"
+    echo "  prod        Start production environment"
+    echo "  stop        Stop all containers"
+    echo "  cleanup     Clean up Docker resources"
+    echo "  logs [SVC]  Show logs for a service (default: trading-system)"
+    echo "  run CMD     Run a specific command in the container"
+    echo "  help        Show this help message"
+    echo ""
+    echo "Examples:"
+    echo "  $0 build"
+    echo "  $0 dev"
+    echo "  $0 logs"
+    echo "  $0 run 'python demo.py'"
+}
+
+# Main script logic
+case "${1:-help}" in
+    build)
+        build_image
+        ;;
+    test)
+        build_image
+        run_tests
+        ;;
+    dev)
+        build_image
+        start_dev
+        ;;
+    prod)
+        build_image
+        start_prod
+        ;;
+    stop)
+        stop_all
+        ;;
+    cleanup)
+        cleanup
+        ;;
+    logs)
+        show_logs $2
+        ;;
+    run)
+        if [ -z "$2" ]; then
+            print_error "No command specified"
+            show_help
+            exit 1
+        fi
+        build_image
+        run_command "$2"
+        ;;
+    help|*)
+        show_help
+        ;;
+esac