Practical Applications and Examples
This section demonstrates real-world Docker Compose applications across different scenarios, from development environments to production-ready multi-service architectures.
Full-Stack Web Application
MEAN Stack Application
```yaml
version: '3.8'

services:
  # Angular Frontend
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile
    ports:
      - "4200:4200"
    volumes:
      - ./frontend:/app
      - /app/node_modules
    environment:
      - API_URL=http://localhost:3000/api
    depends_on:
      - backend

  # Express.js Backend
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile
    ports:
      - "3000:3000"
    volumes:
      - ./backend:/app
      - /app/node_modules
    environment:
      - NODE_ENV=development
      # Credentials must match the root user created by the mongo service below;
      # setting MONGO_INITDB_ROOT_* enables authentication on mongod.
      - MONGODB_URI=mongodb://admin:password@mongo:27017/meanapp?authSource=admin
      - JWT_SECRET=your-jwt-secret
      - PORT=3000
    depends_on:
      - mongo
      - redis

  # MongoDB Database
  mongo:
    image: mongo:5
    ports:
      - "27017:27017"
    volumes:
      - mongo_data:/data/db
      - ./mongo-init.js:/docker-entrypoint-initdb.d/mongo-init.js:ro
    environment:
      - MONGO_INITDB_ROOT_USERNAME=admin
      - MONGO_INITDB_ROOT_PASSWORD=password
      - MONGO_INITDB_DATABASE=meanapp

  # Redis Cache
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    command: redis-server --appendonly yes

  # Nginx Reverse Proxy
  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
    depends_on:
      - frontend
      - backend

volumes:
  mongo_data:
  redis_data:
```
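Note that `depends_on` only controls start order; it does not wait for MongoDB to actually accept connections. A common refinement is to add a healthcheck and gate the backend on it. A minimal sketch to merge into the file above, assuming the mongo:5 image ships the `mongosh` shell (adjust the test command to your image):

```yaml
services:
  mongo:
    healthcheck:
      # "ping" is allowed before authentication, so this works with auth enabled
      test: ["CMD", "mongosh", "--quiet", "--eval", "db.adminCommand('ping')"]
      interval: 10s
      timeout: 5s
      retries: 5

  backend:
    depends_on:
      mongo:
        condition: service_healthy   # wait until the healthcheck passes
      redis:
        condition: service_started
```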
Microservices E-Commerce Platform
```yaml
version: '3.8'

services:
  # API Gateway
  api-gateway:
    build: ./services/api-gateway
    ports:
      - "8080:8080"
    environment:
      - USER_SERVICE_URL=http://user-service:3001
      - PRODUCT_SERVICE_URL=http://product-service:3002
      - ORDER_SERVICE_URL=http://order-service:3003
      - PAYMENT_SERVICE_URL=http://payment-service:3004
    depends_on:
      - user-service
      - product-service
      - order-service
      - payment-service

  # User Service
  user-service:
    build: ./services/user-service
    environment:
      - DATABASE_URL=postgresql://user:password@user-db:5432/users
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - user-db
      - redis

  user-db:
    image: postgres:13
    environment:
      - POSTGRES_DB=users
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
    volumes:
      - user_db_data:/var/lib/postgresql/data

  # Product Service
  product-service:
    build: ./services/product-service
    environment:
      - DATABASE_URL=postgresql://user:password@product-db:5432/products
      - ELASTICSEARCH_URL=http://elasticsearch:9200
    depends_on:
      - product-db
      - elasticsearch

  product-db:
    image: postgres:13
    environment:
      - POSTGRES_DB=products
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
    volumes:
      - product_db_data:/var/lib/postgresql/data

  # Order Service
  order-service:
    build: ./services/order-service
    environment:
      - DATABASE_URL=postgresql://user:password@order-db:5432/orders
      - RABBITMQ_URL=amqp://guest:guest@rabbitmq:5672/
    depends_on:
      - order-db
      - rabbitmq

  order-db:
    image: postgres:13
    environment:
      - POSTGRES_DB=orders
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
    volumes:
      - order_db_data:/var/lib/postgresql/data

  # Payment Service
  payment-service:
    build: ./services/payment-service
    environment:
      - DATABASE_URL=postgresql://user:password@payment-db:5432/payments
      - STRIPE_SECRET_KEY=${STRIPE_SECRET_KEY}
    depends_on:
      - payment-db

  payment-db:
    image: postgres:13
    environment:
      - POSTGRES_DB=payments
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
    volumes:
      - payment_db_data:/var/lib/postgresql/data

  # Shared Services
  redis:
    image: redis:7-alpine
    volumes:
      - redis_data:/data

  rabbitmq:
    image: rabbitmq:3-management
    ports:
      - "15672:15672"
    environment:
      - RABBITMQ_DEFAULT_USER=guest
      - RABBITMQ_DEFAULT_PASS=guest
    volumes:
      - rabbitmq_data:/var/lib/rabbitmq

  elasticsearch:
    image: elasticsearch:7.17.0
    environment:
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    volumes:
      - elasticsearch_data:/usr/share/elasticsearch/data

volumes:
  user_db_data:
  product_db_data:
  order_db_data:
  payment_db_data:
  redis_data:
  rabbitmq_data:
  elasticsearch_data:
```
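The `${STRIPE_SECRET_KEY}` reference is resolved by Compose variable substitution: the value comes from the shell environment or from an `.env` file next to the compose file, so the real key never has to live in version control. A sketch of such a file (the key value is a placeholder, not a real secret):

```bash
# .env — kept out of version control; loaded automatically by docker compose
STRIPE_SECRET_KEY=sk_test_replace_me
```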
Development Environment with Hot Reload
```yaml
version: '3.8'

services:
  # React Development Server
  frontend:
    build:
      context: ./frontend
      dockerfile: Dockerfile.dev
    ports:
      - "3000:3000"
    volumes:
      - ./frontend:/app
      - /app/node_modules
    environment:
      - CHOKIDAR_USEPOLLING=true
      - REACT_APP_API_URL=http://localhost:8000
    stdin_open: true
    tty: true

  # Django Development Server
  backend:
    build:
      context: ./backend
      dockerfile: Dockerfile.dev
    ports:
      - "8000:8000"
    volumes:
      - ./backend:/app
    environment:
      - DEBUG=1
      - DATABASE_URL=postgresql://user:password@db:5432/devdb
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - db
      - redis
    command: python manage.py runserver 0.0.0.0:8000

  # Celery Worker
  celery:
    build:
      context: ./backend
      dockerfile: Dockerfile.dev
    volumes:
      - ./backend:/app
    environment:
      - DEBUG=1
      - DATABASE_URL=postgresql://user:password@db:5432/devdb
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - db
      - redis
    command: celery -A myproject worker -l info

  # Celery Beat Scheduler
  celery-beat:
    build:
      context: ./backend
      dockerfile: Dockerfile.dev
    volumes:
      - ./backend:/app
    environment:
      - DEBUG=1
      - DATABASE_URL=postgresql://user:password@db:5432/devdb
      - REDIS_URL=redis://redis:6379/0
    depends_on:
      - db
      - redis
    command: celery -A myproject beat -l info

  # Database
  db:
    image: postgres:13
    environment:
      - POSTGRES_DB=devdb
      - POSTGRES_USER=user
      - POSTGRES_PASSWORD=password
    volumes:
      - postgres_data:/var/lib/postgresql/data
      - ./init.sql:/docker-entrypoint-initdb.d/init.sql
    ports:
      - "5432:5432"

  # Redis
  redis:
    image: redis:7-alpine
    ports:
      - "6379:6379"

  # MailHog for Email Testing
  mailhog:
    image: mailhog/mailhog
    ports:
      - "1025:1025"   # SMTP
      - "8025:8025"   # Web UI

volumes:
  postgres_data:
```
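For MailHog to capture outgoing mail, the Django application has to point its SMTP settings at the `mailhog` service (MailHog listens for SMTP on port 1025). One way to wire that up, assuming the project reads these variable names from the environment (that naming is an assumption about the app, not something Compose provides), is to extend the backend service:

```yaml
services:
  backend:
    environment:
      # Hypothetical names read by settings.py; MailHog's SMTP listener is port 1025
      - EMAIL_HOST=mailhog
      - EMAIL_PORT=1025
```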
CI/CD Pipeline Integration
```yaml
version: '3.8'

services:
  # Application Under Test
  app:
    build:
      context: .
      dockerfile: Dockerfile.test
    environment:
      - NODE_ENV=test
      - DATABASE_URL=postgresql://test:test@test-db:5432/testdb
      - REDIS_URL=redis://test-redis:6379/0
    depends_on:
      - test-db
      - test-redis
    command: npm test

  # Test Database
  test-db:
    image: postgres:13
    environment:
      - POSTGRES_DB=testdb
      - POSTGRES_USER=test
      - POSTGRES_PASSWORD=test
    tmpfs:
      - /var/lib/postgresql/data

  # Test Redis
  test-redis:
    image: redis:7-alpine
    tmpfs:
      - /data

  # Integration Tests
  integration-tests:
    build:
      context: .
      dockerfile: Dockerfile.integration
    environment:
      - API_URL=http://app:3000
    depends_on:
      - app
    command: npm run test:integration

  # End-to-End Tests
  e2e-tests:
    build:
      context: ./e2e
      dockerfile: Dockerfile
    environment:
      - BASE_URL=http://app:3000
    depends_on:
      - app
    command: npm run test:e2e
    volumes:
      - ./e2e/screenshots:/app/screenshots
      - ./e2e/videos:/app/videos
```
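In CI, each stage is typically run as a one-off container so its exit code fails the pipeline. A sketch of typical invocations, assuming the file above is saved as docker-compose.test.yml (the filename is an assumption):

```bash
# Unit tests: runs the app service's default command (npm test) and propagates its exit code
docker compose -f docker-compose.test.yml run --rm app

# Integration and e2e stages: note that app's default command runs unit tests, so for
# these stages its command is usually overridden to start the server on port 3000
docker compose -f docker-compose.test.yml run --rm integration-tests
docker compose -f docker-compose.test.yml run --rm e2e-tests

# Tear everything down, including anonymous volumes
docker compose -f docker-compose.test.yml down -v
```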
Monitoring and Logging Stack
```yaml
version: '3.8'

services:
  # Application
  app:
    build: .
    environment:
      - LOG_LEVEL=info
    logging:
      driver: "fluentd"
      options:
        fluentd-address: localhost:24224
        tag: app.logs
    depends_on:
      - fluentd

  # Prometheus for Metrics
  prometheus:
    image: prom/prometheus
    ports:
      - "9090:9090"
    volumes:
      - ./prometheus.yml:/etc/prometheus/prometheus.yml
      - prometheus_data:/prometheus
    command:
      - '--config.file=/etc/prometheus/prometheus.yml'
      - '--storage.tsdb.path=/prometheus'
      - '--web.console.libraries=/etc/prometheus/console_libraries'
      - '--web.console.templates=/etc/prometheus/consoles'

  # Grafana for Visualization
  grafana:
    image: grafana/grafana
    ports:
      - "3000:3000"
    environment:
      - GF_SECURITY_ADMIN_PASSWORD=admin
    volumes:
      - grafana_data:/var/lib/grafana
      - ./grafana/dashboards:/etc/grafana/provisioning/dashboards
      - ./grafana/datasources:/etc/grafana/provisioning/datasources

  # Fluentd for Log Collection
  fluentd:
    build: ./fluentd
    ports:
      - "24224:24224"
    volumes:
      - ./fluentd/conf:/fluentd/etc
    depends_on:
      - elasticsearch

  # Elasticsearch for Log Storage
  elasticsearch:
    image: elasticsearch:7.17.0
    environment:
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms512m -Xmx512m"
    volumes:
      - elasticsearch_data:/usr/share/elasticsearch/data

  # Kibana for Log Visualization
  kibana:
    image: kibana:7.17.0
    ports:
      - "5601:5601"
    environment:
      - ELASTICSEARCH_HOSTS=http://elasticsearch:9200
    depends_on:
      - elasticsearch

volumes:
  prometheus_data:
  grafana_data:
  elasticsearch_data:
```
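Prometheus reads the mounted prometheus.yml to learn what to scrape. A minimal sketch, assuming the app exposes a /metrics endpoint on port 3000 (both the port and the endpoint are assumptions about the application):

```yaml
# prometheus.yml
global:
  scrape_interval: 15s

scrape_configs:
  # Prometheus scraping its own metrics
  - job_name: prometheus
    static_configs:
      - targets: ['localhost:9090']

  # The application; assumes it serves /metrics on port 3000
  - job_name: app
    static_configs:
      - targets: ['app:3000']
```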
Multi-Environment Configuration
Base Configuration
```yaml
# docker-compose.yml
version: '3.8'

services:
  web:
    build: .
    environment:
      - NODE_ENV=${NODE_ENV:-development}
    depends_on:
      - db

  db:
    image: postgres:13
    environment:
      - POSTGRES_DB=${DB_NAME:-myapp}
      - POSTGRES_USER=${DB_USER:-user}
      - POSTGRES_PASSWORD=${DB_PASSWORD:-password}

volumes:
  postgres_data:
```
Development Override
```yaml
# docker-compose.override.yml
version: '3.8'

services:
  web:
    ports:
      - "3000:3000"
    volumes:
      - .:/app
      - /app/node_modules
    environment:
      - DEBUG=1
    command: npm run dev

  db:
    ports:
      - "5432:5432"
    volumes:
      - postgres_data:/var/lib/postgresql/data
```
Production Configuration
```yaml
# docker-compose.prod.yml
version: '3.8'

services:
  web:
    image: myapp:${TAG}
    restart: always
    environment:
      - NODE_ENV=production
    deploy:
      replicas: 3
      resources:
        limits:
          memory: 512M
        reservations:
          memory: 256M

  db:
    restart: always
    volumes:
      - /data/postgres:/var/lib/postgresql/data
    deploy:
      resources:
        limits:
          memory: 1G
        reservations:
          memory: 512M

  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.prod.conf:/etc/nginx/nginx.conf
      - ./ssl:/etc/ssl/certs
    depends_on:
      - web
```
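How the files combine: docker compose automatically merges docker-compose.override.yml on top of docker-compose.yml, while the production file must be listed explicitly with -f (later files override earlier ones). A sketch of typical invocations, matching the filenames and the ${TAG} variable used above:

```bash
# Development: docker-compose.override.yml is merged automatically
docker compose up

# Preview the fully merged configuration for a given environment
docker compose -f docker-compose.yml -f docker-compose.prod.yml config

# Production: name the files explicitly and supply the image tag
TAG=1.2.3 docker compose -f docker-compose.yml -f docker-compose.prod.yml up -d
```

Note that the deploy: block (replicas, resource limits) comes from the Compose specification's Swarm heritage; it is fully honored when the stack is deployed to a Swarm cluster, while plain docker compose applies only a subset of it locally.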
Summary
This section demonstrated practical Docker Compose applications:
Real-World Architectures
- Full-Stack Applications: MEAN stack with proper service separation
- Microservices: E-commerce platform with multiple databases and message queues
- Development Environments: Hot reload and debugging capabilities
Operational Patterns
- CI/CD Integration: Testing pipelines with isolated environments
- Monitoring Stack: Complete observability with Prometheus, Grafana, and the EFK stack (Elasticsearch, Fluentd, Kibana)
- Multi-Environment: Development, staging, and production configurations
Best Practices Applied
- Service Isolation: Service-to-service communication by name on the Compose network, with explicit dependency ordering
- Data Persistence: Named volumes for databases, bind mounts for live source code, and tmpfs mounts for disposable test data
- Configuration Management: Environment-specific override files and environment-variable substitution for sensitive values
Next Steps: Part 4 covers advanced techniques including custom networks, service mesh integration, and complex orchestration patterns for enterprise applications.