version: '3'

services:
  mongodb:
    image: mongo:6.0
    container_name: crawler-mongodb
    ports:
      - "27017:27017"
    volumes:
      - mongodb_data:/data/db
    restart: unless-stopped
    environment:
      - MONGO_INITDB_DATABASE=webcrawler
    networks:
      - crawler-network

  redis:
    image: redis:latest
    container_name: crawler-redis
    ports:
      - "6379:6379"
    volumes:
      - redis_data:/data
    restart: unless-stopped
    networks:
      - crawler-network

  web-crawler:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: web-crawler
    volumes:
      - ./:/app
      - crawler_data:/data/storage
    ports:
      - "9100:9100"
    depends_on:
      - mongodb
      - redis
    environment:
      - MONGODB_URI=mongodb://mongodb:27017/
      - REDIS_URI=redis://redis:6379/0
      - LOG_LEVEL=INFO
      - MAX_WORKERS=4
    networks:
      - crawler-network
    command: python crawl.py start --workers=4

  crawler-api:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: crawler-api
    volumes:
      - ./:/app
      - crawler_data:/data/storage
    ports:
      - "8000:8000"
    depends_on:
      - mongodb
      - redis
      - web-crawler
    environment:
      - MONGODB_URI=mongodb://mongodb:27017/
      - REDIS_URI=redis://redis:6379/0
      - LOG_LEVEL=INFO
    networks:
      - crawler-network
    command: python api.py

networks:
  crawler-network:
    driver: bridge

volumes:
  mongodb_data:
  redis_data:
  crawler_data:
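
The whole stack is started with "docker compose up -d", which brings up MongoDB, Redis, the crawler worker, and the API on the shared crawler-network bridge. Because crawl.py and api.py are not shown in this file, the snippet below is only a sketch, assuming the service containers use pymongo and redis-py, of how a process in this stack could pick up the environment variables that Compose injects.

# Minimal sketch (not the actual crawl.py/api.py) of reading the
# environment variables defined in the compose file above.
import os
import logging

from pymongo import MongoClient   # assumes pymongo is installed in the image
import redis                      # assumes redis-py is installed in the image

# Defaults mirror the values the compose file sets for the service containers.
MONGODB_URI = os.environ.get("MONGODB_URI", "mongodb://mongodb:27017/")
REDIS_URI = os.environ.get("REDIS_URI", "redis://redis:6379/0")
LOG_LEVEL = os.environ.get("LOG_LEVEL", "INFO")
MAX_WORKERS = int(os.environ.get("MAX_WORKERS", "4"))

logging.basicConfig(level=getattr(logging, LOG_LEVEL, logging.INFO))

# The hostnames "mongodb" and "redis" resolve via Compose DNS on crawler-network.
mongo = MongoClient(MONGODB_URI)
db = mongo["webcrawler"]          # database named by MONGO_INITDB_DATABASE
queue = redis.from_url(REDIS_URI)

logging.info("Connected to MongoDB at %s and Redis at %s, using %d workers",
             MONGODB_URI, REDIS_URI, MAX_WORKERS)

Because both application services bind-mount ./ into /app and share the crawler_data volume at /data/storage, code changes on the host are visible inside the containers without a rebuild, and the worker and API see the same stored crawl data.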