# docker-compose.prd.yaml
version: '3.8'

services:
  # PostgreSQL backing store shared by the MLflow and application servers
  db:
    image: postgres:latest
    restart: always
    container_name: shipped-brain-db
    env_file:
      - .env
    ports:
      - 5432:5432
    volumes:
      - /var/lib/postgres-data:/var/lib/postgresql/data

  # MLflow tracking server backed by the database and an S3 artifact store
  mlflow_server:
    build: ./api/
    restart: always
    container_name: mlflow-server
    depends_on:
      - db
    env_file:
      - .env
    ports:
      - 5000:5000
    volumes:
      - ${CONDA_ENVS_PATH_VOL}:/opt/conda/envs:rw
      - .env:/app/.env:ro
    command: mlflow server --host 0.0.0.0 --backend-store-uri ${DB_URL} --default-artifact-root s3://${ARTIFACT_DIR_NAME}/ --workers ${MLFLOW_SERVER_WORKERS}

  # Main REST API (main:app) served by gunicorn with uvicorn workers
  api_server:
    build: ./api/
    restart: always
    container_name: shipped-brain-server
    depends_on:
      - db
      - mlflow_server
    env_file:
      - .env
    ports:
      - ${API_SERVER_PORT}:${API_SERVER_PORT}
    volumes:
      - ${CONDA_ENVS_PATH_VOL}:/opt/conda/envs:rw
      - .env:/app/.env:ro
      - ./resources/data:/data:rw
    command: gunicorn -k uvicorn.workers.UvicornWorker -b 0.0.0.0:${API_SERVER_PORT} -w ${API_SERVER_WORKERS} main:app

  # Model prediction endpoint (prediction_server:app)
  prediction_server:
    build: ./api/
    restart: always
    container_name: shipped-brain-prediction-server
    depends_on:
      - db
      - mlflow_server
    env_file:
      - .env
    ports:
      - ${PREDICTION_SERVER_PORT}:${PREDICTION_SERVER_PORT}
    volumes:
      - ${CONDA_ENVS_PATH_VOL}:/opt/conda/envs:rw
      - .env:/app/.env:ro
    command: gunicorn -k uvicorn.workers.UvicornWorker -b 0.0.0.0:${PREDICTION_SERVER_PORT} -w ${PREDICTION_SERVER_WORKERS} prediction_server:app

  # Model upload endpoint (model_upload_server:app)
  upload_server:
    build: ./api/
    restart: always
    container_name: shipped-brain-upload-server
    depends_on:
      - db
      - mlflow_server
    env_file:
      - .env
    ports:
      - ${UPLOAD_SERVER_PORT}:${UPLOAD_SERVER_PORT}
    volumes:
      - ${CONDA_ENVS_PATH_VOL}:/opt/conda/envs:rw
      - .env:/app/.env:ro
    command: gunicorn -k uvicorn.workers.UvicornWorker -b 0.0.0.0:${UPLOAD_SERVER_PORT} -w ${UPLOAD_SERVER_WORKERS} model_upload_server:app

  # nginx container serving the built frontend over ports 80/443,
  # with Let's Encrypt certificates mounted from the host
  frontend:
    build: ./app/
    container_name: shipped-brain-ui
    volumes:
      - /var/lib/shipped-brain-ui/dist/shipped-brain/:/usr/share/nginx/html/:rw
      - /var/lib/shipped-brain-ui/nginx_conf/:/etc/nginx/conf.d/:rw
      - /var/lib/shipped-brain-ui/letsencrypt/:/etc/letsencrypt/:rw
    ports:
      - 80:80
      - 443:443
    depends_on:
      - api_server
      - mlflow_server
    command: /bin/bash /start_nginx.prd.sh
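
# All services above read their configuration from a `.env` file next to this
# compose file. A minimal sketch of that file is shown below; every value is a
# placeholder assumption (the real settings are not part of this file), but the
# variable names match the ${...} references used above, plus the standard
# POSTGRES_* variables expected by the official postgres image:
#
#   POSTGRES_USER=<db-user>
#   POSTGRES_PASSWORD=<db-password>
#   POSTGRES_DB=<db-name>
#   DB_URL=postgresql://<db-user>:<db-password>@db:5432/<db-name>
#   ARTIFACT_DIR_NAME=<s3-bucket-name>
#   CONDA_ENVS_PATH_VOL=/opt/conda/envs
#   MLFLOW_SERVER_WORKERS=2
#   API_SERVER_PORT=8000
#   API_SERVER_WORKERS=4
#   PREDICTION_SERVER_PORT=8001
#   PREDICTION_SERVER_WORKERS=2
#   UPLOAD_SERVER_PORT=8002
#   UPLOAD_SERVER_WORKERS=2
#
# With such a file in place, the production stack can be brought up with:
#   docker compose -f docker-compose.prd.yaml up -d --build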