
Commit

Reduce docker build context size
fdegier committed Feb 13, 2023
1 parent 60674db commit 6ac8d0b
Showing 10 changed files with 60 additions and 354 deletions.
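The effect of the smaller context is easy to check before and after this commit: the classic builder prints the size of the context it sends to the daemon, and BuildKit reports it in its "[internal] load build context" step. A rough comparison, assuming the renamed proxy.Dockerfile at the repository root (as in the compose files below); the image tag here is arbitrary and only for illustration:

    # Classic builder: prints "Sending build context to Docker daemon  <size>"
    DOCKER_BUILDKIT=0 docker build -f proxy.Dockerfile -t fauxpilot-proxy-test .

    # BuildKit: look for "transferring context: <size>" in the build output
    DOCKER_BUILDKIT=1 docker build -f proxy.Dockerfile -t fauxpilot-proxy-test .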
325 changes: 0 additions & 325 deletions .dockerignore

This file was deleted.

8 changes: 8 additions & 0 deletions api.dockerignore
@@ -0,0 +1,8 @@
+.hf_cache/
+.idea
+*.md
+.git*
+.editorconfig
+models
+converter
+tests
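Note that the classic docker build only reads an ignore file literally named .dockerignore at the context root, and BuildKit's per-Dockerfile convention expects a file named after the Dockerfile (for example proxy.Dockerfile.dockerignore), so a file called api.dockerignore is not picked up automatically. A minimal sketch of one way such a file could be applied, assuming a hypothetical wrapper script and an api.Dockerfile, neither of which appears in this diff:

    #!/usr/bin/env sh
    # build_api.sh -- hypothetical helper, not part of this commit.
    # Copy the API-specific ignore file to the name Docker actually reads,
    # build the image, then remove it again (this commit deletes the old
    # root .dockerignore, so there is nothing to restore).
    set -e
    cp api.dockerignore .dockerignore
    docker build -f api.Dockerfile -t fauxpilot-api .
    rm -f .dockerignore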
4 changes: 2 additions & 2 deletions docker-compose.yaml
@@ -3,7 +3,7 @@ services:
   triton:
     build:
       context: .
-      dockerfile: Dockerfile
+      dockerfile: triton.Dockerfile
     command: bash -c "CUDA_VISIBLE_DEVICES=${GPUS} mpirun -n 1 --allow-run-as-root /opt/tritonserver/bin/tritonserver --model-repository=/model"
     shm_size: '2gb'
     volumes:
@@ -27,7 +27,7 @@ services:
     # For local build
     build:
       context: .
-      dockerfile: copilot_proxy/Dockerfile
+      dockerfile: proxy.Dockerfile
     env_file:
       # Automatically created via ./setup.sh
       - .env
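With the Dockerfiles now referenced by their new names at the repository root, images are still built the same way through Compose. For example, assuming the Compose v2 plugin (with the standalone docker-compose binary the commands are equivalent):

    # Rebuild the Triton image from triton.Dockerfile
    docker compose build triton

    # Or rebuild every service defined in docker-compose.yaml
    docker compose build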
File renamed without changes.
4 changes: 2 additions & 2 deletions tests/python_backend/docker-compose-with-gpus.yaml
@@ -3,7 +3,7 @@ services:
   triton:
     build:
       context: ../../
-      dockerfile: Dockerfile
+      dockerfile: ../../triton.Dockerfile
     command: bash -c "CUDA_VISIBLE_DEVICES="${GPUS}" mpirun -n 1 --allow-run-as-root /opt/tritonserver/bin/tritonserver --model-repository=/model"
     shm_size: '2gb'
     volumes:
@@ -27,7 +27,7 @@ services:
     # For local build
     build:
       context: ../../
-      dockerfile: copilot_proxy/Dockerfile
+      dockerfile: ../../proxy.Dockerfile
     env_file:
       # Automatically created via ./setup.sh
       - test.env
4 changes: 2 additions & 2 deletions tests/python_backend/docker-compose-without-gpus.yaml
@@ -3,7 +3,7 @@ services:
   triton:
     build:
       context: ../../
-      dockerfile: Dockerfile
+      dockerfile: ../../triton.Dockerfile
     command: bash -c "CUDA_VISIBLE_DEVICES="${GPUS}" mpirun -n 1 --allow-run-as-root /opt/tritonserver/bin/tritonserver --model-repository=/model"
     shm_size: '2gb'
     volumes:
@@ -20,7 +20,7 @@ services:
     # For local build
     build:
       context: ../../
-      dockerfile: copilot_proxy/Dockerfile
+      dockerfile: ../../proxy.Dockerfile
     env_file:
       # Automatically created via ./setup.sh
       - test.env
6 changes: 3 additions & 3 deletions tests/python_backend/requirements.txt
@@ -1,3 +1,3 @@
-pytest
-pexpect
-requests
+pytest==7.2.1
+pexpect==4.8.0
+requests==2.28.2
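Pinning the test dependencies makes the test environment reproducible. Installing and running them locally could look like the sketch below; note that the python_backend tests also rely on the Compose stacks above, so a working Docker install and a populated test.env (created via ./setup.sh) are assumed:

    # Install the pinned test dependencies
    pip install -r tests/python_backend/requirements.txt

    # Run the python_backend test suite
    pytest tests/python_backend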