diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index c966e9b..de5cbef 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -27,6 +27,9 @@ jobs:
           docker build -t ghcr.io/bionic-gpt/llama-3.2-3b:latest -t bionic-gpt/llama-3.2-3b:latest -f Dockerfile.llama3.2-3b.cpu .
           docker push ghcr.io/bionic-gpt/llama-3.2-3b:latest
 
+          docker build -t ghcr.io/bionic-gpt/llama-3-8b-chat:latest -t bionic-gpt/llama-3-8b-chat:latest -f Dockerfile.llama-3-8b-chat .
+          docker push ghcr.io/bionic-gpt/llama-3-8b-chat:latest
+
           docker build -t ghcr.io/bionic-gpt/embeddings:latest -t bionic-gpt/embeddings:latest -f Dockerfile.embeddings .
           docker push ghcr.io/bionic-gpt/embeddings:latest
 
diff --git a/.releaserc.json b/.releaserc.json
index bd10996..e99d500 100644
--- a/.releaserc.json
+++ b/.releaserc.json
@@ -7,6 +7,10 @@
       "name": "ghcr.io/bionic-gpt/llama-3.2-3b:latest",
       "skipLogin": true
     }],
+    ["@semantic-release-plus/docker", {
+      "name": "ghcr.io/bionic-gpt/llama-3-8b-chat:latest",
+      "skipLogin": true
+    }],
     ["@semantic-release-plus/docker", {
       "name": "ghcr.io/bionic-gpt/embeddings:latest",
       "skipLogin": true
diff --git a/Dockerfile.llama3-8b-chat b/Dockerfile.llama3-8b-chat
new file mode 100644
index 0000000..fa85d81
--- /dev/null
+++ b/Dockerfile.llama3-8b-chat
@@ -0,0 +1,3 @@
+FROM ollama/ollama:0.3.13
+
+RUN nohup bash -c "ollama serve &" && sleep 20 && ollama pull llama3
\ No newline at end of file
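
The new Dockerfile bakes the model weights into the image at build time: ollama serve is started in the background, given roughly 20 seconds to come up, and ollama pull llama3 then downloads the weights into the image layer, so containers start with the model already present instead of pulling it at runtime. A minimal local smoke test might look like the sketch below; the container name and published port are illustrative, the -f path assumes the Dockerfile name added in this diff, and the curl call uses Ollama's default API port 11434.

# Build the image locally using the Dockerfile added in this diff (filename assumed).
docker build -t bionic-gpt/llama-3-8b-chat:latest -f Dockerfile.llama3-8b-chat .

# Run it; the ollama/ollama base image starts "ollama serve" by default on port 11434.
docker run -d --name llama3-chat-test -p 11434:11434 bionic-gpt/llama-3-8b-chat:latest

# List the models present in the container; "llama3" should appear without a runtime pull.
curl http://localhost:11434/api/tags

# Clean up the test container.
docker rm -f llama3-chat-test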