Mirror of https://github.com/zylon-ai/private-gpt.git (synced 2025-12-22 04:30:11 +01:00)
fix: docker permissions (#2059)
* fix: missing depends_on
* chore: update copy permissions
* chore: update entrypoint
* Revert "chore: update entrypoint". This reverts commit f73a36af2f.
* Revert "chore: update copy permissions". This reverts commit fabc3f66bb.
* style: fix docker warning
* fix: multiples fixes
* fix: user permissions writing local_data folder
parent 77461b96cf
commit 8c12c6830b
3 changed files with 14 additions and 10 deletions
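The docker-compose.yaml hunks below boil down to three permission-related changes: the application containers now run as root so they can write to the bind-mounted ./local_data folder, private-gpt-ollama gains a depends_on on the ollama (Traefik) proxy service, and HF_TOKEN gets an empty default so Compose stops warning when the variable is unset; the remaining lines are smaller cleanups to the volume path and the proxy port mapping. As a quick orientation, here is a condensed sketch assembled only from the keys this commit touches; the real services also define image, build, ports, and further environment variables that are unchanged and therefore omitted:

services:
  private-gpt-ollama:
    user: root                                    # run as root so the container can write the mounted folder
    volumes:
      - ./local_data:/home/worker/app/local_data  # bind mount that previously hit permission errors
    depends_on:
      - ollama                                    # Traefik proxy fronting the Ollama backends

  private-gpt-llamacpp-cpu:
    user: root
    environment:
      HF_TOKEN: ${HF_TOKEN:-}                     # empty default silences the "variable is not set" warning

With the ${HF_TOKEN:-} default, docker compose up no longer warns when HF_TOKEN is unset; exporting the variable still forwards a Hugging Face token into the container.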
docker-compose.yaml

@@ -8,11 +8,12 @@ services:
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
     image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
+    user: root
     build:
       context: .
       dockerfile: Dockerfile.ollama
     volumes:
-      - ./local_data/:/home/worker/app/local_data
+      - ./local_data:/home/worker/app/local_data
     ports:
       - "8001:8001"
     environment:
@@ -27,11 +28,14 @@ services:
       - ollama-cpu
       - ollama-cuda
       - ollama-api
+    depends_on:
+      - ollama

   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
     image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
+    user: root
     build:
       context: .
       dockerfile: Dockerfile.llamacpp-cpu
@@ -44,7 +48,7 @@ services:
     environment:
       PORT: 8001
       PGPT_PROFILES: local
-      HF_TOKEN: ${HF_TOKEN}
+      HF_TOKEN: ${HF_TOKEN:-}
     profiles:
       - llamacpp-cpu

@@ -57,7 +61,7 @@ services:
   ollama:
     image: traefik:v2.10
     ports:
-      - "8081:8080"
+      - "11434:11434"
     command:
       - "--providers.file.filename=/etc/router.yml"
       - "--log.level=ERROR"
@@ -98,4 +102,4 @@ services:
               count: 1
               capabilities: [gpu]
     profiles:
-      - ollama-cuda
+      - ollama-cuda