# docker-compose definition for a llama.cpp inference server (AMD/ROCm GPU).
services:
  llamacpp-server:
    image: llamacpp:latest
    # Port is baked into the container name so several instances (one per
    # SERVER_PORT) can run side by side without a name collision.
    container_name: llamacpp_${SERVER_PORT}
    ports:
      # Quoted mapping: unquoted host:container pairs can hit YAML 1.1
      # sexagesimal integer parsing for low port numbers.
      - "${SERVER_PORT}:${SERVER_PORT}"
    devices:
      # AMD GPU passthrough: /dev/kfd (ROCm kernel fusion driver) and
      # /dev/dri (direct rendering) are both required for ROCm workloads.
      - /dev/kfd:/dev/kfd
      - /dev/dri:/dev/dri
    volumes:
      # Host model directory mounted read into the container at /models.
      - /opt/models:/models
    # Folded block scalar (>): the lines below collapse to a single
    # command line passed to the image's entrypoint.
    # NOTE(review): -ngl / -c look like llama.cpp's gpu-layers and
    # context-size flags — confirm against the image's server binary.
    command: >
      -m /models/${MODEL_FILENAME}
      --host 0.0.0.0
      --port ${SERVER_PORT}
      -ngl 99
      -c 4096
    restart: unless-stopped