Initial production-ready Gemma 3 vLLM ROCm stack
Co-Authored-By: Oz <oz-agent@warp.dev>
This commit is contained in:
4
backend/Dockerfile
Normal file
4
backend/Dockerfile
Normal file
@ -0,0 +1,4 @@
# syntax=docker/dockerfile:1
# Optional backend Dockerfile.
# This stack uses the official vLLM ROCm image directly from docker-compose.yml.
# Keep this file for future customizations.

# NOTE(review): `latest` is not reproducible (hadolint DL3007) — pin a specific
# release (or a digest) before relying on this image in production. The tag is
# exposed as a build arg so it can be overridden without editing this file:
#   docker build --build-arg VLLM_TAG=<version> .
ARG VLLM_TAG=latest

# NOTE(review): confirm this repository exists — official ROCm vLLM images are
# typically published as `rocm/vllm`, while `vllm/vllm-openai` is the CUDA
# OpenAI-compatible image; verify against the image used in docker-compose.yml.
FROM vllm/vllm-openai-rocm:${VLLM_TAG}
Reference in New Issue
Block a user