Initial production-ready Gemma 3 vLLM ROCm stack

Co-Authored-By: Oz <oz-agent@warp.dev>
Raghav
2026-04-18 22:53:38 +05:30
commit ef8537e923
18 changed files with 988 additions and 0 deletions

scripts/test_ui.sh Executable file

@@ -0,0 +1,25 @@
#!/usr/bin/env bash
# Checks that the chat UI is reachable on the configured localhost frontend port.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
ENV_FILE="${REPO_ROOT}/.env"
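# Load optional overrides (e.g. FRONTEND_PORT) from the repo-level .env, if present.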
if [[ -f "${ENV_FILE}" ]]; then
# shellcheck disable=SC1090
source "${ENV_FILE}"
fi
FRONTEND_PORT="${FRONTEND_PORT:-3000}"
UI_URL="http://localhost:${FRONTEND_PORT}"
http_status="$(curl -sS --max-time 10 -o /dev/null -w '%{http_code}' "${UI_URL}" || true)"  # '|| true' stops set -e from aborting when the server is down; curl still prints 000
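# Treat redirects (301/302) as success; the frontend may redirect / to an app route.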
if [[ "${http_status}" != "200" && "${http_status}" != "301" && "${http_status}" != "302" ]]; then
echo "[test_ui][error] UI check failed with HTTP status ${http_status} at ${UI_URL}" >&2
echo "[test_ui][hint] See docs/TROUBLESHOOTING.md#ui-loads-but-cannot-reach-vllm-backend" >&2
exit 1
fi
echo "[test_ui] Chat UI is reachable at ${UI_URL} (HTTP ${http_status})."