fix: stabilize docker e2e lanes

This commit is contained in:
Peter Steinberger
2026-04-05 07:13:59 +01:00
parent e5023cc141
commit 657c6f6788
9 changed files with 297 additions and 69 deletions

View File

@@ -27,7 +27,7 @@ COPY --chown=appuser:appuser scripts/postinstall-bundled-plugins.mjs scripts/npm
RUN --mount=type=cache,id=openclaw-pnpm-store,target=/home/appuser/.local/share/pnpm/store,sharing=locked \
pnpm install --frozen-lockfile
COPY --chown=appuser:appuser tsconfig.json tsconfig.plugin-sdk.dts.json tsdown.config.ts vitest.config.ts vitest.e2e.config.ts vitest.performance-config.ts vitest.shared.config.ts vitest.bundled-plugin-paths.ts openclaw.mjs ./
COPY --chown=appuser:appuser tsconfig.json tsconfig.plugin-sdk.dts.json tsdown.config.ts vitest.config.ts vitest.e2e.config.ts vitest.performance-config.ts vitest.shared.config.ts vitest.system-load.ts vitest.bundled-plugin-paths.ts openclaw.mjs ./
COPY --chown=appuser:appuser src ./src
COPY --chown=appuser:appuser test ./test
COPY --chown=appuser:appuser scripts ./scripts

View File

@@ -2,12 +2,12 @@
set -euo pipefail
ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
source "$ROOT_DIR/scripts/lib/live-docker-auth.sh"
IMAGE_NAME="openclaw-openwebui-e2e"
OPENWEBUI_IMAGE="${OPENWEBUI_IMAGE:-ghcr.io/open-webui/open-webui:v0.8.10}"
PROFILE_FILE="${OPENCLAW_PROFILE_FILE:-$HOME/.profile}"
MODEL="${OPENCLAW_OPENWEBUI_MODEL:-openai/gpt-5.4}"
# Keep the default on a broadly available non-reasoning OpenAI model for
# Open WebUI compatibility smoke. Callers can still override this explicitly.
MODEL="${OPENCLAW_OPENWEBUI_MODEL:-openai/gpt-4.1-mini}"
PROMPT_NONCE="OPENWEBUI_DOCKER_E2E_$(date +%s)_$$"
PROMPT="${OPENCLAW_OPENWEBUI_PROMPT:-Reply with exactly this token and nothing else: ${PROMPT_NONCE}}"
PORT="${OPENCLAW_OPENWEBUI_GATEWAY_PORT:-18789}"
@@ -19,31 +19,17 @@ NET_NAME="openclaw-openwebui-e2e-$$"
GW_NAME="openclaw-openwebui-gateway-$$"
OW_NAME="openclaw-openwebui-$$"
PROFILE_MOUNT=()
if [[ -f "$PROFILE_FILE" ]]; then
PROFILE_MOUNT=(-v "$PROFILE_FILE":/home/appuser/.profile:ro)
OPENAI_API_KEY_VALUE="${OPENAI_API_KEY:-}"
if [[ "$OPENAI_API_KEY_VALUE" == "undefined" || "$OPENAI_API_KEY_VALUE" == "null" ]]; then
OPENAI_API_KEY_VALUE=""
fi
AUTH_DIRS=()
if [[ -n "${OPENCLAW_DOCKER_AUTH_DIRS:-}" ]]; then
while IFS= read -r auth_dir; do
[[ -n "$auth_dir" ]] || continue
AUTH_DIRS+=("$auth_dir")
done < <(openclaw_live_collect_auth_dirs)
OPENAI_BASE_URL_VALUE="${OPENAI_BASE_URL:-}"
if [[ "$OPENAI_BASE_URL_VALUE" == "undefined" || "$OPENAI_BASE_URL_VALUE" == "null" ]]; then
OPENAI_BASE_URL_VALUE=""
fi
AUTH_DIRS_CSV=""
if ((${#AUTH_DIRS[@]} > 0)); then
AUTH_DIRS_CSV="$(openclaw_live_join_csv "${AUTH_DIRS[@]}")"
fi
EXTERNAL_AUTH_MOUNTS=()
if ((${#AUTH_DIRS[@]} > 0)); then
for auth_dir in "${AUTH_DIRS[@]}"; do
host_path="$HOME/$auth_dir"
if [[ -d "$host_path" ]]; then
EXTERNAL_AUTH_MOUNTS+=(-v "$host_path":/host-auth/"$auth_dir":ro)
fi
done
if [[ -z "$OPENAI_API_KEY_VALUE" ]]; then
echo "OPENAI_API_KEY is required for the Open WebUI Docker smoke." >&2
exit 2
fi
cleanup() {
@@ -66,34 +52,45 @@ echo "Starting gateway container..."
docker run -d \
--name "$GW_NAME" \
--network "$NET_NAME" \
-e "OPENCLAW_DOCKER_AUTH_DIRS_RESOLVED=$AUTH_DIRS_CSV" \
-e "OPENCLAW_GATEWAY_TOKEN=$TOKEN" \
-e "OPENCLAW_OPENWEBUI_MODEL=$MODEL" \
-e "OPENCLAW_SKIP_CHANNELS=1" \
-e "OPENCLAW_SKIP_GMAIL_WATCHER=1" \
-e "OPENCLAW_SKIP_CRON=1" \
-e "OPENCLAW_SKIP_CANVAS_HOST=1" \
"${EXTERNAL_AUTH_MOUNTS[@]}" \
"${PROFILE_MOUNT[@]}" \
-e OPENAI_API_KEY \
${OPENAI_BASE_URL_VALUE:+-e OPENAI_BASE_URL} \
"$IMAGE_NAME" \
bash -lc '
set -euo pipefail
[ -f "$HOME/.profile" ] && source "$HOME/.profile" || true
IFS="," read -r -a auth_dirs <<<"${OPENCLAW_DOCKER_AUTH_DIRS_RESOLVED:-}"
if ((${#auth_dirs[@]} > 0)); then
for auth_dir in "${auth_dirs[@]}"; do
[ -n "$auth_dir" ] || continue
if [ -d "/host-auth/$auth_dir" ]; then
mkdir -p "$HOME/$auth_dir"
cp -R "/host-auth/$auth_dir/." "$HOME/$auth_dir"
chmod -R u+rwX "$HOME/$auth_dir" || true
fi
done
fi
entry=dist/index.mjs
[ -f "$entry" ] || entry=dist/index.js
openai_api_key="${OPENAI_API_KEY:?OPENAI_API_KEY required}"
node - <<'"'"'NODE'"'"' "$openai_api_key"
const fs = require("node:fs");
const path = require("node:path");
const openaiApiKey = process.argv[2];
const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
const config = fs.existsSync(configPath)
? JSON.parse(fs.readFileSync(configPath, "utf8"))
: {};
const existingOpenAI = config.models?.providers?.openai ?? {};
config.models = {
...(config.models || {}),
providers: {
...(config.models?.providers || {}),
openai: {
...existingOpenAI,
baseUrl:
typeof existingOpenAI.baseUrl === "string" && existingOpenAI.baseUrl.trim()
? existingOpenAI.baseUrl
: process.env.OPENAI_BASE_URL || "https://api.openai.com/v1",
apiKey: openaiApiKey,
models: Array.isArray(existingOpenAI.models) ? existingOpenAI.models : [],
},
},
};
fs.mkdirSync(path.dirname(configPath), { recursive: true });
fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
NODE
node "$entry" config set gateway.controlUi.enabled false >/dev/null
node "$entry" config set gateway.mode local >/dev/null
node "$entry" config set gateway.bind lan >/dev/null
@@ -176,14 +173,20 @@ if [ "$ow_ready" -ne 1 ]; then
fi
echo "Running Open WebUI -> OpenClaw smoke..."
docker exec \
if ! docker exec \
-e "OPENWEBUI_BASE_URL=http://$OW_NAME:$WEBUI_PORT" \
-e "OPENWEBUI_ADMIN_EMAIL=$ADMIN_EMAIL" \
-e "OPENWEBUI_ADMIN_PASSWORD=$ADMIN_PASSWORD" \
-e "OPENWEBUI_EXPECTED_NONCE=$PROMPT_NONCE" \
-e "OPENWEBUI_PROMPT=$PROMPT" \
"$GW_NAME" \
node /app/scripts/e2e/openwebui-probe.mjs
node /app/scripts/e2e/openwebui-probe.mjs; then
echo "Open WebUI probe failed; gateway log tail:"
docker exec "$GW_NAME" bash -lc 'tail -n 200 /tmp/openwebui-gateway.log' || true
echo "Open WebUI container logs:"
docker logs "$OW_NAME" 2>&1 | tail -n 200 || true
exit 1
fi
echo "Open WebUI container logs:"
docker logs "$OW_NAME" 2>&1 | tail -n 80 || true

View File

@@ -7,8 +7,16 @@ IMAGE_NAME="openclaw-plugins-e2e"
echo "Building Docker image..."
docker build -t "$IMAGE_NAME" -f "$ROOT_DIR/scripts/e2e/Dockerfile" "$ROOT_DIR"
DOCKER_ENV_ARGS=(-e COREPACK_ENABLE_DOWNLOAD_PROMPT=0)
if [[ -n "${OPENAI_API_KEY:-}" && "${OPENAI_API_KEY:-}" != "undefined" && "${OPENAI_API_KEY:-}" != "null" ]]; then
DOCKER_ENV_ARGS+=(-e OPENAI_API_KEY)
fi
if [[ -n "${OPENAI_BASE_URL:-}" && "${OPENAI_BASE_URL:-}" != "undefined" && "${OPENAI_BASE_URL:-}" != "null" ]]; then
DOCKER_ENV_ARGS+=(-e OPENAI_BASE_URL)
fi
echo "Running plugins Docker E2E..."
docker run --rm -e COREPACK_ENABLE_DOWNLOAD_PROMPT=0 -e OPENAI_API_KEY -i "$IMAGE_NAME" bash -s <<'EOF'
docker run --rm "${DOCKER_ENV_ARGS[@]}" -i "$IMAGE_NAME" bash -s <<'EOF'
set -euo pipefail
if [ -f dist/index.mjs ]; then
@@ -22,6 +30,24 @@ else
fi
export OPENCLAW_ENTRY
sanitize_env_string() {
local value="${1:-}"
if [[ "$value" == "undefined" || "$value" == "null" ]]; then
printf ''
return
fi
printf '%s' "$value"
}
export OPENAI_API_KEY="$(sanitize_env_string "${OPENAI_API_KEY:-}")"
export OPENAI_BASE_URL="$(sanitize_env_string "${OPENAI_BASE_URL:-}")"
if [[ -z "$OPENAI_API_KEY" ]]; then
unset OPENAI_API_KEY || true
fi
if [[ -z "$OPENAI_BASE_URL" ]]; then
unset OPENAI_BASE_URL || true
fi
home_dir=$(mktemp -d "/tmp/openclaw-plugins-e2e.XXXXXX")
export HOME="$home_dir"
BUNDLED_PLUGIN_ROOT_DIR="extensions"
@@ -29,6 +55,40 @@ OPENCLAW_PLUGIN_HOME="$HOME/.openclaw/$BUNDLED_PLUGIN_ROOT_DIR"
gateway_pid=""
# Seed (or merge into) $HOME/.openclaw/openclaw.json so the "openai" model
# provider carries an API key and base URL before the gateway is started.
#   $1 = OpenAI API key (required by callers; written verbatim into the config)
#   $2 = optional base-URL override (may be empty)
# NOTE(review): assumes $HOME is set in the container environment — the Node
# script joins process.env.HOME without a fallback; confirm against the image.
seed_openai_provider_config() {
local openai_api_key="$1"
local openai_base_url="${2:-}"
# The heredoc delimiter is quoted ('NODE'), so the JS below reaches node
# verbatim; the secret and URL travel as argv instead of being interpolated
# into the script text.
node - <<'NODE' "$openai_api_key" "$openai_base_url"
const fs = require("node:fs");
const path = require("node:path");
// With `node -`, argv[1] is "-"; the two shell arguments land at 2 and 3.
const openaiApiKey = process.argv[2];
const openaiBaseUrl = process.argv[3];
const configPath = path.join(process.env.HOME, ".openclaw", "openclaw.json");
// Start from any existing config so unrelated settings survive the merge.
const config = fs.existsSync(configPath)
? JSON.parse(fs.readFileSync(configPath, "utf8"))
: {};
const existingOpenAI = config.models?.providers?.openai ?? {};
// Shallow-merge: preserve other providers and other openai keys, but always
// overwrite apiKey with the value passed in from the shell.
config.models = {
...(config.models || {}),
providers: {
...(config.models?.providers || {}),
openai: {
...existingOpenAI,
// Keep a pre-existing non-blank baseUrl; otherwise fall back to the
// argument, then to the public OpenAI endpoint.
baseUrl:
typeof existingOpenAI.baseUrl === "string" && existingOpenAI.baseUrl.trim()
? existingOpenAI.baseUrl
: openaiBaseUrl || "https://api.openai.com/v1",
apiKey: openaiApiKey,
// Coerce a malformed models entry to an empty list rather than failing.
models: Array.isArray(existingOpenAI.models) ? existingOpenAI.models : [],
},
},
};
fs.mkdirSync(path.dirname(configPath), { recursive: true });
// Pretty-print with a trailing newline so the file stays diff/editor friendly.
fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
NODE
}
stop_gateway() {
if [ -n "${gateway_pid:-}" ] && kill -0 "$gateway_pid" 2>/dev/null; then
kill "$gateway_pid" 2>/dev/null || true
@@ -162,11 +222,12 @@ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
async function main() {
const runId = `plugin-e2e-${randomUUID()}`;
const sendResult = callGateway("chat.send", {
const sendParams = {
sessionKey,
message,
idempotencyKey: runId,
});
};
const sendResult = callGateway("chat.send", sendParams);
if (!sendResult.ok) {
throw sendResult.error;
}
@@ -198,9 +259,39 @@ async function main() {
);
return;
}
const statusResult = callGateway("chat.send", sendParams);
if (statusResult.ok) {
const status = statusResult.value;
if (status?.status === "error") {
const summary =
typeof status.summary === "string" && status.summary.trim()
? status.summary.trim()
: JSON.stringify(status);
throw new Error(`gateway run failed for ${sessionKey}: ${summary}`);
}
}
await sleep(100);
}
const finalHistory = callGateway("chat.history", { sessionKey });
const finalStatus = callGateway("chat.send", sendParams);
fs.writeFileSync(
outputFile,
`${JSON.stringify(
{
sessionKey,
runId,
error: "timeout",
history: finalHistory.ok ? finalHistory.value : null,
historyError: finalHistory.ok ? null : String(finalHistory.error),
status: finalStatus.ok ? finalStatus.value : null,
statusError: finalStatus.ok ? null : String(finalStatus.error),
},
null,
2,
)}\n`,
"utf8",
);
throw new Error(`timed out waiting for assistant reply for ${sessionKey}`);
}
@@ -504,7 +595,9 @@ if (process.env.OPENAI_API_KEY) {
...(config.agents || {}),
defaults: {
...(config.agents?.defaults || {}),
model: { primary: "openai/gpt-5.4" },
// Use the same stable OpenAI family as the installer E2E to avoid
// long or reasoning-heavy live turns in this bundle-command smoke.
model: { primary: "openai/gpt-4.1-mini" },
},
};
}
@@ -517,6 +610,10 @@ fs.mkdirSync(path.dirname(configPath), { recursive: true });
fs.writeFileSync(configPath, `${JSON.stringify(config, null, 2)}\n`, "utf8");
NODE
if [ -n "${OPENAI_API_KEY:-}" ]; then
seed_openai_provider_config "$OPENAI_API_KEY" "${OPENAI_BASE_URL:-}"
fi
gateway_log="/tmp/openclaw-plugin-command-e2e.log"
start_gateway "$gateway_log"
wait_for_gateway_health
@@ -630,11 +727,17 @@ NODE
if [ -n "${OPENAI_API_KEY:-}" ]; then
echo "Testing Claude bundle command invocation..."
run_gateway_chat_json \
if ! run_gateway_chat_json \
"plugin-e2e-live" \
"/office_hours Reply with exactly BUNDLE_OK and nothing else." \
/tmp/plugin-command-live.json \
60000
120000; then
echo "Claude bundle command invocation failed; payload dump:"
cat /tmp/plugin-command-live.json 2>/dev/null || true
echo "Gateway log tail:"
tail -n 200 "$gateway_log" || true
exit 1
fi
node - <<'NODE'
const fs = require("node:fs");
const payload = JSON.parse(fs.readFileSync("/tmp/plugin-command-live.json", "utf8"));