mirror of
https://github.com/community-scripts/ProxmoxVE.git
synced 2026-02-25 19:07:40 +03:00
Compare commits
3 Commits
ref_api
...
tremor021-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4f18074436 | ||
|
|
7484c58175 | ||
|
|
dd4a15fff1 |
149
.github/workflows/check-node-versions.yml
generated
vendored
149
.github/workflows/check-node-versions.yml
generated
vendored
@@ -13,7 +13,7 @@ permissions:
|
||||
jobs:
|
||||
check-node-versions:
|
||||
if: github.repository == 'community-scripts/ProxmoxVE'
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: coolify-runner
|
||||
|
||||
steps:
|
||||
- name: Checkout Repository
|
||||
@@ -110,22 +110,94 @@ jobs:
|
||||
}
|
||||
|
||||
# Extract Node major from engines.node in package.json
|
||||
# Sets: ENGINES_NODE_RAW (raw string), ENGINES_MIN_MAJOR
|
||||
# Sets: ENGINES_NODE_RAW (raw string), ENGINES_MIN_MAJOR, ENGINES_IS_MINIMUM
|
||||
# Parse the engines.node constraint out of a package.json document.
# Sets: ENGINES_NODE_RAW   - the raw constraint string ("" if absent)
#       ENGINES_MIN_MAJOR  - first numeric component of the constraint
#       ENGINES_IS_MINIMUM - "true" when the constraint starts with >=, ^ or ~
extract_engines_node() {
  local content="$1"
  ENGINES_NODE_RAW=""
  ENGINES_MIN_MAJOR=""
  ENGINES_IS_MINIMUM="false"

  ENGINES_NODE_RAW=$(jq -r '.engines.node // empty' <<<"$content" 2>/dev/null || echo "")
  [[ -z "$ENGINES_NODE_RAW" ]] && return

  # ">=", "^" and "~" all express "at least this version" rather than an exact pin.
  case "$ENGINES_NODE_RAW" in
    '>='* | '^'* | '~'*) ENGINES_IS_MINIMUM="true" ;;
  esac

  # The first number in the constraint is taken as the minimum major.
  # Handles: ">=24.13.1", "^22", ">=18.0.0", ">=18.15.0 <19 || ^20", etc.
  ENGINES_MIN_MAJOR=$(grep -oP '\d+' <<<"$ENGINES_NODE_RAW" | head -1 || echo "")
}
|
||||
|
||||
# Check if our_version satisfies an engines.node constraint
|
||||
# Returns 0 if satisfied, 1 if not
|
||||
# Usage: version_satisfies_engines "22" ">=18.0.0" "true"
|
||||
# Check whether our Node major version satisfies an engines.node constraint.
# Usage:   version_satisfies_engines <our_major> <min_major> <is_minimum>
# Returns: 0 if satisfied, 1 if not (or if inputs are missing/non-numeric).
# - is_minimum="true"  (>=, ^, ~): satisfied when our >= min_major
# - is_minimum="false" (exact pin): satisfied only when our == min_major
#   (previously an exact pin could never be satisfied, even on equality)
version_satisfies_engines() {
  local our="$1"
  local min_major="$2"
  local is_minimum="$3"

  # Validate both values are plain integers before arithmetic comparison.
  # The old [[ "$our" -ge ... ]] raised a bash error on non-numeric input,
  # which aborts the whole step under `set -e`.
  [[ "$our" =~ ^[0-9]+$ && "$min_major" =~ ^[0-9]+$ ]] || return 1

  if [[ "$is_minimum" == "true" ]]; then
    # Minimum constraint: any equal-or-newer major satisfies it.
    if (( our >= min_major )); then
      return 0
    fi
  else
    # Exact pin: majors must match.
    if (( our == min_major )); then
      return 0
    fi
  fi
  return 1
}
|
||||
|
||||
# Search for files in subdirectories via GitHub API tree
|
||||
# Usage: find_repo_file "owner/repo" "branch" "filename" => sets REPLY to raw URL or empty
|
||||
# Locate <filename> inside a GitHub repository, checking the repo root first
# and then searching the full tree via the GitHub API.
# Usage: find_repo_file "owner/repo" "branch" "filename"
# Sets:  REPLY to the raw.githubusercontent.com URL of the match, or "" if
#        not found / the API call failed.
# Reads: GH_TOKEN for the authenticated tree-listing request.
find_repo_file() {
  local repo="$1"
  local branch="$2"
  local filename="$3"
  REPLY=""

  # Try the repo root first (single cheap HEAD request).
  # BUGFIX: this interpolated "$(unknown)" — a command substitution of a
  # nonexistent command — instead of the requested filename, so the root
  # fast-path never matched. Use ${filename}.
  local root_url="https://raw.githubusercontent.com/${repo}/${branch}/${filename}"
  if curl -sfI "$root_url" >/dev/null 2>&1; then
    REPLY="$root_url"
    return
  fi

  # Fall back to a recursive tree listing via the GitHub API.
  local tree_url="https://api.github.com/repos/${repo}/git/trees/${branch}?recursive=1"
  local tree_json
  tree_json=$(curl -sf -H "Authorization: token $GH_TOKEN" "$tree_url" 2>/dev/null || echo "")
  if [[ -z "$tree_json" ]]; then
    return
  fi

  # Take the first matching path; lexicographic sort prefers shorter /
  # root-level paths over deeply nested ones.
  local match_path
  match_path=$(echo "$tree_json" | jq -r --arg fn "$filename" \
    '.tree[]? | select(.path | endswith("/" + $fn) or . == $fn) | .path' 2>/dev/null \
    | sort | head -1 || echo "")

  if [[ -n "$match_path" ]]; then
    REPLY="https://raw.githubusercontent.com/${repo}/${branch}/${match_path}"
  fi
}
|
||||
|
||||
# Extract Node major from .nvmrc or .node-version
|
||||
# Sets: NVMRC_NODE_MAJOR
|
||||
# Extract the Node major version from .nvmrc / .node-version file content.
# Sets: NVMRC_NODE_MAJOR - the numeric major ("" when no leading version
#       number is present, e.g. for aliases like "lts/iron").
extract_nvmrc_node() {
  local content="$1"
  NVMRC_NODE_MAJOR=""
  # Typical contents: "v22.9.0", "22", "lts/iron" (possibly with whitespace).
  local trimmed
  trimmed=$(echo "$content" | tr -d '[:space:]')
  # Bash ERE instead of `grep -oP ... \K`: same result without requiring
  # PCRE-enabled GNU grep, and one fewer subprocess.
  if [[ "$trimmed" =~ ^v?([0-9]+) ]]; then
    NVMRC_NODE_MAJOR="${BASH_REMATCH[1]}"
  fi
}
|
||||
|
||||
# Collect results
|
||||
declare -a issue_scripts=()
|
||||
declare -a report_lines=()
|
||||
@@ -143,7 +215,10 @@ jobs:
|
||||
slug=$(basename "$script" | sed 's/-install\.sh$//')
|
||||
|
||||
# Extract Source URL (GitHub only)
|
||||
source_url=$(head -20 "$script" | grep -oP '(?<=# Source: )https://github\.com/[^\s]+' | head -1 || echo "")
|
||||
# Supports both:
|
||||
# # Source: https://github.com/owner/repo
|
||||
# # Source: https://example.com | Github: https://github.com/owner/repo
|
||||
source_url=$(head -20 "$script" | grep -oP 'https://github\.com/[^\s|]+' | head -1 || echo "")
|
||||
if [[ -z "$source_url" ]]; then
|
||||
report_lines+=("| \`$slug\` | — | — | — | — | ⏭️ No GitHub source |")
|
||||
continue
|
||||
@@ -167,12 +242,23 @@ jobs:
|
||||
fi
|
||||
fi
|
||||
|
||||
# Fetch upstream Dockerfile
|
||||
# Determine default branch via GitHub API (fast, single call)
|
||||
detected_branch=""
|
||||
api_default=$(curl -sf -H "Authorization: token $GH_TOKEN" \
|
||||
"https://api.github.com/repos/${repo}" 2>/dev/null \
|
||||
| jq -r '.default_branch // empty' 2>/dev/null || echo "")
|
||||
if [[ -n "$api_default" ]]; then
|
||||
detected_branch="$api_default"
|
||||
else
|
||||
detected_branch="main"
|
||||
fi
|
||||
|
||||
# Fetch upstream Dockerfile (root + subdirectories)
|
||||
df_content=""
|
||||
for branch in main master dev; do
|
||||
df_content=$(curl -sf "https://raw.githubusercontent.com/${repo}/${branch}/Dockerfile" 2>/dev/null || echo "")
|
||||
[[ -n "$df_content" ]] && break
|
||||
done
|
||||
find_repo_file "$repo" "$detected_branch" "Dockerfile"
|
||||
if [[ -n "$REPLY" ]]; then
|
||||
df_content=$(curl -sf "$REPLY" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
DF_NODE_MAJOR=""
|
||||
DF_SOURCE=""
|
||||
@@ -180,19 +266,35 @@ jobs:
|
||||
extract_dockerfile_node "$df_content"
|
||||
fi
|
||||
|
||||
# Fetch upstream package.json
|
||||
# Fetch upstream package.json (root + subdirectories)
|
||||
pkg_content=""
|
||||
for branch in main master dev; do
|
||||
pkg_content=$(curl -sf "https://raw.githubusercontent.com/${repo}/${branch}/package.json" 2>/dev/null || echo "")
|
||||
[[ -n "$pkg_content" ]] && break
|
||||
done
|
||||
find_repo_file "$repo" "$detected_branch" "package.json"
|
||||
if [[ -n "$REPLY" ]]; then
|
||||
pkg_content=$(curl -sf "$REPLY" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
ENGINES_NODE_RAW=""
|
||||
ENGINES_MIN_MAJOR=""
|
||||
ENGINES_IS_MINIMUM="false"
|
||||
if [[ -n "$pkg_content" ]]; then
|
||||
extract_engines_node "$pkg_content"
|
||||
fi
|
||||
|
||||
# Fallback: check .nvmrc or .node-version
|
||||
NVMRC_NODE_MAJOR=""
|
||||
if [[ -z "$DF_NODE_MAJOR" && -z "$ENGINES_MIN_MAJOR" ]]; then
|
||||
for nvmfile in .nvmrc .node-version; do
|
||||
find_repo_file "$repo" "$detected_branch" "$nvmfile"
|
||||
if [[ -n "$REPLY" ]]; then
|
||||
nvmrc_content=$(curl -sf "$REPLY" 2>/dev/null || echo "")
|
||||
if [[ -n "$nvmrc_content" ]]; then
|
||||
extract_nvmrc_node "$nvmrc_content"
|
||||
[[ -n "$NVMRC_NODE_MAJOR" ]] && break
|
||||
fi
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
# Determine upstream recommended major version
|
||||
upstream_major=""
|
||||
upstream_hint=""
|
||||
@@ -203,6 +305,9 @@ jobs:
|
||||
elif [[ -n "$ENGINES_MIN_MAJOR" ]]; then
|
||||
upstream_major="$ENGINES_MIN_MAJOR"
|
||||
upstream_hint="engines: $ENGINES_NODE_RAW"
|
||||
elif [[ -n "$NVMRC_NODE_MAJOR" ]]; then
|
||||
upstream_major="$NVMRC_NODE_MAJOR"
|
||||
upstream_hint=".nvmrc/.node-version"
|
||||
fi
|
||||
|
||||
# Build display values
|
||||
@@ -214,13 +319,23 @@ jobs:
|
||||
if [[ "$our_version" == "dynamic" ]]; then
|
||||
status="🔄 Dynamic"
|
||||
elif [[ "$our_version" == "unset" ]]; then
|
||||
status="⚠️ NODE_VERSION not set"
|
||||
if [[ -n "$upstream_major" ]]; then
|
||||
status="⚠️ NODE_VERSION not set (upstream=$upstream_major via $upstream_hint)"
|
||||
else
|
||||
status="⚠️ NODE_VERSION not set (no upstream info found)"
|
||||
fi
|
||||
issue_scripts+=("$slug|$our_version|$upstream_major|$upstream_hint|$repo")
|
||||
drift_count=$((drift_count + 1))
|
||||
elif [[ -n "$upstream_major" && "$our_version" != "$upstream_major" ]]; then
|
||||
status="🔸 Drift → upstream=$upstream_major ($upstream_hint)"
|
||||
issue_scripts+=("$slug|$our_version|$upstream_major|$upstream_hint|$repo")
|
||||
drift_count=$((drift_count + 1))
|
||||
# Check if engines.node is a minimum constraint that our version satisfies
|
||||
if [[ -z "$DF_NODE_MAJOR" && "$ENGINES_IS_MINIMUM" == "true" ]] && \
|
||||
version_satisfies_engines "$our_version" "$ENGINES_MIN_MAJOR" "$ENGINES_IS_MINIMUM"; then
|
||||
status="✅ (engines: $ENGINES_NODE_RAW — ours: $our_version satisfies)"
|
||||
else
|
||||
status="🔸 Drift → upstream=$upstream_major ($upstream_hint)"
|
||||
issue_scripts+=("$slug|$our_version|$upstream_major|$upstream_hint|$repo")
|
||||
drift_count=$((drift_count + 1))
|
||||
fi
|
||||
fi
|
||||
|
||||
report_lines+=("| \`$slug\` | $our_version | $engines_display | $dockerfile_display | [$repo](https://github.com/$repo) | $status |")
|
||||
|
||||
@@ -421,6 +421,10 @@ Exercise vigilance regarding copycat or coat-tailing sites that seek to exploit
|
||||
|
||||
- Fix detection of ssh keys [@1-tempest](https://github.com/1-tempest) ([#12230](https://github.com/community-scripts/ProxmoxVE/pull/12230))
|
||||
|
||||
### 📂 Github
|
||||
|
||||
- github: improvements for node drift wf [@MickLesk](https://github.com/MickLesk) ([#12309](https://github.com/community-scripts/ProxmoxVE/pull/12309))
|
||||
|
||||
## 2026-02-24
|
||||
|
||||
### 🚀 Updated Scripts
|
||||
|
||||
@@ -28,7 +28,7 @@ function update_script() {
|
||||
exit
|
||||
fi
|
||||
|
||||
NODE_VERSION=24 NODE_MODULE="yarn,npm,pm2" setup_nodejs
|
||||
NODE_VERSION="24" NODE_MODULE="yarn,npm,pm2" setup_nodejs
|
||||
|
||||
if check_for_gh_release "joplin-server" "laurent22/joplin"; then
|
||||
msg_info "Stopping Services"
|
||||
|
||||
@@ -29,7 +29,7 @@ function update_script() {
|
||||
fi
|
||||
|
||||
if check_for_gh_release "Zigbee2MQTT" "Koenkk/zigbee2mqtt"; then
|
||||
NODE_VERSION=24 NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/Koenkk/zigbee2mqtt/master/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
|
||||
NODE_VERSION="24" NODE_MODULE="pnpm@$(curl -fsSL https://raw.githubusercontent.com/Koenkk/zigbee2mqtt/master/package.json | jq -r '.packageManager | split("@")[1]')" setup_nodejs
|
||||
msg_info "Stopping Service"
|
||||
systemctl stop zigbee2mqtt
|
||||
msg_ok "Stopped Service"
|
||||
|
||||
@@ -39,7 +39,7 @@ $STD apt install -y \
|
||||
texlive-xetex
|
||||
msg_ok "Installed Dependencies"
|
||||
|
||||
NODE_VERSION=22 NODE_MODULE="bun" setup_nodejs
|
||||
NODE_VERSION="22" NODE_MODULE="bun" setup_nodejs
|
||||
fetch_and_deploy_gh_release "ConvertX" "C4illin/ConvertX" "tarball" "latest" "/opt/convertx"
|
||||
|
||||
msg_info "Installing ConvertX"
|
||||
|
||||
@@ -21,7 +21,7 @@ msg_ok "Installed Dependencies"
|
||||
|
||||
PG_VERSION="17" setup_postgresql
|
||||
PG_DB_NAME="joplin" PG_DB_USER="joplin" setup_postgresql_db
|
||||
NODE_VERSION=24 NODE_MODULE="yarn,npm,pm2" setup_nodejs
|
||||
NODE_VERSION="24" NODE_MODULE="yarn,npm,pm2" setup_nodejs
|
||||
mkdir -p /opt/pm2
|
||||
export PM2_HOME=/opt/pm2
|
||||
$STD pm2 install pm2-logrotate
|
||||
|
||||
@@ -44,6 +44,8 @@ echo passbolt-ce-server passbolt/nginx-domain string $LOCAL_IP | debconf-set-sel
|
||||
echo passbolt-ce-server passbolt/nginx-certificate-file string /etc/ssl/passbolt/passbolt.crt | debconf-set-selections
|
||||
echo passbolt-ce-server passbolt/nginx-certificate-key-file string /etc/ssl/passbolt/passbolt.key | debconf-set-selections
|
||||
$STD apt install -y --no-install-recommends passbolt-ce-server
|
||||
sed -i 's/client_max_body_size[[:space:]]\+[0-9]\+M;/client_max_body_size 15M;/' /etc/nginx/sites-enabled/nginx-passbolt.conf
|
||||
systemctl reload nginx
|
||||
msg_ok "Setup Passbolt"
|
||||
|
||||
motd_ssh
|
||||
|
||||
@@ -21,7 +21,7 @@ $STD apt install -y \
|
||||
expect
|
||||
msg_ok "Dependencies installed."
|
||||
|
||||
NODE_VERSION=24 setup_nodejs
|
||||
NODE_VERSION="24" setup_nodejs
|
||||
fetch_and_deploy_gh_release "ProxmoxVE-Local" "community-scripts/ProxmoxVE-Local" "tarball"
|
||||
|
||||
msg_info "Installing PVE Scripts local"
|
||||
|
||||
@@ -312,8 +312,7 @@ json_escape() {
|
||||
s=${s//$'\r'/}
|
||||
s=${s//$'\t'/\\t}
|
||||
# Remove any remaining control characters (0x00-0x1F except those already handled)
|
||||
# Also remove DEL (0x7F) and invalid high bytes that break JSON parsers
|
||||
s=$(printf '%s' "$s" | tr -d '\000-\010\013\014\016-\037\177')
|
||||
s=$(printf '%s' "$s" | tr -d '\000-\010\013\014\016-\037')
|
||||
printf '%s' "$s"
|
||||
}
|
||||
|
||||
@@ -983,7 +982,7 @@ EOF
|
||||
fi
|
||||
|
||||
# All 3 attempts failed — do NOT set POST_UPDATE_DONE=true.
|
||||
# This allows the EXIT trap (on_exit in error_handler.func) to retry.
|
||||
# This allows the EXIT trap (api_exit_script) to retry with 3 fresh attempts.
|
||||
# No infinite loop risk: EXIT trap fires exactly once.
|
||||
}
|
||||
|
||||
|
||||
@@ -4098,11 +4098,6 @@ EOF'
|
||||
|
||||
# Installation failed?
|
||||
if [[ $install_exit_code -ne 0 ]]; then
|
||||
# Prevent SIGTSTP (Ctrl+Z) from suspending the script during recovery.
|
||||
# In non-interactive shells (bash -c), background processes (spinner) can
|
||||
# trigger terminal-related signals that stop the entire process group.
|
||||
trap '' TSTP
|
||||
|
||||
msg_error "Installation failed in container ${CTID} (exit code: ${install_exit_code})"
|
||||
|
||||
# Copy install log from container BEFORE API call so get_error_text() can read it
|
||||
@@ -4178,12 +4173,7 @@ EOF'
|
||||
fi
|
||||
|
||||
# Report failure to telemetry API (now with log available on host)
|
||||
# NOTE: Do NOT use msg_info/spinner here — the background spinner process
|
||||
# causes SIGTSTP in non-interactive shells (bash -c "$(curl ...)"), which
|
||||
# stops the entire process group and prevents the recovery dialog from appearing.
|
||||
$STD echo -e "${TAB}⏳ Reporting failure to telemetry..."
|
||||
post_update_to_api "failed" "$install_exit_code"
|
||||
$STD echo -e "${TAB}${CM:-✔} Failure reported"
|
||||
|
||||
# Defense-in-depth: Ensure error handling stays disabled during recovery.
|
||||
# Some functions (e.g. silent/$STD) unconditionally re-enable set -Eeuo pipefail
|
||||
@@ -4547,12 +4537,8 @@ EOF'
|
||||
|
||||
# Force one final status update attempt after cleanup
|
||||
# This ensures status is updated even if the first attempt failed (e.g., HTTP 400)
|
||||
$STD echo -e "${TAB}⏳ Finalizing telemetry report..."
|
||||
post_update_to_api "failed" "$install_exit_code" "force"
|
||||
$STD echo -e "${TAB}${CM:-✔} Telemetry finalized"
|
||||
|
||||
# Restore default SIGTSTP handling before exit
|
||||
trap - TSTP
|
||||
exit $install_exit_code
|
||||
fi
|
||||
|
||||
@@ -5622,21 +5608,44 @@ ensure_log_on_host() {
|
||||
fi
|
||||
}
|
||||
|
||||
# ==============================================================================
|
||||
# TRAP MANAGEMENT
|
||||
# ==============================================================================
|
||||
# All traps (ERR, EXIT, INT, TERM, HUP) are set by catch_errors() in
|
||||
# error_handler.func — called at the top of this file after sourcing.
|
||||
# ------------------------------------------------------------------------------
|
||||
# api_exit_script()
|
||||
#
|
||||
# Do NOT set duplicate traps here. The handlers in error_handler.func
|
||||
# (on_exit, on_interrupt, on_terminate, on_hangup, error_handler) already:
|
||||
# - Send telemetry via post_update_to_api / _send_abort_telemetry
|
||||
# - Stop orphaned containers via _stop_container_if_installing
|
||||
# - Collect logs via ensure_log_on_host
|
||||
# - Clean up lock files and spinner processes
|
||||
#
|
||||
# Previously, inline traps here overwrote catch_errors() traps, causing:
|
||||
# - error_handler() never fired (no error output, no cleanup dialog)
|
||||
# - on_hangup() never fired (SSH disconnect → stuck records)
|
||||
# - Duplicated logic in two places (hard to debug)
|
||||
# ==============================================================================
|
||||
# - Exit trap handler for reporting to API telemetry
|
||||
# - Captures exit code and reports to PocketBase using centralized error descriptions
|
||||
# - Uses explain_exit_code() from api.func for consistent error messages
|
||||
# - ALWAYS sends telemetry FIRST before log collection to prevent pct pull
|
||||
# hangs from blocking status updates (container may be dead/unresponsive)
|
||||
# - For non-zero exit codes: posts "failed" status
|
||||
# - For zero exit codes where post_update_to_api was never called:
|
||||
# catches orphaned "installing" records (e.g., script exited cleanly
|
||||
# but description() was never reached)
|
||||
# ------------------------------------------------------------------------------
|
||||
# ------------------------------------------------------------------------------
# api_exit_script()
# EXIT-trap handler that reports the script outcome to the telemetry API.
# - ALWAYS sends telemetry FIRST, before log collection, so a hanging
#   `pct pull` on a dead/unresponsive container cannot block the status update.
# - Non-zero exit: posts "failed", then best-effort log collection, then stops
#   an orphaned container if we were still in the install phase.
# - Zero exit where a record was opened (POST_TO_API_DONE) but never closed
#   (POST_UPDATE_DONE): posts "done" — exit_code=0 is never an error.
# ------------------------------------------------------------------------------
api_exit_script() {
  local rc=$?
  if [ $rc -ne 0 ]; then
    # Telemetry first — nothing that can hang may run before this.
    post_update_to_api "failed" "$rc" 2>/dev/null || true

    # Log collection is non-critical once the status has been reported.
    if declare -f ensure_log_on_host >/dev/null 2>&1; then
      ensure_log_on_host 2>/dev/null || true
    fi

    # Tear down a container that never finished installing.
    if [[ "${CONTAINER_INSTALLING:-}" == "true" && -n "${CTID:-}" ]] && command -v pct &>/dev/null; then
      pct stop "$CTID" 2>/dev/null || true
    fi
  elif [[ "${POST_TO_API_DONE:-}" == "true" && "${POST_UPDATE_DONE:-}" != "true" ]]; then
    # Clean exit but no completion status was ever sent — record success.
    post_update_to_api "done" "0"
  fi
}
|
||||
|
||||
if command -v pveversion >/dev/null 2>&1; then
|
||||
trap 'api_exit_script' EXIT
|
||||
fi
|
||||
trap 'local _ec=$?; if [[ $_ec -ne 0 ]]; then post_update_to_api "failed" "$_ec" 2>/dev/null || true; if declare -f ensure_log_on_host &>/dev/null; then ensure_log_on_host 2>/dev/null || true; fi; fi' ERR
|
||||
trap 'post_update_to_api "failed" "129" 2>/dev/null || true; if [[ -n "${CTID:-}" ]] && command -v pct &>/dev/null; then pct stop "$CTID" 2>/dev/null || true; fi; exit 129' SIGHUP
|
||||
trap 'post_update_to_api "failed" "130" 2>/dev/null || true; if [[ -n "${CTID:-}" ]] && command -v pct &>/dev/null; then pct stop "$CTID" 2>/dev/null || true; fi; exit 130' SIGINT
|
||||
trap 'post_update_to_api "failed" "143" 2>/dev/null || true; if [[ -n "${CTID:-}" ]] && command -v pct &>/dev/null; then pct stop "$CTID" 2>/dev/null || true; fi; exit 143' SIGTERM
|
||||
|
||||
@@ -199,16 +199,11 @@ error_handler() {
|
||||
return 0
|
||||
fi
|
||||
|
||||
# Stop spinner and restore cursor FIRST — before any output
|
||||
# This prevents spinner text overlapping with error messages
|
||||
if declare -f stop_spinner >/dev/null 2>&1; then
|
||||
stop_spinner 2>/dev/null || true
|
||||
fi
|
||||
printf "\e[?25h"
|
||||
|
||||
local explanation
|
||||
explanation="$(explain_exit_code "$exit_code")"
|
||||
|
||||
printf "\e[?25h"
|
||||
|
||||
# ALWAYS report failure to API immediately - don't wait for container checks
|
||||
# This ensures we capture failures that occur before/after container exists
|
||||
if declare -f post_update_to_api &>/dev/null; then
|
||||
@@ -364,39 +359,9 @@ _send_abort_telemetry() {
|
||||
command -v curl &>/dev/null || return 0
|
||||
[[ "${DIAGNOSTICS:-no}" == "no" ]] && return 0
|
||||
[[ -z "${RANDOM_UUID:-}" ]] && return 0
|
||||
|
||||
# Collect last 20 log lines for error diagnosis (best-effort)
|
||||
local error_text=""
|
||||
if [[ -n "${INSTALL_LOG:-}" && -s "${INSTALL_LOG}" ]]; then
|
||||
error_text=$(tail -n 20 "$INSTALL_LOG" 2>/dev/null | sed 's/\x1b\[[0-9;]*[a-zA-Z]//g; s/\\/\\\\/g; s/"/\\"/g; s/\r//g' | tr '\n' '|' | sed 's/|$//' | tr -d '\000-\010\013\014\016-\037\177') || true
|
||||
fi
|
||||
|
||||
# Calculate duration if start time is available
|
||||
local duration=""
|
||||
if [[ -n "${DIAGNOSTICS_START_TIME:-}" ]]; then
|
||||
duration=$(($(date +%s) - DIAGNOSTICS_START_TIME))
|
||||
fi
|
||||
|
||||
# Build JSON payload with error context
|
||||
local payload
|
||||
payload="{\"random_id\":\"${RANDOM_UUID}\",\"execution_id\":\"${EXECUTION_ID:-${RANDOM_UUID}}\",\"type\":\"${TELEMETRY_TYPE:-lxc}\",\"nsapp\":\"${NSAPP:-${app:-unknown}}\",\"status\":\"failed\",\"exit_code\":${exit_code}"
|
||||
[[ -n "$error_text" ]] && payload="${payload},\"error\":\"${error_text}\""
|
||||
[[ -n "$duration" ]] && payload="${payload},\"duration\":${duration}"
|
||||
payload="${payload}}"
|
||||
|
||||
local api_url="${TELEMETRY_URL:-https://telemetry.community-scripts.org/telemetry}"
|
||||
|
||||
# 2 attempts (retry once on failure) — original had no retry
|
||||
local attempt
|
||||
for attempt in 1 2; do
|
||||
if curl -fsS -m 5 -X POST "$api_url" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "$payload" &>/dev/null; then
|
||||
return 0
|
||||
fi
|
||||
[[ $attempt -eq 1 ]] && sleep 1
|
||||
done
|
||||
return 0
|
||||
curl -fsS -m 5 -X POST "${TELEMETRY_URL:-https://telemetry.community-scripts.org/telemetry}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{\"random_id\":\"${RANDOM_UUID}\",\"execution_id\":\"${EXECUTION_ID:-${RANDOM_UUID}}\",\"type\":\"${TELEMETRY_TYPE:-lxc}\",\"nsapp\":\"${NSAPP:-${app:-unknown}}\",\"status\":\"failed\",\"exit_code\":${exit_code}}" &>/dev/null || true
|
||||
}
|
||||
|
||||
# ------------------------------------------------------------------------------
|
||||
@@ -472,12 +437,6 @@ on_exit() {
|
||||
# - Exits with code 130 (128 + SIGINT=2)
|
||||
# ------------------------------------------------------------------------------
|
||||
on_interrupt() {
|
||||
# Stop spinner and restore cursor before any output
|
||||
if declare -f stop_spinner >/dev/null 2>&1; then
|
||||
stop_spinner 2>/dev/null || true
|
||||
fi
|
||||
printf "\e[?25h" 2>/dev/null || true
|
||||
|
||||
_send_abort_telemetry "130"
|
||||
_stop_container_if_installing
|
||||
if declare -f msg_error >/dev/null 2>&1; then
|
||||
@@ -497,12 +456,6 @@ on_interrupt() {
|
||||
# - Exits with code 143 (128 + SIGTERM=15)
|
||||
# ------------------------------------------------------------------------------
|
||||
on_terminate() {
|
||||
# Stop spinner and restore cursor before any output
|
||||
if declare -f stop_spinner >/dev/null 2>&1; then
|
||||
stop_spinner 2>/dev/null || true
|
||||
fi
|
||||
printf "\e[?25h" 2>/dev/null || true
|
||||
|
||||
_send_abort_telemetry "143"
|
||||
_stop_container_if_installing
|
||||
if declare -f msg_error >/dev/null 2>&1; then
|
||||
@@ -525,11 +478,6 @@ on_terminate() {
|
||||
# - Exits with code 129 (128 + SIGHUP=1)
|
||||
# ------------------------------------------------------------------------------
|
||||
on_hangup() {
|
||||
# Stop spinner (no cursor restore needed — terminal is already gone)
|
||||
if declare -f stop_spinner >/dev/null 2>&1; then
|
||||
stop_spinner 2>/dev/null || true
|
||||
fi
|
||||
|
||||
_send_abort_telemetry "129"
|
||||
_stop_container_if_installing
|
||||
exit 129
|
||||
|
||||
Reference in New Issue
Block a user