feat: build system transition to release fork + archive hardening

Release fork infrastructure:
- REDBEAR_RELEASE=0.1.1 with offline enforcement (fetch/distclean/unfetch blocked)
- 195 BLAKE3-verified source archives in standard format
- Atomic provisioning via provision-release.sh (staging + .complete sentry)
- 5-phase improvement plan: restore format auto-detection, source tree
  validation (validate-source-trees.py), archive-map.json, REPO_BINARY fallback

Archive normalization:
- Removed 87 duplicate/unversioned archives from shared pool
- Regenerated all archives in consistent format with source/ + recipe.toml
- BLAKE3SUMS and manifest.json generated from stable tarball set

Patch management:
- verify-patches.sh: pre-sync dry-run report (OK/REVERSED/CONFLICT)
- 121 upstream-absorbed patches moved to absorbed/ directories
- 43 active patches verified clean against rebased sources
- Stress test: base updated to upstream HEAD, relibc reset and patched

Compilation fixes:
- relibc: Vec imports in redox-rt (proc.rs, lib.rs, sys.rs)
- relibc: unsafe from_raw_parts in mod.rs (2024 edition)
- fetch.rs: rev comparison handles short/full hash prefixes
- kibi recipe: corrected rev mismatch

New scripts: restore-sources.sh, provision-release.sh, verify-sources-archived.sh,
check-upstream-releases.sh, validate-source-trees.py, verify-patches.sh,
repair-archive-format.sh, generate-manifest.py

Documentation: AGENTS.md, README.md, local/AGENTS.md updated for release fork model
This commit is contained in:
2026-05-02 01:41:17 +01:00
parent f55acba68c
commit 5851974b20
242 changed files with 29015 additions and 1818 deletions
+57 -15
View File
@@ -2,9 +2,10 @@
# archive-sources.sh — Export fully-patched source archives for Red Bear OS.
#
# Usage:
# ./local/scripts/archive-sources.sh [--all] [--recipe <path>] [--target <triple>]
# ./local/scripts/archive-sources.sh [--release=<ver>] [--all] [--recipe <path>] [--target <triple>]
#
# Creates versioned, fully-patched source archives in sources/<target>/:
# Creates versioned, fully-patched source archives in sources/<target>/
# or sources/.staging/redbear-<release>/tarballs/:
# <category>-<pkgname>-v<version>-patched.tar.gz
#
# Each archive contains: source/ (fully patched) + recipe.toml
@@ -14,10 +15,9 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
TARGET="${TARGET:-x86_64-unknown-redox}"
SOURCES_DIR="${PROJECT_ROOT}/sources/${TARGET}"
MANIFEST="${SOURCES_DIR}/packages.txt"
mkdir -p "${SOURCES_DIR}"
RELEASE=""
SOURCES_DIR=""
MANIFEST=""
GREEN='\033[1;32m'
RED='\033[1;31m'
@@ -139,26 +139,68 @@ archive_all() {
done < <(find "${PROJECT_ROOT}/recipes" "${PROJECT_ROOT}/local/recipes" -name "recipe.toml" -print0 2>/dev/null)
echo ""
status "Archive complete: ${count} packages, ${failed} failures"
status "Archive complete: ${count} packages, ${failed} failures${SOURCES_DIR}"
}
# ── Main ────────────────────────────────────────────────────────────
case "${1:-}" in
--recipe)
if [ -z "${2:-}" ]; then
err "--recipe requires a path"
MODE=""
RECIPE_PATH=""
while [ $# -gt 0 ]; do
case "$1" in
--release=*)
RELEASE="${1#*=}"
;;
--recipe)
if [ -z "${2:-}" ]; then
err "--recipe requires a path"
exit 1
fi
MODE="recipe"
RECIPE_PATH="$2"
shift
;;
--all)
MODE="all"
;;
*)
echo "Usage: $0 [--release=<ver>] --all | --recipe <path>"
echo ""
echo " --release=<ver> Optional release staging target (e.g. 0.2.0)"
echo " --all Archive all recipes with source directories"
echo " --recipe PATH Archive a specific recipe (e.g. recipes/core/base)"
echo ""
echo " Environment: TARGET=x86_64-unknown-redox (default)"
exit 1
fi
;;
esac
shift
done
if [ -n "$RELEASE" ]; then
SOURCES_DIR="${PROJECT_ROOT}/sources/.staging/redbear-${RELEASE}/tarballs"
else
SOURCES_DIR="${PROJECT_ROOT}/sources/${TARGET}"
fi
MANIFEST="${SOURCES_DIR}/packages.txt"
mkdir -p "${SOURCES_DIR}"
case "$MODE" in
recipe)
status "Writing archives to ${SOURCES_DIR}"
> "$MANIFEST"
archive_recipe "${PROJECT_ROOT}/${2}"
archive_recipe "${PROJECT_ROOT}/${RECIPE_PATH}"
;;
--all)
all)
status "Writing archives to ${SOURCES_DIR}"
archive_all
;;
*)
echo "Usage: $0 --all | --recipe <path>"
echo "Usage: $0 [--release=<ver>] --all | --recipe <path>"
echo ""
echo " --release=<ver> Optional release staging target (e.g. 0.2.0)"
echo " --all Archive all recipes with source directories"
echo " --recipe PATH Archive a specific recipe (e.g. recipes/core/base)"
echo ""
+50 -10
View File
@@ -4,6 +4,18 @@ set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Source .config for release mode settings (REDBEAR_RELEASE, etc.)
if [ -f "$PROJECT_ROOT/.config" ]; then
while IFS='?=' read -r key value; do
key=$(echo "$key" | xargs)
value=$(echo "$value" | xargs)
[ -z "$key" ] && continue
[[ "$key" =~ ^# ]] && continue
# Only set if not already set in environment
[ -n "${!key:-}" ] || export "$key=$value"
done < "$PROJECT_ROOT/.config"
fi
CONFIG="redbear-full"
JOBS="${JOBS:-$(nproc)}"
APPLY_PATCHES="${APPLY_PATCHES:-1}"
@@ -79,7 +91,7 @@ echo ""
cd "$PROJECT_ROOT"
if [ -x "$PROJECT_ROOT/local/scripts/verify-overlay-integrity.sh" ]; then
if [ -x "$PROJECT_ROOT/local/scripts/verify-overlay-integrity.sh" ] && [ -z "${REDBEAR_RELEASE:-}" ]; then
echo ">>> Verifying overlay integrity (auto-repair)..."
"$PROJECT_ROOT/local/scripts/verify-overlay-integrity.sh" --repair
echo ""
@@ -124,7 +136,7 @@ ensure_relibc_desktop_surface() {
fi
}
if [ "$APPLY_PATCHES" = "1" ]; then
if [ "$APPLY_PATCHES" = "1" ] && [ -z "${REDBEAR_RELEASE:-}" ]; then
echo ">>> Applying local patches..."
apply_patch_dir() {
@@ -177,12 +189,8 @@ if [ "$APPLY_PATCHES" = "1" ]; then
stash_nested_repo_if_dirty "$PROJECT_ROOT/recipes/core/relibc/source" "relibc"
echo ""
fi
if [ -x "$PROJECT_ROOT/local/scripts/verify-overlay-integrity.sh" ]; then
echo ">>> Verifying overlay integrity (strict)..."
"$PROJECT_ROOT/local/scripts/verify-overlay-integrity.sh"
echo ""
elif [ -n "${REDBEAR_RELEASE:-}" ]; then
echo ">>> Release mode: skipping patch application (patches pre-applied in archived sources)"
fi
if [ ! -f "target/release/repo" ]; then
@@ -209,11 +217,43 @@ fi
echo ">>> Building Red Bear OS with config: $CONFIG"
echo ">>> This may take 30-60 minutes on first build..."
if [ "$ALLOW_UPSTREAM" -eq 1 ]; then
# In release mode, verify archives exist before building
if [ -n "${REDBEAR_RELEASE:-}" ]; then
echo ">>> Release mode: $REDBEAR_RELEASE"
if [ -f "./local/scripts/verify-sources-archived.sh" ]; then
bash "./local/scripts/verify-sources-archived.sh" --release="$REDBEAR_RELEASE" || {
echo "ERROR: Release archive verification failed. Run: provision-release.sh"
exit 1
}
fi
fi
if [ "${REDBEAR_ALLOW_UPSTREAM:-0}" = "1" ]; then
echo ">>> WARNING: Upstream fetch ENABLED (REDBEAR_ALLOW_UPSTREAM=1)"
REPO_OFFLINE=0 COOKBOOK_OFFLINE=false CI=1 make all "CONFIG_NAME=$CONFIG" "JOBS=$JOBS"
elif [ -n "${REDBEAR_RELEASE:-}" ]; then
echo ">>> Release mode: building from immutable archives (offline)"
# Validate source trees before building
if [ -f "$PROJECT_ROOT/local/scripts/validate-source-trees.sh" ]; then
echo ">>> Validating source trees..."
bash "$PROJECT_ROOT/local/scripts/validate-source-trees.sh" "$CONFIG" || {
echo "WARNING: Some source trees are missing."
echo "Attempting build with REPO_BINARY=1 fallback for missing packages..."
REPO_OFFLINE=1 COOKBOOK_OFFLINE=true CI=1 REPO_BINARY=1 make all "CONFIG_NAME=$CONFIG" "JOBS=$JOBS" || {
echo "ERROR: Build failed even with binary fallback."
echo "Run: ./local/scripts/restore-sources.sh --release=$REDBEAR_RELEASE"
exit 1
}
exit 0
}
fi
REPO_OFFLINE=1 COOKBOOK_OFFLINE=true CI=1 make all "CONFIG_NAME=$CONFIG" "JOBS=$JOBS"
elif [ "$ALLOW_UPSTREAM" -eq 1 ]; then
echo ">>> Upstream recipe refresh enabled"
REPO_OFFLINE=0 COOKBOOK_OFFLINE=false CI=1 make all "CONFIG_NAME=$CONFIG" "JOBS=$JOBS"
else
echo ">>> Upstream recipe refresh disabled (pass --upstream to enable)"
echo ">>> Upstream recipe refresh disabled (default: offline)"
REPO_OFFLINE=1 COOKBOOK_OFFLINE=true CI=1 make all "CONFIG_NAME=$CONFIG" "JOBS=$JOBS"
fi
+70
View File
@@ -0,0 +1,70 @@
#!/usr/bin/env bash
# check-upstream-releases.sh — Check for new Redox OS snapshots (read-only).
#
# Usage:
#   ./local/scripts/check-upstream-releases.sh
#
# Queries Redox GitLab tags via git ls-remote.
# Prints snapshots newer than the current baseline.
# ZERO side effects — no clones, no disk writes, no state changes.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
REDOX_URL="${REDOX_URL:-https://gitlab.redox-os.org/redox-os/redox.git}"
# Baseline release was hard-coded to 0.1.0; honour REDBEAR_RELEASE when set
# so the script stays correct after a release bump (default preserved).
BASELINE_RELEASE="${REDBEAR_RELEASE:-0.1.0}"
MANIFEST="$PROJECT_ROOT/sources/redbear-${BASELINE_RELEASE}/manifest.txt"
GREEN='\033[1;32m'
YELLOW='\033[1;33m'
BLUE='\033[1;34m'
NC='\033[0m'
echo -e "${BLUE}Red Bear OS — Upstream Release Check${NC}"
echo ""
# Read the baseline build-system rev and generation date from the manifest
# in a single pass (previously the manifest existence was checked twice).
if [ -f "$MANIFEST" ]; then
  BASELINE=$(head -3 "$MANIFEST" | grep 'Build system' | awk '{print $NF}' 2>/dev/null || echo "unknown")
  BASELINE_DATE=$(head -6 "$MANIFEST" | grep 'Generated' | sed 's/.*Generated: //' | head -1 2>/dev/null || echo "2026-05-01")
  echo "Baseline: Red Bear OS ${BASELINE_RELEASE} (build system: $BASELINE)"
else
  BASELINE_DATE="2026-05-01"
  echo "Baseline: unknown (manifest not found at $MANIFEST)"
fi
echo "Baseline date: $BASELINE_DATE"
echo ""
# Query Redox tags (read-only; network failure is tolerated below)
echo "Checking: $REDOX_URL"
echo ""
TAGS=$(git ls-remote --tags "$REDOX_URL" 2>/dev/null | grep -oP 'refs/tags/\K[0-9]+\.[0-9]+\.[0-9]+' | sort -V | tail -20 || echo "")
if [ -z "$TAGS" ]; then
  echo -e "${YELLOW}Could not query Redox tags. Is the network available?${NC}"
  echo "URL: $REDOX_URL"
  exit 0
fi
echo "Redox releases available:"
echo "$TAGS" | while read -r tag; do
  marker=""
  # NOTE(review): "current upstream stable" marker is hard-coded — update
  # when upstream moves past 0.9.0.
  if [ "$tag" = "0.9.0" ]; then
    marker=" (current upstream stable)"
  fi
  echo "  $tag$marker"
done
echo ""
echo "To evaluate a release:"
echo "  ./local/scripts/provision-release.sh --ref=<tag> --release=0.2.0 --dry-run"
echo ""
echo "To rebase on a release:"
echo "  ./local/scripts/provision-release.sh --ref=<tag> --release=0.2.0"
+436
View File
@@ -0,0 +1,436 @@
#!/usr/bin/env python3
"""Generate an authoritative Red Bear OS release manifest as JSON."""
from __future__ import annotations
import argparse
import json
import os
from pathlib import Path
import re
import shutil
import subprocess
import sys
import tarfile
import tomllib
# Repository layout: this script lives two levels below the project root
# (local/scripts/), hence parents[2].
PROJECT_ROOT = Path(__file__).resolve().parents[2]
RECIPES_DIR = PROJECT_ROOT / "recipes"
LOCAL_RECIPES_DIR = PROJECT_ROOT / "local" / "recipes"
# Default shared archive pool; main() may repoint this at a release or
# staging tarball directory before any entries are built.
ARCHIVES_DIR = PROJECT_ROOT / "sources" / "x86_64-unknown-redox"
# External BLAKE3 hasher; None when b3sum is not on PATH (checked lazily).
HASH_TOOL = shutil.which("b3sum")
# Patterns that extract a dotted version number from a source tarball URL,
# e.g. ".../archive/v1.2.3/..." or "name-1.2.3.tar.gz[/download]".
TAR_VERSION_PATTERNS = (
    re.compile(r"/archive/v?(\d+\.\d+(?:\.\d+)?)/"),
    re.compile(r"(?:^|[/-])v?(\d+\.\d+(?:\.\d+)?)(?=\.tar(?:\.[^./]+)+(?:/download)?$)"),
)
# A plausible git revision: 7 or more hex characters.
HEX_REV_RE = re.compile(r"[0-9a-fA-F]{7,}")
# Characters NOT allowed in archive version strings (replaced by '-').
SAFE_VERSION_RE = re.compile(r"[^A-Za-z0-9._-]+")
def parse_args() -> argparse.Namespace:
    """Parse the manifest generator's command-line options."""
    cli = argparse.ArgumentParser(
        description="Generate authoritative manifest.json content for a Red Bear OS release."
    )
    cli.add_argument("--release", required=True, help="Release version to record in the manifest")
    cli.add_argument("--staging", action="store_true", help="Look for archives in staging directory")
    return cli.parse_args()
def main() -> int:
    """Entry point: locate the archive pool, build one manifest entry per
    recipe, and write the manifest JSON to stdout.

    Returns 0 on success (exceptions propagate to the caller).
    """
    args = parse_args()
    global ARCHIVES_DIR
    if args.staging:
        ARCHIVES_DIR = PROJECT_ROOT / "sources" / ".staging" / f"redbear-{args.release}" / "tarballs"
    else:
        # BUG FIX: this path was built from the literal string
        # "redbear-{args.release}" (missing f-prefix), so the non-staging
        # release directory never existed and every run silently fell back
        # to the shared pool below.
        ARCHIVES_DIR = PROJECT_ROOT / "sources" / f"redbear-{args.release}" / "tarballs"
    # Fallback to shared pool if release dir has no tarballs yet.
    if not list(ARCHIVES_DIR.glob("*.tar.gz")):
        ARCHIVES_DIR = PROJECT_ROOT / "sources" / "x86_64-unknown-redox"
    # (Removed a redundant second parse_args() call here.)
    recipe_files = collect_recipe_files()
    entries = {}
    for relative_recipe_path, recipe_file in recipe_files.items():
        entries[relative_recipe_path] = build_entry(relative_recipe_path, recipe_file, recipe_files)
    manifest = {
        "release": args.release,
        "build_system_rev": resolve_build_system_rev(),
        # Keys sorted for a stable, diff-friendly manifest.
        "entries": {key: entries[key] for key in sorted(entries)},
    }
    json.dump(manifest, sys.stdout, indent=2)
    sys.stdout.write("\n")
    return 0
def collect_recipe_files() -> dict[str, Path]:
    """Discover every recipe.toml under the recipe roots.

    Returns a mapping of recipe path (relative to its root, POSIX form) to
    the recipe.toml Path. LOCAL_RECIPES_DIR is walked before RECIPES_DIR and
    setdefault keeps the first hit, so local recipes shadow upstream recipes
    that share the same relative path.
    """
    recipe_files: dict[str, Path] = {}
    for root in (LOCAL_RECIPES_DIR, RECIPES_DIR):
        if not root.is_dir():
            continue
        for dirpath, dirnames, filenames in os.walk(root, followlinks=False):
            # Prune checkouts/build output and sort for a deterministic walk.
            dirnames[:] = sorted(
                name for name in dirnames if name not in {"source", "target", ".git", "__pycache__"}
            )
            if "recipe.toml" not in filenames:
                continue
            recipe_file = Path(dirpath) / "recipe.toml"
            if not recipe_file.is_file():
                continue
            relative_recipe_path = recipe_file.relative_to(root).parent.as_posix()
            # First root wins: never overwrite an entry seen earlier.
            recipe_files.setdefault(relative_recipe_path, recipe_file)
    return recipe_files
def build_entry(
    relative_recipe_path: str, recipe_file: Path, recipe_files: dict[str, Path]
) -> dict[str, object]:
    """Build one manifest entry describing how to restore a recipe's source.

    Every entry records its recipe type and restore destination; non-meta
    entries additionally record the expected archive name and its BLAKE3
    hash (None when the archive is not present in ARCHIVES_DIR). Extra
    type-specific fields are added per the branches below.
    """
    recipe_dir = recipe_file.parent
    recipe_data = load_recipe_metadata(recipe_file)
    # Defensive: tolerate a malformed recipe where [source] is not a table.
    source_data = recipe_data.get("source") if isinstance(recipe_data, dict) else None
    source = source_data if isinstance(source_data, dict) else {}
    recipe_type = classify_recipe(source)
    entry: dict[str, object] = {
        "type": recipe_type,
        "restore_to": f"recipes/{relative_recipe_path}/source",
    }
    if recipe_type != "meta":
        archive_name = expected_archive_name(
            relative_recipe_path,
            recipe_type,
            source,
            recipe_dir,
            recipe_files,
        )
        # If the computed name is absent, fall back to a unique on-disk match.
        archive_name = resolve_archive_name(relative_recipe_path, archive_name)
        archive_path = ARCHIVES_DIR / archive_name
        entry["archive"] = archive_name
        entry["blake3"] = blake3_file(archive_path) if archive_path.is_file() else None
    if recipe_type == "git":
        rev = get_git_rev(source, recipe_dir)
        entry["git_url"] = source.get("git")
        entry["rev"] = rev
    elif recipe_type == "tar":
        entry["tar_url"] = source.get("tar")
        # Recipes spell the upstream hash as either "blake3" or "b3sum".
        source_blake3 = source.get("blake3") or source.get("b3sum")
        if source_blake3:
            entry["source_blake3"] = source_blake3
    elif recipe_type == "path":
        path_value = source.get("path")
        entry["path"] = path_value
        source_path = resolve_source_path(recipe_dir, path_value)
        if source_path and source_path.exists():
            # Path sources have no archive; hash the live tree instead.
            entry["tree_blake3"] = blake3_tree(source_path)
    elif recipe_type == "same_as":
        entry["target"] = normalize_recipe_reference(recipe_dir, str(source.get("same_as", "")))
    elif recipe_type == "meta":
        entry["meta"] = "no_source"
    return entry
def load_recipe_metadata(path: Path) -> dict[str, object]:
text = path.read_text(encoding="utf-8")
try:
data = tomllib.loads(text)
except tomllib.TOMLDecodeError:
return {"source": parse_source_block(text)}
return data if isinstance(data, dict) else {}
def parse_source_block(text: str) -> dict[str, object]:
source: dict[str, object] = {}
in_source = False
for raw_line in text.splitlines():
stripped = raw_line.strip()
if stripped.startswith("[") and stripped.endswith("]"):
if stripped == "[source]":
in_source = True
continue
if in_source:
break
continue
if not in_source or not stripped or stripped.startswith("#") or "=" not in raw_line:
continue
key, value = raw_line.split("=", 1)
key = key.strip()
value = value.split("#", 1)[0].strip()
if not key or not value:
continue
try:
source[key] = tomllib.loads(f"value = {value}")["value"]
except tomllib.TOMLDecodeError:
continue
return source
def classify_recipe(source: dict[str, object]) -> str:
    """Map a [source] table to its recipe kind.

    Precedence mirrors the original if-chain: git, tar, path, same_as.
    Falsy/absent values fall through; no source at all means "meta".
    """
    for kind in ("git", "tar", "path", "same_as"):
        if source.get(kind):
            return kind
    return "meta"
def expected_archive_name(
    relative_recipe_path: str,
    recipe_type: str,
    source: dict[str, object],
    recipe_dir: Path,
    recipe_files: dict[str, Path],
) -> str:
    """Compute the canonical tarball name:
    <category>-<pkgname>-v<version>-patched.tar.gz
    (category is "root" for recipes directly under a recipe root).
    """
    rel = Path(relative_recipe_path)
    pkg = rel.name
    category = "root" if rel.parent.as_posix() == "." else rel.parent.name
    version = derive_archive_version(
        relative_recipe_path,
        recipe_type,
        source,
        recipe_dir,
        recipe_files,
        {relative_recipe_path},
    )
    return f"{category}-{pkg}-v{version}-patched.tar.gz"
def derive_archive_version(
    relative_recipe_path: str,
    recipe_type: str,
    source: dict[str, object],
    recipe_dir: Path,
    recipe_files: dict[str, Path],
    seen: set[str],
) -> str:
    """Derive the version component of an archive name for one recipe.

    tar recipes use the version embedded in the tarball URL; git recipes use
    the pinned (or checked-out) revision; same_as recipes recurse into their
    target, with `seen` guarding against reference cycles. Anything that
    cannot be resolved (including path recipes) yields "unknown".
    """
    if recipe_type == "tar":
        tar_url = str(source.get("tar", ""))
        version = extract_tar_version(tar_url)
        if version:
            return version
    if recipe_type == "git":
        rev = get_git_rev(source, recipe_dir)
        if isinstance(rev, str) and rev:
            # Hex revisions are truncated to the 7-char short form;
            # symbolic refs (branches/tags) are sanitized instead.
            if HEX_REV_RE.fullmatch(rev):
                return rev[:7]
            return sanitize_version(rev)
    if recipe_type == "same_as":
        target = normalize_recipe_reference(recipe_dir, str(source.get("same_as", "")))
        # Only recurse into a known target we have not visited yet (cycle guard).
        if target and target not in seen:
            target_file = recipe_files.get(target)
            if target_file is not None:
                target_data = load_recipe_metadata(target_file)
                target_source_data = target_data.get("source") if isinstance(target_data, dict) else None
                target_source = target_source_data if isinstance(target_source_data, dict) else {}
                target_type = classify_recipe(target_source)
                return derive_archive_version(
                    target,
                    target_type,
                    target_source,
                    target_file.parent,
                    recipe_files,
                    seen | {target},
                )
    return "unknown"
def resolve_archive_name(relative_recipe_path: str, archive_name: str) -> str:
    """Return archive_name if it exists in ARCHIVES_DIR; otherwise, when
    exactly one versioned archive matches this recipe's category/name,
    return that unique match. Ambiguity keeps the computed name."""
    if (ARCHIVES_DIR / archive_name).is_file():
        return archive_name
    rel = Path(relative_recipe_path)
    category = "root" if rel.parent.as_posix() == "." else rel.parent.name
    candidates = sorted(ARCHIVES_DIR.glob(f"{category}-{rel.name}-v*-patched.tar.gz"))
    return candidates[0].name if len(candidates) == 1 else archive_name


def extract_tar_version(tar_url: str) -> str | None:
    """Pull a dotted version number out of a source tarball URL, if any."""
    for pattern in TAR_VERSION_PATTERNS:
        hit = pattern.search(tar_url)
        if hit:
            return hit.group(1)
    return None


def get_git_rev(source: dict[str, object], recipe_dir: Path) -> str | None:
    """Prefer the recipe's pinned rev; otherwise ask the checkout for HEAD."""
    pinned = source.get("rev")
    if isinstance(pinned, str) and pinned.strip():
        return pinned.strip()
    return resolve_git_head(recipe_dir / "source")
def resolve_git_head(repo_dir: Path) -> str | None:
git_dir = repo_dir / ".git"
if not git_dir.exists():
return None
result = subprocess.run(
["git", "-C", str(repo_dir), "rev-parse", "--short", "HEAD"],
capture_output=True,
text=True,
check=False,
)
if result.returncode != 0:
return None
head = result.stdout.strip()
return head or None
def resolve_build_system_rev() -> str | None:
    """Short (9-char) git HEAD of the build-system repo, or None on failure."""
    proc = subprocess.run(
        ["git", "-C", str(PROJECT_ROOT), "rev-parse", "--short=9", "HEAD"],
        capture_output=True,
        text=True,
        check=False,
    )
    if proc.returncode != 0:
        return None
    return proc.stdout.strip() or None


def resolve_source_path(recipe_dir: Path, raw_path: object) -> Path | None:
    """Resolve a [source] path value relative to the recipe directory.

    Returns None for non-string/empty values, for paths that do not exist,
    and for paths that resolve outside PROJECT_ROOT (escape guard).
    """
    if not isinstance(raw_path, str) or not raw_path:
        return None
    rel = Path(raw_path)
    target = rel if rel.is_absolute() else recipe_dir / rel
    try:
        resolved = target.resolve(strict=True)
    except FileNotFoundError:
        return None
    try:
        resolved.relative_to(PROJECT_ROOT.resolve())
    except ValueError:
        return None
    return resolved


def normalize_recipe_reference(recipe_dir: Path, raw_reference: str) -> str:
    """Rewrite a same_as reference as a path relative to one of the recipe
    roots; return the raw string unchanged when it points elsewhere."""
    if not raw_reference:
        return raw_reference
    resolved = (recipe_dir / raw_reference).resolve(strict=False)
    for root in (RECIPES_DIR, LOCAL_RECIPES_DIR):
        try:
            return resolved.relative_to(root).as_posix()
        except ValueError:
            continue
    return raw_reference
def sanitize_version(value: str) -> str:
    """Collapse unsafe characters to '-' and trim; never return empty."""
    safe = SAFE_VERSION_RE.sub("-", value).strip("-.")
    return safe if safe else "unknown"


def require_hash_tool() -> str:
    """Return the b3sum path or raise — BLAKE3 hashing is mandatory."""
    if not HASH_TOOL:
        raise RuntimeError("b3sum is required to compute BLAKE3 hashes")
    return HASH_TOOL


def blake3_file(path: Path) -> str:
    """BLAKE3 hash of a single file, computed via the external b3sum tool.

    Raises RuntimeError (with b3sum's stderr when available) on failure.
    """
    proc = subprocess.run(
        [require_hash_tool(), "--no-names", str(path)],
        capture_output=True,
        text=True,
        check=False,
    )
    if proc.returncode != 0:
        raise RuntimeError(proc.stderr.strip() or f"failed to hash {path}")
    return proc.stdout.strip().split()[0]
def blake3_tree(root: Path) -> str:
    """BLAKE3 hash of a whole directory tree.

    Streams a deterministic, uncompressed tar of the tree into b3sum's stdin:
    entries come from iter_tree_entries (name-sorted), and ownership/mtime
    metadata is zeroed so the hash depends only on tree content and layout.
    Raises RuntimeError when b3sum fails.
    """
    process = subprocess.Popen(
        [require_hash_tool(), "--no-names"],
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    try:
        assert process.stdin is not None
        # mode "w|" = uncompressed tar written as a non-seekable stream.
        with tarfile.open(fileobj=process.stdin, mode="w|") as tar:
            for entry in iter_tree_entries(root):
                arcname = entry.relative_to(root).as_posix()
                tar_info = tar.gettarinfo(str(entry), arcname=arcname)
                # Normalize metadata so the stream (and hash) is reproducible.
                tar_info.uid = 0
                tar_info.gid = 0
                tar_info.uname = ""
                tar_info.gname = ""
                tar_info.mtime = 0
                if tar_info.isreg():
                    with entry.open("rb") as handle:
                        tar.addfile(tar_info, handle)
                else:
                    tar.addfile(tar_info)
    finally:
        # Close stdin even on error so b3sum terminates and can be reaped.
        if process.stdin and not process.stdin.closed:
            process.stdin.close()
    stdout, stderr = process.communicate()
    if process.returncode != 0:
        message = stderr.decode().strip() or f"failed to hash tree {root}"
        raise RuntimeError(message)
    return stdout.decode().strip().split()[0]
def iter_tree_entries(root: Path) -> list[Path]:
    """Depth-first listing of every entry under root, name-sorted at each
    level. Symlinked directories are listed but not descended into; a
    missing or non-directory root yields an empty list."""
    collected: list[Path] = []

    def descend(directory: Path) -> None:
        for child in sorted(directory.iterdir(), key=lambda p: p.name):
            collected.append(child)
            if child.is_dir() and not child.is_symlink():
                descend(child)

    if root.exists() and root.is_dir():
        descend(root)
    return collected
# Script entry point: propagate main()'s return code as the exit status.
if __name__ == "__main__":
    raise SystemExit(main())
+247
View File
@@ -0,0 +1,247 @@
#!/usr/bin/env bash
# provision-release.sh — Seal current build tree as a new Red Bear OS release (atomic).
#
# Usage:
#   ./local/scripts/provision-release.sh --release=0.2.0 [--ref=<tag>] [--dry-run]
#
# Provisions a self-contained, immutable release archive via staging + atomic mv.
# All 7 completeness gates must pass before .complete sentry is written.
# On failure, staging directory is cleaned up automatically.
#
# Requires explicit --release. Never runs automatically.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# CLI state: optional upstream ref, required release version, dry-run flag.
REF=""
RELEASE=""
DRY_RUN=0
# Print usage text (the heredoc expands only $(basename "$0")).
usage() {
cat <<EOF
Usage: $(basename "$0") --release=<ver> [--ref=<redox-tag>] [--dry-run]
Seal current source tree as a new Red Bear OS release (atomic provisioning).
Options:
--release=<ver>   Red Bear OS release version (e.g., 0.2.0) — REQUIRED
--ref=<tag>       Optional Redox OS ref for provenance tracking
--dry-run         Preview only — no filesystem changes
-h, --help        Show this help
EOF
}
# Parse arguments; unknown options abort with usage on stderr.
while [ $# -gt 0 ]; do
case "$1" in
--ref=*) REF="${1#*=}" ;;
--release=*) RELEASE="${1#*=}" ;;
--dry-run) DRY_RUN=1 ;;
-h|--help) usage; exit 0 ;;
*) echo "Unknown: $1"; usage >&2; exit 1 ;;
esac
shift
done
# --release is mandatory: provisioning never infers a version.
if [ -z "$RELEASE" ]; then
echo "ERROR: --release is required" >&2
usage >&2
exit 1
fi
# All relative paths below (sources/, config/, local/) assume project root.
cd "$PROJECT_ROOT"
# ANSI colours for the status helpers.
RED='\033[1;31m'
GREEN='\033[1;32m'
YELLOW='\033[1;33m'
BLUE='\033[1;34m'
NC='\033[0m'
status() { echo -e "${GREEN}==>${NC} $*"; }
warn()   { echo -e "${YELLOW}WARN${NC}: $*"; }
err()    { echo -e "${RED}ERROR${NC}: $*" >&2; }
info()   { echo -e "${BLUE}   ${NC} $*"; }
STAGING="sources/.staging/redbear-${RELEASE}"
FINAL="sources/redbear-${RELEASE}"
# BUG FIX: track whether THIS run created the staging directory. The
# previous EXIT trap removed $STAGING unconditionally, so aborting in
# Step 3 — which tells the user to inspect a pre-existing staging dir
# from an earlier failed run — silently deleted that very directory.
# Cleanup now only removes what this run created.
STAGING_CREATED=0
cleanup_staging() {
  if [ "$STAGING_CREATED" -eq 1 ] && [ -d "$STAGING" ]; then
    warn "Cleaning up staging directory..."
    rm -rf -- "$STAGING"
  fi
}
trap cleanup_staging EXIT
# ── Step 1: Verify current release is archived ──────────────────────
status "Step 1: Verifying current release..."
CURRENT_RELEASE="${REDBEAR_RELEASE:-0.1.0}"
CURRENT_ARCHIVE="sources/redbear-$CURRENT_RELEASE"
# A sealed release carries a .complete sentry and/or a manifest; warn and
# ask for confirmation (interactive runs only) when neither is present.
if [ ! -f "$CURRENT_ARCHIVE/.complete" ] && [ ! -f "$CURRENT_ARCHIVE/manifest.txt" ]; then
  warn "Current release $CURRENT_RELEASE has no .complete sentry or manifest"
  warn "It may not be fully archived. Continue anyway? (y/N)"
  if [ "$DRY_RUN" -eq 0 ]; then
    read -r confirm
    [ "$confirm" = "y" ] || [ "$confirm" = "Y" ] || exit 1
  fi
fi
info "Current release: $CURRENT_RELEASE"
# ── Step 2: Ref validation (optional) ───────────────────────────────
if [ -n "$REF" ]; then
  status "Step 2: Validating ref=$REF..."
  if [ "$DRY_RUN" -eq 1 ]; then
    info "[dry-run] Would validate ref $REF"
  else
    REDOX_URL="https://gitlab.redox-os.org/redox-os/redox.git"
    # Best effort: confirm the tag exists upstream. If the remote is
    # unreachable, record the ref as stated provenance rather than fail.
    if timeout 10 git ls-remote --tags "$REDOX_URL" "$REF" 2>/dev/null | grep -q "$REF"; then
      info "Ref $REF exists in Redox repository"
    elif timeout 10 git ls-remote --tags "$REDOX_URL" 2>/dev/null | grep -q .; then
      err "Ref $REF not found"
      exit 1
    else
      warn "Cannot reach Redox repository — ref recorded as stated provenance"
    fi
  fi
fi
# ── Step 3: Staging safety check ────────────────────────────────────
status "Step 3: Checking staging..."
if [ -d "$STAGING" ]; then
  err "Staging directory already exists: $STAGING"
  err "This may be from a previous failed provisioning run."
  err "Remove it first: rm -rf $STAGING"
  [ "$DRY_RUN" -eq 1 ] || exit 1
fi
if [ -d "$FINAL" ]; then
  err "Release already exists: $FINAL"
  err "Releases are immutable. Choose a different --release version."
  [ "$DRY_RUN" -eq 1 ] || exit 1
fi
info "Staging path is clear"
# ── Step 4: Archive sources ─────────────────────────────────────────
status "Step 4: Archiving sources..."
if [ "$DRY_RUN" -eq 1 ]; then
  info "[dry-run] Would run: archive-sources.sh --release=$RELEASE --all"
else
  mkdir -p "$STAGING"/{tarballs,snapshots,configs}
  STAGING_CREATED=1  # from here on, the EXIT trap may clean up on failure
  if [ -f "$SCRIPT_DIR/archive-sources.sh" ]; then
    bash "$SCRIPT_DIR/archive-sources.sh" --release="$RELEASE" --all
    info "Sources archived"
  else
    err "archive-sources.sh not found"
    exit 1
  fi
fi
# ── Step 5: Archive configs ─────────────────────────────────────────
status "Step 5: Archiving configs..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would copy configs"
else
# Best-effort copies: missing config files are tolerated (hence || true).
cp config/redbear-*.toml config/base.toml config/minimal.toml "$STAGING/configs/" 2>/dev/null || true
cp .config "$STAGING/configs/" 2>/dev/null || true
info "Configs: $(ls "$STAGING/configs"/*.toml 2>/dev/null | wc -l) files"
fi
# ── Step 6: Archive patches ─────────────────────────────────────────
status "Step 6: Archiving patches..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would archive patches"
else
if [ -d "local/patches" ]; then
# Tar from inside local/ so archive member paths start at patches/.
(cd local && tar czf "$PROJECT_ROOT/$STAGING/patches.tar.gz" patches/)
info "Patches archived: patches.tar.gz"
fi
fi
# ── Step 7: Generate manifest ───────────────────────────────────────
status "Step 7: Generating manifest..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would generate manifest.json"
else
if [ -f "$SCRIPT_DIR/generate-manifest.py" ]; then
# Manifest generation failure is fatal — manifest.json is authoritative.
python3 "$SCRIPT_DIR/generate-manifest.py" --release="$RELEASE" --staging > "$STAGING/manifest.json" || {
err "Manifest generation failed"
exit 1
}
info "Manifest: $(python3 -c "import json; d=json.load(open('$STAGING/manifest.json')); print(len(d.get('entries',{})))" 2>/dev/null || echo "?") entries"
else
err "generate-manifest.py not found"
exit 1
fi
fi
# ── Step 8: Generate BLAKE3SUMS ─────────────────────────────────────
status "Step 8: Generating checksums..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would generate BLAKE3SUMS and PAYLOAD.blake3"
else
# Per-archive sums: tarballs first (overwrite), snapshots appended.
if [ -d "$STAGING/tarballs" ] && ls "$STAGING/tarballs"/*.tar.gz >/dev/null 2>&1; then
(cd "$STAGING/tarballs" && b3sum *.tar.gz) > "$STAGING/BLAKE3SUMS"
info "BLAKE3SUMS: $(wc -l < "$STAGING/BLAKE3SUMS") entries"
fi
if [ -d "$STAGING/snapshots" ] && ls "$STAGING/snapshots"/*.tar.gz >/dev/null 2>&1; then
(cd "$STAGING/snapshots" && b3sum *.tar.gz) >> "$STAGING/BLAKE3SUMS"
fi
# Generate whole-payload hash
# NOTE(review): '|| true' hides b3sum/xargs failures, so PAYLOAD.blake3
# may be empty on error — confirm this best-effort behaviour is intended.
(cd "$STAGING" && find . -type f ! -name PAYLOAD.blake3 ! -name .complete -print0 | sort -z | xargs -0 b3sum) > "$STAGING/PAYLOAD.blake3" 2>/dev/null || true
fi
# ── Step 9: Completeness gates ──────────────────────────────────────
status "Step 9: Running completeness gates..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would run verify-release-completeness.sh"
else
if [ -f "$SCRIPT_DIR/verify-release-completeness.sh" ]; then
if bash "$SCRIPT_DIR/verify-release-completeness.sh" --release="$RELEASE" --staging; then
info "All completeness gates PASSED"
else
err "Completeness gates FAILED"
exit 1
fi
else
# A missing gate script only warns — the release still seals.
warn "verify-release-completeness.sh not found — skipping gate checks"
fi
fi
# ── Step 10: Seal and deploy ────────────────────────────────────────
status "Step 10: Sealing release..."
if [ "$DRY_RUN" -eq 1 ]; then
info "[dry-run] Would write .complete sentry and move to $FINAL"
else
# Write the sentry, then deploy with a single rename (atomic on one fs).
echo "$(date -u +%Y-%m-%dT%H:%M:%SZ) — Release $RELEASE" > "$STAGING/.complete"
# Immutability re-checked right before the rename (guards against races
# with a concurrent provisioning run since the Step 3 check).
if [ -d "$FINAL" ]; then
err "Release directory already exists: $FINAL"
err "Releases are immutable. Choose a different --release version."
exit 1
fi
mv "$STAGING" "$FINAL"
fi
# ── Report ──────────────────────────────────────────────────────────
echo ""
echo -e "${GREEN}=========================================${NC}"
if [ "$DRY_RUN" -eq 0 ]; then
echo -e "${GREEN} Release $RELEASE provisioned${NC}"
else
echo -e "${GREEN} Dry-run complete — no changes made${NC}"
fi
echo -e "${GREEN}=========================================${NC}"
echo ""
if [ "$DRY_RUN" -eq 0 ]; then
echo "Archive: $FINAL/"
echo " tarballs/: $(ls "$FINAL/tarballs" 2>/dev/null | wc -l) archives"
echo " configs/: $(ls "$FINAL/configs" 2>/dev/null | wc -l) files"
echo " .complete: $(cat "$FINAL/.complete")"
echo ""
echo "To verify: ./local/scripts/verify-sources-archived.sh --release=$RELEASE"
echo ""
echo "To switch: edit .config → REDBEAR_RELEASE?=$RELEASE"
fi
# Prevent trap cleanup on success
trap - EXIT
+182
View File
@@ -0,0 +1,182 @@
#!/usr/bin/env bash
# restore-sources.sh — Extract patched source archives back to recipe directories.
#
# Usage:
#   ./local/scripts/restore-sources.sh --release=0.1.0 [recipe ...]
#
# Reads sources/redbear-<release>/manifest.txt to find archives.
# Extracts each archive to recipes/<cat>/<name>/source/.
# Skips extraction if source/ already exists and has matching rev.
# NOTE(review): 'set -u' is deliberately(?) omitted — several loop variables
# may be unset on some paths; confirm before tightening.
set -eo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# CLI state: required release version plus an optional recipe filter list.
RELEASE=""
RECIPES=()
usage() {
cat <<EOF
Usage: $(basename "$0") --release=<ver> [recipe ...]
Restore recipe sources from release archives.
Options:
--release=<ver>   Release version (e.g., 0.1.0)
--force           Overwrite existing source directories
-h, --help        Show this help
If no recipes specified, restores ALL recipes in the manifest.
EOF
}
FORCE=0
# Any non-option argument is treated as a recipe path filter.
while [ $# -gt 0 ]; do
case "$1" in
--release=*) RELEASE="${1#*=}" ;;
--force) FORCE=1 ;;
-h|--help) usage; exit 0 ;;
*) RECIPES+=("$1") ;;
esac
shift
done
if [ -z "$RELEASE" ]; then
echo "ERROR: --release is required" >&2
usage >&2
exit 1
fi
# The release manifest is the source of truth for what can be restored.
ARCHIVE_DIR="$PROJECT_ROOT/sources/redbear-$RELEASE"
MANIFEST="$ARCHIVE_DIR/manifest.txt"
if [ ! -f "$MANIFEST" ]; then
echo "ERROR: Release manifest not found: $MANIFEST" >&2
echo "Run: ./local/scripts/provision-release.sh --release=$RELEASE" >&2
exit 1
fi
cd "$PROJECT_ROOT"
# ANSI colours for the status helpers.
GREEN='\033[1;32m'
YELLOW='\033[1;33m'
RED='\033[1;31m'
NC='\033[0m'
status() { echo -e "${GREEN}==>${NC} $*"; }
warn()   { echo -e "${YELLOW}WARN${NC}: $*"; }
err()    { echo -e "${RED}ERROR${NC}: $*" >&2; }
restored=0
skipped=0
failed=0
# Read manifest and restore each recipe
while IFS= read -r line; do
  [[ "$line" =~ ^# ]] && continue
  [[ -z "$line" ]] && continue
  # Parse: category/name type=... key=value ...
  pkg_path=$(echo "$line" | awk '{print $1}')
  pkg_type=$(echo "$line" | awk '{print $2}' | cut -d= -f1)  # informational; not used below
  # If specific recipes requested, filter
  if [ "${#RECIPES[@]}" -gt 0 ]; then
    match=0
    for r in "${RECIPES[@]}"; do
      [[ "$pkg_path" == "$r" ]] && match=1
    done
    [ "$match" -eq 0 ] && continue
  fi
  source_dir="$PROJECT_ROOT/recipes/$pkg_path/source"
  # Skip if source exists and not forced
  if [ -d "$source_dir" ] && [ "$FORCE" -eq 0 ]; then
    warn "source exists: recipes/$pkg_path/source/ (use --force to overwrite)"
    skipped=$((skipped + 1))
    continue
  fi
  # BUGFIX: reset per-iteration state. Previously "archive" kept its value
  # from the prior iteration, so a recipe whose manifest.json lookup and glob
  # fallback both failed could silently extract the PREVIOUS recipe's
  # archive; it was also read while unset on the first iteration.
  archive=""
  archive_name=""
  # Exact archive lookup in release tarballs directory.
  # NOTE(review): pkg_path is interpolated into the Python source below;
  # manifest paths are repo-controlled category/name strings, so quote
  # injection is not expected — confirm if manifests ever carry user input.
  if [ -f "$ARCHIVE_DIR/manifest.json" ]; then
    archive_name=$(python3 -c "
import json, sys
with open('$ARCHIVE_DIR/manifest.json') as f:
    data = json.load(f)
entry = data.get('entries', {}).get('$pkg_path', {})
if entry.get('type') == 'same_as':
    target = entry.get('target', '')
    target_entry = data.get('entries', {}).get(target, {})
    print(target_entry.get('archive', target_entry.get('snapshot', '')))
elif entry.get('type') == 'path':
    print('__LOCAL_PATH__')
else:
    print(entry.get('archive', ''))
" 2>/dev/null)
  fi
  if [ -n "$archive_name" ]; then
    if [ "$archive_name" = "__LOCAL_PATH__" ]; then
      warn "local path source (no archive): $pkg_path"
      skipped=$((skipped + 1))
      continue
    fi
    # Prefer tarballs/, fall back to snapshots/ for the same file name.
    archive="$ARCHIVE_DIR/tarballs/$archive_name"
    if [ ! -f "$archive" ]; then
      archive="$ARCHIVE_DIR/snapshots/$archive_name"
    fi
  fi
  # Fallback: try glob pattern in release tarballs dir
  if [ -z "$archive" ] || [ ! -f "$archive" ]; then
    cat_name=$(dirname "$pkg_path")
    pkg_name=$(basename "$pkg_path")
    shopt -s nullglob
    for f in "$ARCHIVE_DIR/tarballs/${cat_name}-${pkg_name}-"*.tar.gz; do
      [ -f "$f" ] || continue
      archive="$f"
      break
    done
    shopt -u nullglob
  fi
  # BUGFIX: also treat a resolved-but-missing file as "no archive" so the
  # error is reported here rather than as a confusing extraction failure.
  if [ -z "$archive" ] || [ ! -f "$archive" ]; then
    err "no archive found for $pkg_path in $ARCHIVE_DIR/tarballs/"
    failed=$((failed + 1))
    continue
  fi
  # Extract with format auto-detection: archives may be rooted at source/,
  # at <name>/source/, or be a bare file list.
  mkdir -p "$(dirname "$source_dir")"
  rm -rf "$source_dir"
  status "restoring: $pkg_path"
  first_entry=$(tar tf "$archive" 2>/dev/null | head -1)
  case "$first_entry" in
    source/*)
      tar xzf "$archive" -C "$source_dir/.." 2>/dev/null ;;
    */source/*)
      tar xzf "$archive" -C "$(dirname "$(dirname "$source_dir")")" 2>/dev/null ;;
    *)
      # NOTE(review): bare archives are assumed to still unpack a source/
      # directory; otherwise the verification below counts them as failed.
      tar xzf "$archive" -C "$(dirname "$source_dir")" 2>/dev/null ;;
  esac
  # Verify extraction
  if [ -d "$source_dir" ]; then
    restored=$((restored + 1))
  else
    err "extraction failed: $pkg_path (archive: $archive)"
    failed=$((failed + 1))
  fi
done < "$MANIFEST"
# Final report; exit non-zero when any recipe failed to restore.
printf '\n'
printf '=========================================\n'
printf '  Restore complete\n'
printf '  Restored: %s\n' "$restored"
printf '  Skipped:  %s\n' "$skipped"
printf '  Failed:   %s\n' "$failed"
printf '=========================================\n'
[ "$failed" -eq 0 ] || exit 1
+28 -274
View File
@@ -1,284 +1,38 @@
#!/usr/bin/env bash
# sync-upstream.sh — Update from upstream Redox and reapply Red Bear OS overlays.
# sync-upstream.sh — RETIRED. Red Bear OS is now a release-based fork.
#
# Usage:
# ./local/scripts/sync-upstream.sh # Rebase onto upstream master
# ./local/scripts/sync-upstream.sh --dry-run # Preview what would change
# ./local/scripts/sync-upstream.sh --no-merge # Only fetch + check for conflicts
# This script no longer performs upstream synchronization.
# Red Bear OS sources are frozen at the current baseline (0.1.0).
# Sources are immutable — never auto-refreshed from upstream.
#
# Strategy: git rebase (preserves Red Bear OS commits, replays on new upstream).
# Fallback: if rebase fails, patches in local/patches/build-system/ can be
# applied from scratch via: ./local/scripts/apply-patches.sh --force
# To check for newer Redox OS snapshots:
# ./local/scripts/check-upstream-releases.sh
#
# IMPORTANT: upstream WIP recipes are not treated as durable shipping inputs by Red Bear.
# After upstream sync, Red Bear-owned WIP work still needs to come from local/recipes/ and
# local/patches/, not from trust in recipes/wip/ alone.
# To provision a new release from a Redox ref:
# ./local/scripts/provision-release.sh --ref=<redox-tag> --release=0.2.0
#
# To restore archived sources:
# ./local/scripts/restore-sources.sh --release=0.1.0
#
# Documentation:
# local/docs/CONSOLE-TO-KDE-DESKTOP-PLAN.md
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
UPSTREAM_URL="${UPSTREAM_URL:-https://github.com/redox-os/redox.git}"
UPSTREAM_REMOTE="upstream-redox"
UPSTREAM_BRANCH="${UPSTREAM_BRANCH:-master}"
DRY_RUN=0
NO_MERGE=0
FORCE=0
usage() {
echo "Usage: $0 [--dry-run] [--no-merge] [--force]"
echo " --dry-run Show what would happen without making changes"
echo " --no-merge Only fetch and check patch conflicts"
echo " --force Skip safety checks (uncommitted local/ changes)"
}
for arg in "$@"; do
case "$arg" in
--dry-run) DRY_RUN=1 ;;
--no-merge) NO_MERGE=1 ;;
--force) FORCE=1 ;;
--help|-h)
usage
exit 0
;;
*)
echo "Unknown argument: $arg"
usage >&2
exit 1
;;
esac
done
cd "$REPO_ROOT"
# ── 1. Ensure upstream remote ───────────────────────────────────────
if ! git remote get-url "$UPSTREAM_REMOTE" &>/dev/null; then
echo "==> Adding upstream remote: $UPSTREAM_URL"
[ "$DRY_RUN" = "0" ] && git remote add "$UPSTREAM_REMOTE" "$UPSTREAM_URL"
fi
echo "==> Fetching $UPSTREAM_REMOTE/$UPSTREAM_BRANCH..."
[ "$DRY_RUN" = "0" ] && git fetch "$UPSTREAM_REMOTE" "$UPSTREAM_BRANCH"
UPSTREAM_REF="${UPSTREAM_REMOTE}/${UPSTREAM_BRANCH}"
# ── 2. Check patch conflicts with upstream changes ──────────────────
MERGE_BASE=$(git merge-base HEAD "$UPSTREAM_REF" 2>/dev/null || echo "")
if [ -n "$MERGE_BASE" ]; then
CHANGED_FILES=$(git diff --name-only "$MERGE_BASE" "$UPSTREAM_REF" 2>/dev/null || true)
CHANGE_COUNT=$(echo "$CHANGED_FILES" | grep -c . 2>/dev/null || echo "0")
echo " $CHANGE_COUNT files changed upstream since common ancestor"
if [ -n "$CHANGED_FILES" ] && [ -d local/patches ]; then
echo ""
echo "==> Checking patch conflict risks..."
for patch_file in local/patches/build-system/[0-9]*.patch; do
[ -f "$patch_file" ] || continue
PATCH_NAME=$(basename "$patch_file")
PATCHED_FILES=$(grep '^--- a/' "$patch_file" 2>/dev/null | sed 's|^--- a/||' | sort -u || true)
for pf in $PATCHED_FILES; do
if echo "$CHANGED_FILES" | grep -q "$pf" 2>/dev/null; then
echo " ⚠ CONFLICT RISK: $PATCH_NAME modifies $pf (also changed upstream)"
fi
done
done
for patch_dir in local/patches/kernel local/patches/base; do
[ -f "$patch_dir/redox.patch" ] || continue
echo " $patch_dir/redox.patch — check manually if kernel/base changed upstream"
done
fi
else
echo " WARNING: Could not find common ancestor with upstream"
fi
# ── 3. Summary ─────────────────────────────────────────────────────
AHEAD=$(git rev-list --count "$UPSTREAM_REF..HEAD" 2>/dev/null || echo "?")
BEHIND=$(git rev-list --count "HEAD..$UPSTREAM_REF" 2>/dev/null || echo "?")
echo ""
echo "=== Sync Summary ==="
echo "Upstream: $UPSTREAM_REF"
echo "Local: HEAD ($(git rev-parse --short HEAD))"
echo "Ahead: $AHEAD Red Bear OS commits"
echo "Behind: $BEHIND upstream commits"
if [ "$NO_MERGE" = 1 ]; then
echo ""
echo "To merge manually:"
echo " git rebase $UPSTREAM_REF"
exit 0
fi
if [ "$DRY_RUN" = "1" ]; then
echo ""
echo " [dry-run] Would rebase onto $UPSTREAM_REF"
exit 0
fi
# ── 3.5. Check for uncommitted local/ changes ──────────────────────
if [ "$NO_MERGE" = "0" ] && [ "$DRY_RUN" = "0" ]; then
LOCAL_CHANGES=""
LOCAL_UNTRACKED=""
if [ -d "local" ]; then
LOCAL_CHANGES=$(cd local && git diff --name-only HEAD 2>/dev/null || true)
LOCAL_UNTRACKED=$(cd local && git ls-files --others --exclude-standard 2>/dev/null || true)
fi
# Also check for uncommitted changes to tracked local/ files from repo root
ROOT_LOCAL_CHANGES=$(git diff --name-only HEAD -- local/ 2>/dev/null || true)
if [ -n "$LOCAL_CHANGES" ] || [ -n "$LOCAL_UNTRACKED" ] || [ -n "$ROOT_LOCAL_CHANGES" ]; then
echo ""
echo "!! WARNING: Uncommitted changes detected in local/"
if [ -n "$ROOT_LOCAL_CHANGES" ]; then
echo " Modified files:"
echo "$ROOT_LOCAL_CHANGES" | head -10 | while read -r f; do echo " $f"; done
TOTAL=$(echo "$ROOT_LOCAL_CHANGES" | grep -c .)
[ "$TOTAL" -gt 10 ] && echo " ... and $((TOTAL - 10)) more"
fi
if [ -n "$LOCAL_UNTRACKED" ]; then
echo " Untracked files (NOT protected by stash):"
echo "$LOCAL_UNTRACKED" | head -5 | while read -r f; do echo " $f"; done
TOTAL=$(echo "$LOCAL_UNTRACKED" | grep -c .)
[ "$TOTAL" -gt 5 ] && echo " ... and $((TOTAL - 5)) more"
fi
echo ""
echo " git stash does NOT protect untracked files."
echo " Commit your local/ changes before syncing, or use --force to proceed anyway."
if [ "$FORCE" = "0" ]; then
echo ""
echo " ABORT: Uncommitted local/ changes detected."
echo " Commit your changes first: git add local/ && git commit -m 'WIP'"
echo " Or use --force if you understand the risks (untracked files will be LOST)."
exit 1
else
# --force with untracked files requires explicit confirmation
if [ -n "$LOCAL_UNTRACKED" ]; then
echo ""
echo "!! DANGER: --force with untracked files will DELETE them permanently. !!"
echo " git stash does NOT protect untracked files."
echo " Untracked files found:"
echo "$LOCAL_UNTRACKED" | head -10 | while read -r f; do echo " $f"; done
TOTAL=$(echo "$LOCAL_UNTRACKED" | grep -c .)
[ "$TOTAL" -gt 10 ] && echo " ... and $((TOTAL - 10)) more"
echo ""
read -p " Type 'YES_DELETE' to confirm destruction of untracked local/ files: " CONFIRM
if [ "$CONFIRM" != "YES_DELETE" ]; then
echo " Aborted. Your untracked files are safe."
exit 1
fi
echo " Proceeding with --force — untracked files WILL be deleted..."
else
echo " --force specified, proceeding (tracked changes will be stashed)..."
fi
fi
fi
fi
# ── 4. Stash uncommitted changes ────────────────────────────────────
STASHED=0
if ! git diff --quiet 2>/dev/null || ! git diff --cached --quiet 2>/dev/null; then
echo "==> Stashing uncommitted changes..."
git stash push -u -m "redbear-sync-$(date +%Y%m%d-%H%M%S)"
STASHED=1
fi
PREV_HEAD=$(git rev-parse HEAD)
# ── 4.5. Verify overlay integrity before rebase ────────────────────
echo "==> Verifying Red Bear overlay integrity before rebase..."
BROKEN_SYMLINKS=""
while IFS= read -r link; do
if [ ! -e "$link" ]; then
BROKEN_SYMLINKS="$BROKEN_SYMLINKS
$link -> $(readlink "$link")"
fi
done < <(find recipes -maxdepth 3 -type l 2>/dev/null)
if [ -n "$BROKEN_SYMLINKS" ]; then
echo "!! WARNING: Broken symlinks detected in recipes/:"
echo "$BROKEN_SYMLINKS" | head -20
TOTAL=$(echo "$BROKEN_SYMLINKS" | grep -c .)
[ "$TOTAL" -gt 20 ] && echo " ... and $((TOTAL - 20)) more"
echo ""
echo " These symlinks may break further during rebase."
echo " Run ./local/scripts/apply-patches.sh after rebase to recreate them."
fi
# Check that key local/patches exist
for patch_file in local/patches/kernel/redox.patch local/patches/base/redox.patch local/patches/relibc/redox.patch; do
if [ ! -f "$patch_file" ]; then
echo "!! CRITICAL: Missing patch file: $patch_file"
echo " Cannot recover from rebase failure without this patch."
if [ "$FORCE" = "0" ]; then
exit 1
fi
fi
done
# ── 5. Rebase ───────────────────────────────────────────────────────
echo ""
echo "==> Rebasing Red Bear OS commits onto $UPSTREAM_REF..."
echo " (this replays our $AHEAD commits on top of updated upstream)"
if git rebase "$UPSTREAM_REF"; then
echo ""
echo "==> Rebase successful."
else
echo ""
echo "!! Rebase conflict. Options:"
echo " 1. Resolve conflicts: edit files, git add, git rebase --continue"
echo " 2. Abort: git rebase --abort"
echo " 3. Nuclear option (DESTRUCTIVE — loses uncommitted work):"
echo " git rebase --abort"
echo " git reset --hard $UPSTREAM_REF"
echo " ./local/scripts/apply-patches.sh --force"
echo ""
echo " Patches for recovery: local/patches/build-system/"
echo " Previous HEAD: $PREV_HEAD"
echo ""
echo " IMPORTANT: Before using the nuclear option, ensure all local/ changes"
echo " are committed. The nuclear option does NOT preserve uncommitted work."
echo " To recover to previous state: git reset --hard $PREV_HEAD"
exit 1
fi
# ── 6. Restore stash ────────────────────────────────────────────────
if [ "$STASHED" = 1 ]; then
echo "==> Restoring stashed changes..."
if git stash pop; then
echo " Stash restored successfully."
else
echo "!! Stash pop had conflicts."
echo " Your changes are preserved in the stash."
echo " Options:"
echo " 1. Resolve conflicts in the working tree"
echo " 2. git checkout --theirs . && git stash drop"
echo " 3. git reset --hard && git stash pop (try again on clean tree)"
echo " List stashes: git stash list"
fi
fi
# ── 7. Verify symlinks ─────────────────────────────────────────────
echo "==> Verifying recipe patch symlinks..."
if [ -f local/scripts/apply-patches.sh ]; then
bash local/scripts/apply-patches.sh
else
echo " apply-patches.sh not found — verify symlinks manually"
ls -la recipes/core/kernel/redox.patch recipes/core/base/redox.patch
fi
if [ -x local/scripts/verify-overlay-integrity.sh ]; then
echo "==> Verifying overlay integrity..."
local/scripts/verify-overlay-integrity.sh --repair
fi
GREEN='\033[1;32m'
BLUE='\033[1;34m'
NC='\033[0m'
echo ""
echo "==> Sync complete."
echo "==> Guarding recipe durability..."
./local/scripts/guard-recipes.sh --restore 2>/dev/null || echo " (guard-recipes.sh not found — run manually)"
echo " Previous HEAD: $PREV_HEAD"
echo " New HEAD: $(git rev-parse HEAD)"
echo -e "${GREEN}sync-upstream.sh has been retired.${NC}"
echo ""
echo "Next: make all CONFIG_NAME=redbear-full"
echo "Red Bear OS is now a release-based fork."
echo "Current baseline: 0.1.0 (f55acba68)"
echo "Sources are immutable — never auto-refreshed from upstream."
echo ""
echo -e "${BLUE}Available commands:${NC}"
echo " check-upstream-releases.sh See new Redox snapshots (read-only)"
echo " provision-release.sh Provision a new release"
echo " restore-sources.sh Restore sources from archives"
echo ""
exit 0
+75
View File
@@ -0,0 +1,75 @@
#!/usr/bin/env python3
"""Validate that all source trees required by a build config exist."""
import sys, tomllib
from pathlib import Path

# Repo root, two levels above local/scripts/.  NOTE(review): not referenced
# elsewhere in this file — all paths below are CWD-relative, so the script
# assumes it runs from the repo root (the .sh wrapper cds there first).
PROJECT_ROOT = Path(__file__).resolve().parents[2]
# Build config name; first CLI argument, defaulting to the full config.
CONFIG = sys.argv[1] if len(sys.argv) > 1 else "redbear-full"
def build_lookup():
    """Map package name -> recipe directory (first match wins).

    Scans recipes/ then local/recipes/ relative to the CWD, skipping any
    recipe.toml that lives inside a source/ or target/ tree.
    """
    found = {}
    for search_root in (Path("recipes"), Path("local/recipes")):
        for toml_path in search_root.rglob("recipe.toml"):
            if "source" in toml_path.parts or "target" in toml_path.parts:
                continue
            # setdefault keeps the first directory seen for each name.
            found.setdefault(toml_path.parent.name, toml_path.parent)
    return found
def resolve_config(cp, visited=None):
if visited is None: visited = set()
cp = cp.resolve()
if cp in visited: return {}
visited.add(cp)
with open(cp, "rb") as f: c = tomllib.load(f)
pkgs = dict(c.get("packages", {}))
for inc in c.get("include", []):
ip = cp.parent / inc
if ip.exists():
incd = resolve_config(ip, visited)
for k, v in pkgs.items(): incd[k] = v
pkgs = incd
return pkgs
def main():
    """Check that every package in the selected config has a populated
    recipe source tree.

    Prints a per-package report and returns 0 when all required
    recipes/<...>/source directories exist and are non-empty, 1 when the
    config is missing or any source tree is absent/empty.
    """
    # NOTE(review): CWD-relative — the .sh wrapper cds to the repo root
    # first; PROJECT_ROOT exists if this ever needs to run from elsewhere.
    config_path = Path("config") / f"{CONFIG}.toml"
    if not config_path.exists():
        print(f"Config not found: {config_path}", file=sys.stderr)
        return 1
    lookup = build_lookup()
    pkgs = resolve_config(config_path)
    print(f"=== Validating source trees for config: {CONFIG} ===")
    missing = 0
    present = 0
    for pkg_name, pkg_conf in sorted(pkgs.items()):
        # A package pinned to the string "ignore" is excluded from the build.
        if str(pkg_conf) == "ignore":
            continue
        # Meta packages have no source requirement
        if pkg_name in ("libgcc", "libstdcxx"):
            continue
        rd = lookup.get(pkg_name)
        if rd is None:
            print(f"  NOT FOUND: {pkg_name}")
            missing += 1
            continue
        src = rd / "source"
        if src.is_dir() and any(src.iterdir()):
            present += 1
        else:
            # BUGFIX: report the path that is actually absent/empty (the
            # source subdirectory), not merely the recipe directory, which
            # may well exist.
            print(f"  MISSING: {src}")
            missing += 1
    print(f"\n  Total (config): {present + missing}")
    print(f"  Present: {present}")
    print(f"  Missing: {missing}")
    if missing:
        print("\nTo restore: ./local/scripts/restore-sources.sh --release=0.1.0")
        return 1
    print("All source trees present.")
    return 0


if __name__ == "__main__":
    sys.exit(main())
+9
View File
@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# validate-source-trees.sh — Check all required source trees exist before building.
# Thin wrapper: resolve the repo root, cd there, then hand off to
# validate-source-trees.py which does the config parsing and validation.
set -eo pipefail

here="$(cd "$(dirname "$0")" && pwd)"
repo_root="$(cd "$here/../.." && pwd)"
config_name="${1:-redbear-full}"

cd "$repo_root"
exec python3 "$here/validate-source-trees.py" "$config_name"
+86
View File
@@ -0,0 +1,86 @@
#!/usr/bin/env bash
# verify-patches.sh — Check which Red Bear patches need rebasing against current source trees.
#
# Usage:
#   ./local/scripts/verify-patches.sh [--component=base|kernel|relibc] [--all]
#
# Dry-runs all patches against their target source trees and reports:
#   OK       — patch applies cleanly
#   REV      — reversed/already applied (upstream absorbed)
#   CONFLICT — genuine conflict, needs rebasing
#
# Exit code: number of CONFLICT patches
set -eo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# BUGFIX: the header documents --component=<name> and --all, but the code
# only read bare positionals ($1/$2). Accept both forms; the first bare
# argument is still treated as the component for backward compatibility.
COMPONENT=""
MODE=""
for arg in "$@"; do
  case "$arg" in
    --component=*) COMPONENT="${arg#--component=}" ;;
    --all) COMPONENT="all" ;;
    *)
      if [ -z "$COMPONENT" ]; then
        COMPONENT="$arg"
      else
        MODE="$arg"
      fi
      ;;
  esac
done
: "${COMPONENT:=all}"
: "${MODE:=}"

cd "$PROJECT_ROOT"

# ANSI colours for the per-patch report lines.
GREEN='\033[1;32m'
RED='\033[1;31m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Global result counters, updated by check_patches.
ok=0
rev=0
conflict=0
#######################################
# Dry-run every *.patch in a directory against a source tree.
# Arguments: $1 patch dir, $2 target source tree, $3 report label
# Globals:   ok/rev/conflict counters (incremented), colour vars (read)
# Outputs:   one classified line per patch on stdout
#######################################
check_patches() {
  local patch_dir="$1"
  local target_dir="$2"
  local label="$3"
  # BUGFIX: return 0 on skip. A bare `|| return` propagated status 1, which
  # aborts the whole script under `set -e` when a patch dir is absent.
  [ -d "$patch_dir" ] || return 0
  if [ ! -d "$target_dir" ]; then
    # BUGFIX: plain `echo` printed the \033 escapes literally; use -e like
    # the other Red Bear scripts so colours actually render.
    echo -e "  ${RED}SKIP${NC} $label: target not found"
    return 0
  fi
  echo "=== $label ==="
  local patch name result
  for patch in "$patch_dir"/*.patch; do
    [ -f "$patch" ] || continue
    name=$(basename "$patch")
    # Declaration split from assignment so the command status is not masked;
    # patch(1) exits non-zero for REV/CONFLICT, which we classify below.
    result=$(patch -p1 --dry-run -d "$target_dir" < "$patch" 2>&1) || true
    if echo "$result" | grep -q 'Reversed\|previously applied'; then
      echo -e "  ${YELLOW}REV${NC} $name (upstream absorbed)"
      rev=$((rev + 1))
    elif echo "$result" | grep -q 'FAILED\|hunks\? FAILED'; then
      echo -e "  ${RED}CONFLICT${NC} $name"
      conflict=$((conflict + 1))
    else
      echo -e "  ${GREEN}OK${NC} $name"
      ok=$((ok + 1))
    fi
  done
}
# True when the given component group should run (explicitly, or for "all").
selected() {
  [ "$COMPONENT" = "all" ] || [ "$COMPONENT" = "$1" ]
}

if selected base; then
  check_patches "local/patches/base" "recipes/core/base/source" "base"
fi

if selected kernel; then
  check_patches "local/patches/kernel" "recipes/core/kernel/source" "kernel"
  # Fallback: kernel source may be nested from archive extraction
  if [ ! -d "recipes/core/kernel/source" ] && [ -d "recipes/core/kernel/kernel/source" ]; then
    check_patches "local/patches/kernel" "recipes/core/kernel/kernel/source" "kernel"
  fi
fi

if selected relibc; then
  check_patches "local/patches/relibc" "recipes/core/relibc/source" "relibc"
fi

echo ""
echo "========================================="
echo "  OK:       $ok"
echo "  Reversed: $rev (upstream absorbed)"
echo "  Conflict: $conflict (needs rebase)"
echo "========================================="
exit $conflict
+771
View File
@@ -0,0 +1,771 @@
#!/usr/bin/env bash
# verify-release-completeness.sh — Run 7 mechanical completeness gates.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# CLI state: release version and whether to verify the staging directory.
RELEASE=""
USE_STAGING=0
# Number of gates that failed; drives the script's final status.
FAIL_COUNT=0
# Manifest parse results: entry presence and flattened "entry:field" scalars.
declare -A ENTRY_PRESENT=()
declare -A ENTRY_FIELDS=()
# Config-closure traversal state: visited set, package map, recipe cache,
# closure package -> recipe key map, and config visit order.
declare -A CONFIG_VISITED=()
declare -A CONFIG_PACKAGES=()
declare -A RECIPE_CACHE=()
declare -A CLOSURE_RECIPE_KEYS=()
declare -a CONFIG_ORDER=()
# Print CLI help for this verifier.
usage() {
  printf '%s\n' \
    "Usage: $(basename "$0") --release=<ver> [--staging]" \
    'Run the 7 mechanical completeness gates for a Red Bear release directory.' \
    'Options:' \
    '  --release=<ver>  Release version (for example 0.2.0)' \
    '  --staging        Verify sources/.staging/redbear-<ver>' \
    '  -h, --help       Show this help'
}
# Report a passing gate on stdout: "PASS: <gate> — <detail>".
pass_gate() {
  local gate_name="$1" detail="$2"
  printf '%s: %s — %s\n' 'PASS' "$gate_name" "$detail"
}
# Record a failed gate (bumps FAIL_COUNT) and report it on stderr.
fail_gate() {
  local gate_name="$1" detail="$2"
  FAIL_COUNT=$((FAIL_COUNT + 1))
  printf '%s: %s — %s\n' 'FAIL' "$gate_name" "$detail" >&2
}
# Strip leading and trailing whitespace from $1; result on stdout,
# no trailing newline.
trim() {
  local s="$1"
  s="${s#"${s%%[![:space:]]*}"}"   # drop leading whitespace
  s="${s%"${s##*[![:space:]]}"}"   # drop trailing whitespace
  printf '%s' "$s"
}
# Remove one pair of surrounding double quotes from a JSON token, if present;
# anything else (bare literals, a lone quote) passes through unchanged.
json_unquote() {
  local text="$1" n="${#1}"
  case "$text" in
    '"'*'"') printf '%s' "${text:1:n-2}" ;;
    *) printf '%s' "$text" ;;
  esac
}
# Tokenize a JSON file into one token per line: structural characters
# ({ } [ ] : ,), strings with their quotes retained, and bare literals
# (numbers, true/false/null).  Pure awk, so no jq/python dependency.
# Exits non-zero (awk END block) if the file ends inside an unterminated
# string; escape sequences inside strings are passed through verbatim.
json_tokenize() {
  local json_file="$1"
  awk '
    BEGIN {
      in_string = 0
      escape = 0
      token = ""
    }
    {
      line = $0 "\n"
      for (i = 1; i <= length(line); i++) {
        c = substr(line, i, 1)
        if (in_string) {
          token = token c
          if (escape) {
            escape = 0
            continue
          }
          if (c == "\\") {
            escape = 1
            continue
          }
          if (c == "\"") {
            print token
            token = ""
            in_string = 0
          }
          continue
        }
        if (c ~ /[[:space:]]/) {
          continue
        }
        if (c == "\"") {
          in_string = 1
          escape = 0
          token = "\""
          continue
        }
        if (c ~ /[\{\}\[\]:,]/) {
          print c
          continue
        }
        token = c
        while (i + 1 <= length(line)) {
          next_c = substr(line, i + 1, 1)
          if (next_c ~ /[[:space:]\{\}\[\]:,]/) {
            break
          }
          i++
          token = token next_c
        }
        print token
        token = ""
      }
    }
    END {
      if (in_string) {
        exit 1
      }
    }
  ' "$json_file"
}
# Token-stream cursor shared by the recursive-descent parser helpers below.
declare -a TOKENS=()
TOKEN_INDEX=0
CURRENT_TOKEN=''
# Load the token at TOKEN_INDEX into CURRENT_TOKEN without advancing;
# past end of stream, CURRENT_TOKEN becomes empty.
current_token() {
  if (( TOKEN_INDEX >= ${#TOKENS[@]} )); then
    CURRENT_TOKEN=''
  else
    CURRENT_TOKEN="${TOKENS[$TOKEN_INDEX]}"
  fi
}
# Expose the token at TOKEN_INDEX in CURRENT_TOKEN, then advance the cursor
# (read-before-increment ordering is load-bearing).
consume_token() {
  current_token
  TOKEN_INDEX=$((TOKEN_INDEX + 1))
}
# Consume one token; abort the whole script if it is not the expected one.
expect_token() {
  local want="$1"
  consume_token
  case "$CURRENT_TOKEN" in
    "$want") ;;
    *)
      printf 'JSON parse error: expected %s but found %s\n' "$want" "$CURRENT_TOKEN" >&2
      exit 1
      ;;
  esac
}
# Skip one complete JSON value (object, array, or scalar) in the token
# stream without recording anything; recursion handles arbitrary nesting.
skip_json_value() {
  current_token
  case "$CURRENT_TOKEN" in
    '{')
      consume_token >/dev/null
      current_token
      while [ "$CURRENT_TOKEN" != '}' ]; do
        # consume the key, then skip its value
        consume_token >/dev/null
        expect_token ':'
        skip_json_value
        current_token
        if [ "$CURRENT_TOKEN" = ',' ]; then
          consume_token >/dev/null
        fi
        current_token
      done
      expect_token '}'
      ;;
    '[')
      consume_token >/dev/null
      current_token
      while [ "$CURRENT_TOKEN" != ']' ]; do
        skip_json_value
        current_token
        if [ "$CURRENT_TOKEN" = ',' ]; then
          consume_token >/dev/null
        fi
        current_token
      done
      expect_token ']'
      ;;
    *)
      # scalar: consume and discard
      consume_token >/dev/null
      ;;
  esac
}
# Record one flattened scalar field for a manifest entry in ENTRY_FIELDS,
# keyed "<entry>:<field>"; quoted JSON strings are unquoted first.
store_entry_scalar() {
  local entry="$1" key="$2" raw="$3"
  local value
  case "$raw" in
    '"'*'"') value="$(json_unquote "$raw")" ;;
    *) value="$raw" ;;
  esac
  ENTRY_FIELDS["$entry:$key"]="$value"
}
# Recursively record one manifest entry's scalar fields into ENTRY_FIELDS,
# flattening nested objects into dotted keys (e.g. "archive.blake3").
# Arrays are skipped wholesale via skip_json_value.
parse_entry_object() {
  local entry="$1"
  local prefix="$2"
  local field raw
  expect_token '{'
  current_token
  while [ "$CURRENT_TOKEN" != '}' ]; do
    consume_token
    field="$(json_unquote "$CURRENT_TOKEN")"
    expect_token ':'
    current_token
    case "$CURRENT_TOKEN" in
      '{')
        parse_entry_object "$entry" "${prefix}${field}."
        ;;
      '[')
        skip_json_value
        ;;
      *)
        consume_token
        raw="$CURRENT_TOKEN"
        store_entry_scalar "$entry" "${prefix}${field}" "$raw"
        ;;
    esac
    current_token
    if [ "$CURRENT_TOKEN" = ',' ]; then
      consume_token >/dev/null
    fi
    current_token
  done
  expect_token '}'
}
# Walk the manifest's top-level "entries" object: mark each entry name in
# ENTRY_PRESENT and parse its field object.
parse_entries_object() {
  local entry_name
  expect_token '{'
  current_token
  while [ "$CURRENT_TOKEN" != '}' ]; do
    consume_token
    entry_name="$(json_unquote "$CURRENT_TOKEN")"
    ENTRY_PRESENT["$entry_name"]=1
    expect_token ':'
    parse_entry_object "$entry_name" ""
    current_token
    if [ "$CURRENT_TOKEN" = ',' ]; then
      consume_token >/dev/null
    fi
    current_token
  done
  expect_token '}'
}
# Parse the release manifest.json: record every entry name in ENTRY_PRESENT
# and flatten each entry's scalar fields into ENTRY_FIELDS. Only the
# top-level "entries" object is parsed; other top-level keys are skipped.
parse_manifest_json() {
  local manifest_json="$1"
  local key token_stream
  # BUGFIX: capture the tokenizer output first so its exit status is
  # actually observed. `mapfile -t TOKENS < <(json_tokenize ...)` reports
  # mapfile's status, not the tokenizer's, so a malformed manifest
  # (unterminated string) used to slip through this check silently.
  if ! token_stream="$(json_tokenize "$manifest_json")"; then
    printf 'failed to tokenize manifest JSON: %s\n' "$manifest_json" >&2
    exit 1
  fi
  mapfile -t TOKENS <<< "$token_stream"
  TOKEN_INDEX=0
  expect_token '{'
  current_token
  while [ "$CURRENT_TOKEN" != '}' ]; do
    consume_token
    key="$(json_unquote "$CURRENT_TOKEN")"
    expect_token ':'
    if [ "$key" = 'entries' ]; then
      parse_entries_object
    else
      skip_json_value
    fi
    current_token
    if [ "$CURRENT_TOKEN" = ',' ]; then
      consume_token >/dev/null
    fi
    current_token
  done
  expect_token '}'
}
# Look up one flattened field for an entry ("" when absent).
entry_field() {
  local lookup_key="$1:$2"
  printf '%s' "${ENTRY_FIELDS[$lookup_key]-}"
}
# Print the first of the named fields on an entry that is set to something
# other than "", "null", or "false"; print nothing when none qualifies.
first_nonempty_field() {
  local entry="$1"
  shift
  local candidate found
  for candidate in "$@"; do
    found="$(entry_field "$entry" "$candidate")"
    case "$found" in
      ''|null|false) continue ;;
    esac
    printf '%s' "$found"
    return
  done
  printf ''
}
# Resolve an include path relative to the config file that names it; prints
# the absolute path, or returns 1 when the include's directory is missing.
resolve_config_path() {
  local base_file="$1" include_rel="$2"
  local anchor target_dir
  anchor="$(cd "$(dirname "$base_file")" && pwd)"
  target_dir="$(cd "$anchor/$(dirname "$include_rel")" 2>/dev/null && pwd)" || return 1
  printf '%s/%s' "$target_dir" "$(basename "$include_rel")"
}
# Depth-first walk of a build config and its include graph. Records the
# visit order in CONFIG_ORDER and every [packages] key in CONFIG_PACKAGES
# ('ignore' vs 'present'); exits the script on missing config files or
# unresolvable includes. CONFIG_VISITED breaks include cycles.
collect_config_closure() {
  local config_file="$1"
  local rel_path section line trimmed include_text include_rel matched_include package_name package_value resolved
  if [ ! -f "$config_file" ]; then
    printf 'missing config file in repo: %s\n' "$config_file" >&2
    exit 1
  fi
  rel_path="${config_file#"$PROJECT_ROOT/config/"}"
  if [ "${CONFIG_VISITED["$rel_path"]-}" = '1' ]; then
    return
  fi
  CONFIG_VISITED["$rel_path"]=1
  CONFIG_ORDER+=("$rel_path")
  section=''
  # '|| [ -n "$line" ]' keeps the last line even without a trailing newline.
  while IFS= read -r line || [ -n "$line" ]; do
    trimmed="$(trim "$line")"
    # Recurse into every quoted path of an `include = [ ... ]` line first.
    if [[ "$trimmed" =~ ^include[[:space:]]*=[[:space:]]*\[(.*)\][[:space:]]*$ ]]; then
      include_text="${BASH_REMATCH[1]}"
      while [[ "$include_text" =~ \"([^\"]+)\" ]]; do
        matched_include="${BASH_REMATCH[0]}"
        include_rel="${BASH_REMATCH[1]}"
        resolved="$(resolve_config_path "$config_file" "$include_rel")" || {
          printf 'cannot resolve include %s from %s\n' "$include_rel" "$config_file" >&2
          exit 1
        }
        collect_config_closure "$resolved"
        # Chop off the processed match so the regex finds the next include.
        include_text=${include_text#*${matched_include}}
      done
      continue
    fi
    # Strip trailing comments, then track the current [section] header.
    trimmed="${trimmed%%#*}"
    trimmed="$(trim "$trimmed")"
    [ -z "$trimmed" ] && continue
    if [[ "$trimmed" =~ ^\[(.+)\]$ ]]; then
      section="${BASH_REMATCH[1]}"
      continue
    fi
    # Inside [packages]: classify each assignment as ignored or present.
    if [ "$section" = 'packages' ] && [[ "$trimmed" =~ ^([A-Za-z0-9._+-]+)[[:space:]]*=[[:space:]]*(.+)$ ]]; then
      package_name="${BASH_REMATCH[1]}"
      package_value="$(trim "${BASH_REMATCH[2]}")"
      if [[ "$package_value" =~ ^\"ignore\"$ ]]; then
        CONFIG_PACKAGES["$package_name"]='ignore'
      else
        CONFIG_PACKAGES["$package_name"]='present'
      fi
    fi
  done < "$config_file"
}
# Map a config package name to its recipe key (path under recipes/ minus the
# trailing /recipe.toml). Unique matches are cached in RECIPE_CACHE; zero
# matches print an empty string; multiple matches print an "__AMBIGUOUS__:"
# comma-joined list for the caller to report.
# NOTE: an empty cached value fails the -n guard below, so zero-match
# lookups are re-run on every call (harmless, just slower).
resolve_recipe_key() {
  local package_name="$1"
  local recipe_file match rel_path recipe_key
  local -a matches=()
  if [ -n "${RECIPE_CACHE["$package_name"]-}" ]; then
    printf '%s' "${RECIPE_CACHE["$package_name"]}"
    return
  fi
  # find -L follows recipe symlinks; anything inside source/ is excluded.
  while IFS= read -r recipe_file; do
    [ -n "$recipe_file" ] || continue
    matches+=("$recipe_file")
  done < <(find -L "$PROJECT_ROOT/recipes" -path "*/${package_name}/recipe.toml" -not -path '*/source/*' -print 2>/dev/null | sort)
  if [ "${#matches[@]}" -eq 1 ]; then
    match="${matches[0]}"
    rel_path="${match#"$PROJECT_ROOT/recipes/"}"
    recipe_key="${rel_path%/recipe.toml}"
    RECIPE_CACHE["$package_name"]="$recipe_key"
    printf '%s' "$recipe_key"
    return
  fi
  if [ "${#matches[@]}" -eq 0 ]; then
    RECIPE_CACHE["$package_name"]=''
    printf ''
    return
  fi
  printf '__AMBIGUOUS__:'
  printf '%s' "${matches[0]#"$PROJECT_ROOT/recipes/"}"
  local index
  for ((index = 1; index < ${#matches[@]}; index++)); do
    printf ',%s' "${matches[$index]#"$PROJECT_ROOT/recipes/"}"
  done
}
# Verify one payload file referenced by a manifest entry.
#   $1 entry name   $2 kind ('archive' or 'snapshot')   $3 payload directory
# Returns non-zero when the file is missing, has no recorded BLAKE3 hash, or
# the recorded hash does not match b3sum's output. Entries without a file of
# this kind are fine (return 0). Requires b3sum on PATH — if it is absent
# the hash comparison fails and the entry is reported as a failure.
verify_archive_file() {
  local entry="$1"
  local kind="$2"
  local directory="$3"
  local file_name hash_value archive_path computed_hash
  file_name="$(first_nonempty_field "$entry" "$kind" "$kind.path")"
  [ -n "$file_name" ] || return 0
  # Kind-specific hash fields, falling back to a top-level 'blake3'.
  case "$kind" in
    archive) hash_value="$(first_nonempty_field "$entry" 'archive.blake3' 'blake3')" ;;
    snapshot) hash_value="$(first_nonempty_field "$entry" 'snapshot.blake3' 'blake3')" ;;
    *) hash_value="$(first_nonempty_field "$entry" 'blake3')" ;;
  esac
  archive_path="$directory/$file_name"
  if [ ! -f "$archive_path" ]; then
    printf '  - %s: missing %s file %s\n' "$entry" "$kind" "$archive_path" >&2
    return 1
  fi
  if [ -z "$hash_value" ]; then
    printf '  - %s: missing BLAKE3 for %s file %s\n' "$entry" "$kind" "$file_name" >&2
    return 1
  fi
  computed_hash="$(b3sum "$archive_path" | awk '{print $1}')"
  if [ "$computed_hash" != "$hash_value" ]; then
    printf '  - %s: checksum mismatch for %s (expected %s, got %s)\n' "$entry" "$file_name" "$hash_value" "$computed_hash" >&2
    return 1
  fi
  return 0
}
# Gate 1/7: every package in the config closure must resolve to exactly one
# recipe directory, and that recipe must have a manifest entry.
# NOTE(review): CONFIG_PACKAGES_SORTED is built outside this excerpt —
# presumably a sorted view of CONFIG_PACKAGES; confirm against the driver.
run_gate_closure_completeness() {
  local package_name recipe_key
  local closure_ok=0
  local -a closure_missing=() closure_ambiguous=()
  for package_name in "${CONFIG_PACKAGES_SORTED[@]}"; do
    recipe_key="$(resolve_recipe_key "$package_name")"
    if [ -z "$recipe_key" ]; then
      closure_missing+=("$package_name (no recipe path under recipes/)")
      continue
    fi
    if [[ "$recipe_key" == __AMBIGUOUS__:* ]]; then
      closure_ambiguous+=("$package_name (${recipe_key#__AMBIGUOUS__:})")
      continue
    fi
    # Remember the mapping for the later gates (provenance, dirty-tree).
    CLOSURE_RECIPE_KEYS["$package_name"]="$recipe_key"
    if [ -n "${ENTRY_PRESENT["$recipe_key"]-}" ]; then
      closure_ok=$((closure_ok + 1))
    else
      closure_missing+=("$package_name ($recipe_key)")
    fi
  done
  if [ "${#closure_missing[@]}" -eq 0 ] && [ "${#closure_ambiguous[@]}" -eq 0 ]; then
    pass_gate '1/7 closure completeness' "$closure_ok closure packages all have manifest entries"
    return
  fi
  if [ "${#closure_missing[@]}" -gt 0 ]; then
    printf '  Missing closure entries:\n' >&2
    printf '    %s\n' "${closure_missing[@]}" >&2
  fi
  if [ "${#closure_ambiguous[@]}" -gt 0 ]; then
    printf '  Ambiguous recipe matches:\n' >&2
    printf '    %s\n' "${closure_ambiguous[@]}" >&2
  fi
  fail_gate '1/7 closure completeness' 'one or more closure packages could not be matched to a manifest entry'
}
# Gate 2/7: every closure entry of type 'git' must carry a non-blank rev,
# so each git source is pinned to an exact commit.
run_gate_git_provenance() {
  local package_name recipe_key entry_type entry_rev
  local git_checked=0
  local -a blank_rev=()
  for package_name in "${CONFIG_PACKAGES_SORTED[@]}"; do
    recipe_key="${CLOSURE_RECIPE_KEYS["$package_name"]-}"
    [ -n "$recipe_key" ] || continue
    [ -n "${ENTRY_PRESENT["$recipe_key"]-}" ] || continue
    entry_type="$(first_nonempty_field "$recipe_key" 'type')"
    if [ "$entry_type" = 'git' ]; then
      git_checked=$((git_checked + 1))
      entry_rev="$(trim "$(first_nonempty_field "$recipe_key" 'rev')")"
      if [ -z "$entry_rev" ]; then
        blank_rev+=("$recipe_key")
      fi
    fi
  done
  if [ "${#blank_rev[@]}" -eq 0 ]; then
    pass_gate '2/7 git provenance' "$git_checked closure git entries have non-blank rev values"
    return
  fi
  printf '  Blank rev entries:\n' >&2
  printf '    %s\n' "${blank_rev[@]}" >&2
  fail_gate '2/7 git provenance' 'one or more closure git entries have a blank rev'
}
# Gate 3/7: every manifest entry must resolve to something — an archive, a
# snapshot, a same_as target, or a meta marker. Entries with none of these
# cannot be restored.
run_gate_archive_coverage() {
  local entry_name archive_name snapshot_name target_name meta_value
  local total_entries=0
  local -a coverage_missing=()
  while IFS= read -r entry_name; do
    [ -n "$entry_name" ] || continue
    total_entries=$((total_entries + 1))
    archive_name="$(first_nonempty_field "$entry_name" 'archive' 'archive.path')"
    snapshot_name="$(first_nonempty_field "$entry_name" 'snapshot' 'snapshot.path')"
    target_name="$(first_nonempty_field "$entry_name" 'target' 'same_as.target')"
    meta_value="$(first_nonempty_field "$entry_name" 'meta' 'meta.kind')"
    if [ -z "$archive_name" ] && [ -z "$snapshot_name" ] && [ -z "$target_name" ] && [ -z "$meta_value" ]; then
      coverage_missing+=("$entry_name")
    fi
  done < <(printf '%s\n' "${!ENTRY_PRESENT[@]}" | sort)
  if [ "${#coverage_missing[@]}" -eq 0 ]; then
    pass_gate '3/7 archive coverage' "$total_entries manifest entries all have archive, snapshot, target, or meta resolution"
    return
  fi
  printf '  Entries without resolution path:\n' >&2
  printf '    %s\n' "${coverage_missing[@]}" >&2
  fail_gate '3/7 archive coverage' 'one or more manifest entries have no resolution path'
}
# Gate 4/7: every archive/snapshot payload named by the manifest must exist
# under $RELEASE_DIR and match its recorded BLAKE3 hash (via
# verify_archive_file). RELEASE_DIR is set outside this excerpt.
run_gate_archive_integrity() {
  local entry_name archive_name snapshot_name
  local archive_checks=0
  local -a integrity_failures=()
  while IFS= read -r entry_name; do
    [ -n "$entry_name" ] || continue
    archive_name="$(first_nonempty_field "$entry_name" 'archive' 'archive.path')"
    snapshot_name="$(first_nonempty_field "$entry_name" 'snapshot' 'snapshot.path')"
    if [ -n "$archive_name" ]; then
      archive_checks=$((archive_checks + 1))
      if ! verify_archive_file "$entry_name" archive "$RELEASE_DIR/tarballs"; then
        integrity_failures+=("$entry_name")
      fi
    fi
    if [ -n "$snapshot_name" ]; then
      archive_checks=$((archive_checks + 1))
      if ! verify_archive_file "$entry_name" snapshot "$RELEASE_DIR/snapshots"; then
        integrity_failures+=("$entry_name")
      fi
    fi
  done < <(printf '%s\n' "${!ENTRY_PRESENT[@]}" | sort)
  if [ "${#integrity_failures[@]}" -eq 0 ]; then
    pass_gate '4/7 archive integrity' "$archive_checks archive or snapshot payloads exist and match their BLAKE3 hashes"
    return
  fi
  fail_gate '4/7 archive integrity' 'one or more archive or snapshot payloads are missing or have hash mismatches'
}
# Gate 5/7: every same_as link must point at an existing entry, and chains
# of same_as links must terminate without cycles. The walk tracks visited
# entries in a pipe-delimited 'seen' string.
run_gate_same_as_validation() {
  local entry_name entry_type target_name next_target next_type seen cursor
  local same_as_checked=0
  local -a same_as_missing=() same_as_cycles=()
  while IFS= read -r entry_name; do
    [ -n "$entry_name" ] || continue
    entry_type="$(first_nonempty_field "$entry_name" 'type')"
    target_name="$(first_nonempty_field "$entry_name" 'target' 'same_as.target')"
    # Only entries that are same_as-typed or carry a target participate.
    if [ "$entry_type" != 'same_as' ] && [ -z "$(entry_field "$entry_name" 'same_as.target')" ] && [ -z "$target_name" ]; then
      continue
    fi
    same_as_checked=$((same_as_checked + 1))
    if [ -z "$target_name" ]; then
      same_as_missing+=("$entry_name (blank target)")
      continue
    fi
    if [ -z "${ENTRY_PRESENT["$target_name"]-}" ]; then
      same_as_missing+=("$entry_name -> $target_name")
      continue
    fi
    # Follow the chain until a non-same_as entry, a break, or a repeat.
    seen="|$entry_name|"
    cursor="$target_name"
    while :; do
      next_target="$(first_nonempty_field "$cursor" 'target' 'same_as.target')"
      next_type="$(first_nonempty_field "$cursor" 'type')"
      if [ "$next_type" != 'same_as' ] && [ -z "$(entry_field "$cursor" 'same_as.target')" ]; then
        break
      fi
      if [ -z "$next_target" ]; then
        same_as_missing+=("$cursor (blank target)")
        break
      fi
      if [[ "$seen" == *"|$next_target|"* ]]; then
        same_as_cycles+=("$entry_name -> $next_target")
        break
      fi
      if [ -z "${ENTRY_PRESENT["$next_target"]-}" ]; then
        same_as_missing+=("$cursor -> $next_target")
        break
      fi
      seen+="$cursor|"
      cursor="$next_target"
    done
  done < <(printf '%s\n' "${!ENTRY_PRESENT[@]}" | sort)
  if [ "${#same_as_missing[@]}" -eq 0 ] && [ "${#same_as_cycles[@]}" -eq 0 ]; then
    pass_gate '5/7 same_as validation' "$same_as_checked same_as links resolve cleanly without cycles"
    return
  fi
  if [ "${#same_as_missing[@]}" -gt 0 ]; then
    printf '  Missing same_as targets:\n' >&2
    printf '    %s\n' "${same_as_missing[@]}" >&2
  fi
  if [ "${#same_as_cycles[@]}" -gt 0 ]; then
    printf '  same_as cycles:\n' >&2
    printf '    %s\n' "${same_as_cycles[@]}" >&2
  fi
  fail_gate '5/7 same_as validation' 'same_as target resolution failed or contains a cycle'
}
run_gate_config_closure() {
  # Gate 6/7: every config file reachable from the root config (CONFIG_ORDER,
  # built by collect_config_closure) must have been archived under configs/.
  # A config counts as present under either its relative path or its basename.
  local cfg
  local -a absent=()
  for cfg in "${CONFIG_ORDER[@]}"; do
    if [ ! -f "$RELEASE_CONFIG_DIR/$cfg" ] && [ ! -f "$RELEASE_CONFIG_DIR/$(basename "$cfg")" ]; then
      absent+=("$cfg")
    fi
  done
  if [ "${#absent[@]}" -gt 0 ]; then
    printf ' Missing archived configs:\n' >&2
    printf ' %s\n' "${absent[@]}" >&2
    fail_gate '6/7 config closure' 'one or more reachable config files are missing from configs/'
    return
  fi
  pass_gate '6/7 config closure' "${#CONFIG_ORDER[@]} reachable config files are present in configs/"
}
run_gate_dirty_tree() {
  # Gate 7/7: every git-sourced recipe in the config closure must have a clean
  # source tree, so the archived sources reproduce exactly what was built.
  # Reads: CONFIG_PACKAGES_SORTED, CLOSURE_RECIPE_KEYS, ENTRY_PRESENT,
  # PROJECT_ROOT. Reports via pass_gate / fail_gate; details go to stderr.
  local package_name recipe_key entry_type source_dir
  local git_dirty_checked=0
  local -a dirty_recipes=()
  for package_name in "${CONFIG_PACKAGES_SORTED[@]}"; do
    recipe_key="${CLOSURE_RECIPE_KEYS["$package_name"]-}"
    [ -n "$recipe_key" ] || continue
    [ -n "${ENTRY_PRESENT["$recipe_key"]-}" ] || continue
    entry_type="$(first_nonempty_field "$recipe_key" 'type')"
    # Only git-type entries have a worktree to inspect.
    if [ "$entry_type" != 'git' ]; then
      continue
    fi
    git_dirty_checked=$((git_dirty_checked + 1))
    source_dir="$PROJECT_ROOT/recipes/$recipe_key/source"
    if ! git -C "$source_dir" rev-parse --is-inside-work-tree >/dev/null 2>&1; then
      dirty_recipes+=("$recipe_key (source is not a git worktree: $source_dir)")
      continue
    fi
    # Fix: `git diff --quiet` only detects unstaged changes to tracked files;
    # staged-but-uncommitted changes are equally "dirty" for reproducibility,
    # so also check the index against HEAD with --cached.
    if ! git -C "$source_dir" diff --quiet || ! git -C "$source_dir" diff --cached --quiet; then
      dirty_recipes+=("$recipe_key")
    fi
  done
  if [ "${#dirty_recipes[@]}" -eq 0 ]; then
    pass_gate '7/7 dirty-tree check' "$git_dirty_checked closure git source trees are clean"
    return
  fi
  printf ' Dirty git source trees:\n' >&2
  printf ' %s\n' "${dirty_recipes[@]}" >&2
  fail_gate '7/7 dirty-tree check' 'one or more closure git source trees have uncommitted changes'
}
# ------------------------- command-line handling -------------------------
while [ $# -gt 0 ]; do
  case "$1" in
    --release=*)
      RELEASE="${1#*=}"
      ;;
    --staging)
      USE_STAGING=1
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      printf 'Unknown argument: %s\n' "$1" >&2
      usage >&2
      exit 1
      ;;
  esac
  shift
done
[ -n "$RELEASE" ] || {
  printf 'ERROR: --release is required\n' >&2
  usage >&2
  exit 1
}
# Both tools are hard requirements for the gates below.
for required_tool in b3sum git; do
  if ! command -v "$required_tool" >/dev/null 2>&1; then
    printf 'ERROR: %s is required\n' "$required_tool" >&2
    exit 1
  fi
done
# Resolve the release directory (staging layout when --staging was given).
if [ "$USE_STAGING" -eq 1 ]; then
  RELEASE_DIR="$PROJECT_ROOT/sources/.staging/redbear-$RELEASE"
else
  RELEASE_DIR="$PROJECT_ROOT/sources/redbear-$RELEASE"
fi
MANIFEST_JSON="$RELEASE_DIR/manifest.json"
RELEASE_CONFIG_DIR="$RELEASE_DIR/configs"
ROOT_CONFIG="$PROJECT_ROOT/config/redbear-full.toml"
[ -d "$RELEASE_DIR" ] || {
  printf 'ERROR: release directory not found: %s\n' "$RELEASE_DIR" >&2
  exit 1
}
# Build the sorted list of packages reachable from the root config.
collect_config_closure "$ROOT_CONFIG"
CONFIG_PACKAGES_SORTED=()
while IFS= read -r pkg; do
  [ -n "$pkg" ] || continue
  if [ "${CONFIG_PACKAGES["$pkg"]}" = 'present' ]; then
    CONFIG_PACKAGES_SORTED+=("$pkg")
  fi
done < <(printf '%s\n' "${!CONFIG_PACKAGES[@]}" | sort)
# Run the seven gates. Without a manifest only the config-closure gate can
# actually run; every manifest-backed gate is failed explicitly so the
# summary still accounts for all seven.
if [ -f "$MANIFEST_JSON" ]; then
  parse_manifest_json "$MANIFEST_JSON"
  run_gate_closure_completeness
  run_gate_git_provenance
  run_gate_archive_coverage
  run_gate_archive_integrity
  run_gate_same_as_validation
  run_gate_config_closure
  run_gate_dirty_tree
else
  fail_gate '1/7 closure completeness' 'manifest.json is missing, so manifest-backed checks cannot run'
  fail_gate '2/7 git provenance' 'manifest.json is missing, so git provenance cannot be verified'
  fail_gate '3/7 archive coverage' 'manifest.json is missing, so resolution paths cannot be verified'
  fail_gate '4/7 archive integrity' 'manifest.json is missing, so archive hashes cannot be verified'
  fail_gate '5/7 same_as validation' 'manifest.json is missing, so same_as targets cannot be verified'
  run_gate_config_closure
  fail_gate '7/7 dirty-tree check' 'manifest.json is missing, so closure git source trees cannot be verified'
fi
printf '\n'
if [ "$FAIL_COUNT" -ne 0 ]; then
  printf 'Release completeness FAILED for %s (%d gate(s) failed)\n' "$RELEASE_DIR" "$FAIL_COUNT" >&2
  exit 1
fi
printf 'Release completeness PASSED for %s\n' "$RELEASE_DIR"
exit 0
# ===== End of release completeness script =====
# New file in this commit: local/scripts/verify-sources-archived.sh (+140 lines)
#!/usr/bin/env bash
# verify-sources-archived.sh — Verify release archive integrity.
#
# Usage:
# ./local/scripts/verify-sources-archived.sh --release=0.1.0
#
# Checks that BLAKE3SUMS file exists and all archives match.
# If archives are in sources/<target>/ format, verifies those too.
# Returns non-zero if any archive is missing or corrupted.
# Fail fast: abort on errors, unset variables, and mid-pipeline failures.
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"  # absolute directory of this script
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"  # repo root (script lives in local/scripts/)
RELEASE=""  # release version; filled in by --release=<ver>
usage() {
# Print command-line help for this script to stdout.
cat <<EOF
Usage: $(basename "$0") --release=<ver>
Verify release archive integrity.
Options:
--release=<ver> Release version (e.g., 0.1.0)
-h, --help Show this help
EOF
}
# Parse command-line arguments.
while [ $# -gt 0 ]; do
  case "$1" in
    --release=*) RELEASE="${1#*=}" ;;
    -h|--help) usage; exit 0 ;;
    *)
      # Fix: diagnostics belong on stderr (this previously went to stdout).
      echo "Unknown: $1" >&2
      usage >&2
      exit 1
      ;;
  esac
  shift
done
if [ -z "$RELEASE" ]; then
  echo "ERROR: --release is required" >&2
  usage >&2  # consistency: show usage here too, like the unknown-argument path
  exit 1
fi
ARCHIVE_DIR="$PROJECT_ROOT/sources/redbear-$RELEASE"
# NOTE(review): MANIFEST is assigned but never read anywhere in this script —
# TODO confirm whether manifest.txt verification was intended, or drop it.
MANIFEST="$ARCHIVE_DIR/manifest.txt"
# ANSI color codes, stored as literal escape sequences (expanded by %b below).
GREEN='\033[1;32m'
RED='\033[1;31m'
NC='\033[0m'
# Result reporters. Fixes: printf '%b\n' replaces non-portable `echo -e`, and
# fail() now writes to stderr since failures are diagnostics.
pass() { printf '%b\n' "${GREEN}PASS${NC}: $*"; }
fail() { printf '%b\n' "${RED}FAIL${NC}: $*" >&2; }
errors=0  # running error count; non-zero means verification failed
# 1. Verify .complete sentry exists (release is sealed)
if [ -f "$ARCHIVE_DIR/.complete" ]; then
  pass ".complete sentry: $(cat "$ARCHIVE_DIR/.complete")"
else
  fail ".complete sentry NOT FOUND — release may be incomplete or corrupted"
  errors=$((errors + 1))
fi
# 2. Verify configs
if [ -d "$ARCHIVE_DIR/configs" ]; then
  # Fix: `ls <glob> | wc -l` aborts the whole script under `set -euo pipefail`
  # when the glob matches nothing (unexpanded pattern makes ls exit non-zero,
  # and pipefail propagates that into the assignment). find has no glob to
  # expand and exits 0 even for an empty directory.
  config_count=$(find "$ARCHIVE_DIR/configs" -maxdepth 1 -type f -name '*.toml' | wc -l)
  pass "configs: $config_count files"
else
  fail "configs directory not found"
  errors=$((errors + 1))
fi
# 3. Verify patches (a missing patches/ dir is deliberately not an error,
# matching the original behavior — patches are optional).
if [ -d "$ARCHIVE_DIR/patches" ]; then
  patch_count=$(find "$ARCHIVE_DIR/patches" -maxdepth 1 -type f -name '*.patch' | wc -l)
  pass "patches: $patch_count files"
fi
# NOTE(review): SOURCES_TARGET is assigned but never read (the header promises
# sources/<target>/ verification, which appears unimplemented) — TODO wire up
# or remove.
SOURCES_TARGET="$PROJECT_ROOT/sources/x86_64-unknown-redox"
# 4. Check for BLAKE3SUMS
if [ -f "$ARCHIVE_DIR/BLAKE3SUMS" ]; then
  pass "BLAKE3SUMS present ($(wc -l < "$ARCHIVE_DIR/BLAKE3SUMS") entries)"
  # Fix: the b3sum availability check was re-run inside the loop on every
  # entry, and partial missing-archive errors were reported before bailing.
  # The tool either exists or it does not, so test once up front.
  if ! command -v b3sum >/dev/null 2>&1; then
    fail "b3sum not available — cannot verify BLAKE3SUMS"
    errors=$((errors + 1))
  else
    verified=0
    failed_checksums=0
    # BLAKE3SUMS format: "<hash> <filename>", one entry per line.
    while read -r hash filename; do
      [ -z "$hash" ] && continue
      # Archives normally live in tarballs/; snapshots/ is the fallback.
      archive_path="$ARCHIVE_DIR/tarballs/$filename"
      if [ ! -f "$archive_path" ]; then
        archive_path="$ARCHIVE_DIR/snapshots/$filename"
      fi
      if [ ! -f "$archive_path" ]; then
        fail "archive missing: $filename"
        errors=$((errors + 1))
        continue
      fi
      computed=$(b3sum "$archive_path" | awk '{print $1}')
      if [ "$computed" != "$hash" ]; then
        fail "checksum mismatch: $filename (expected $hash, got $computed)"
        failed_checksums=$((failed_checksums + 1))
        errors=$((errors + 1))
      else
        verified=$((verified + 1))
      fi
    done < "$ARCHIVE_DIR/BLAKE3SUMS"
    if [ "$verified" -gt 0 ]; then
      pass "checksums verified: $verified archives"
    fi
    if [ "$failed_checksums" -gt 0 ]; then
      fail "$failed_checksums checksum mismatches"
    fi
  fi
else
  fail "BLAKE3SUMS not found in $ARCHIVE_DIR"
  errors=$((errors + 1))
fi
# 5. Count archives in sources/<target>/
# (Removed: a duplicate unused SOURCES_TARGET assignment that shadowed the
# identical one earlier in the script.)
if [ -d "$ARCHIVE_DIR/tarballs" ]; then
  # Fix: `ls *.tar.gz | wc -l` aborts under `set -euo pipefail` when the glob
  # matches nothing — which would kill the script right before the summary on
  # an empty tarballs/ dir. find handles the empty case and exits 0.
  archive_count=$(find "$ARCHIVE_DIR/tarballs" -maxdepth 1 -type f -name '*.tar.gz' | wc -l)
  pass "source archives: $archive_count files in $ARCHIVE_DIR/tarballs/"
fi
echo ""
# Final verdict banner: green VERIFIED on success, red error count otherwise.
if [ "$errors" -eq 0 ]; then
  echo -e "${GREEN}=========================================${NC}"
  echo -e "${GREEN} Release $RELEASE: VERIFIED${NC}"
  echo -e "${GREEN}=========================================${NC}"
else
  echo -e "${RED}=========================================${NC}"
  echo -e "${RED} Release $RELEASE: $errors error(s)${NC}"
  echo -e "${RED}=========================================${NC}"
  exit 1
fi