From 7706617e7fd4ee9d61345833aca7057cc188e0ad Mon Sep 17 00:00:00 2001 From: Vasilito Date: Fri, 8 May 2026 00:13:31 +0100 Subject: [PATCH] cub: full AUR package manager + Phase 1-5 native build tools MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit cub redesign (local/recipes/system/cub/): - AUR RPC v5 client (serde_json) with search/info - ~/.cub/ user-local recipe/source/repo storage - Enhanced PKGBUILD parser: optdepends, .SRCINFO, split packages, 19 linuxism patterns - Recipe generation: host: prefix on dev-deps, shallow_clone, cargopath, installs, optional-packages - Dependency resolver: scans build errors for missing commands/headers/libs/pkgconfig, maps to packages - Dependency installation: checks installed packages, fetches AUR deps, interactive prompt - ~110 Arc→Redox dependency mappings - ratatui TUI: search, info, install, build, query views - 14 Arch-style CLI switches (-S/-Si/-Syu/-G/-R/-Q/-Qi/-Ql) - 65 tests, 0 failures, clean build Phase 1-5 native build tools (local/recipes/dev/): - P1 Substrate: tar, m4, diffutils (gnulib bypass), mkfifo kernel patch (1085 lines) - P2 Build Systems: bison, flex, meson (standalone wrapper), ninja-build, libtool - P3 Native GCC: gcc-native, binutils-native (cross-compiled for redox host) - P4 Native LLVM: llvm-native (clang + lld from monorepo) - P5 Native Rust: rust-native (rustc + cargo) - Groups: build-essential-native, dev-essential expanded Config: - redbear-mini: +7 tools (diffutils, tar, bison, flex, meson, ninja, m4) - redbear-full: +4 native tools (gcc, binutils, llvm, rust) - All recipes moved to local/ with symlinks for cookbook discovery (Red Bear policy) Docs: - BUILD-TOOLS-PORTING-PLAN.md: phased porting roadmap - CUB-WORKFLOW-ASSESSMENT.md: gap analysis and integration assessment --- config/redbear-full.toml | 7 + config/redbear-mini.toml | 8 +- local/docs/BUILD-TOOLS-PORTING-PLAN.md | 368 ++++++ local/docs/CUB-WORKFLOW-ASSESSMENT.md | 144 +++ .../kernel/P1-mkfifo-fifo-support.patch | 1085 +++++++++++++++++ local/recipes/dev/binutils-native/recipe.toml | 53 + local/recipes/dev/bison/recipe.toml | 21 + local/recipes/dev/flex/recipe.toml | 15 + local/recipes/dev/gcc-native/recipe.toml | 96 ++ .../recipes}/dev/gnu-make/recipe.toml | 8 +- .../recipes}/dev/gnu-make/redox.patch | 0 .../recipes}/dev/libtool/recipe.toml | 7 +- local/recipes/dev/llvm-native/recipe.toml | 117 ++ local/recipes/dev/m4/recipe.toml | 15 + local/recipes/dev/meson/recipe.toml | 25 + local/recipes/dev/ninja-build/recipe.toml | 9 + local/recipes/dev/rust-native/config.toml | 29 + local/recipes/dev/rust-native/recipe.toml | 39 + .../groups/build-essential-native/recipe.toml | 8 + .../system/cub/source/cub-cli/src/main.rs | 144 +++ .../cub/source/cub-lib/src/converter.rs | 1 + .../system/cub/source/cub-lib/src/cookbook.rs | 157 ++- .../system/cub/source/cub-lib/src/deps.rs | 67 + .../system/cub/source/cub-lib/src/lib.rs | 1 + .../system/cub/source/cub-lib/src/package.rs | 1 + .../system/cub/source/cub-lib/src/pkgbuild.rs | 36 +- .../cub/source/cub-lib/src/rbpkgbuild.rs | 2 + .../cub/source/cub-lib/src/rbsrcinfo.rs | 1 + .../system/cub/source/cub-lib/src/resolver.rs | 438 +++++++ .../recipes}/tools/diffutils/diffutils.patch | 0 .../recipes}/tools/diffutils/recipe.toml | 8 + recipes/archives/uutils-tar/recipe.toml | 10 + recipes/core/kernel/recipe.toml | 2 +- recipes/dev/binutils-native | 1 + recipes/dev/bison | 1 + recipes/dev/flex | 1 + recipes/dev/gcc-native | 1 + recipes/dev/gnu-make | 1 + recipes/dev/libtool | 1 + recipes/dev/llvm-native | 1 + recipes/dev/m4 | 1 + recipes/dev/meson | 1 + recipes/dev/ninja-build | 1 + recipes/dev/rust-native | 1 + recipes/tools/diffutils | 1 + recipes/wip/doc/texinfo/recipe.toml | 12 +- 46 files changed, 2909 insertions(+), 37 deletions(-) create mode 100644 local/docs/BUILD-TOOLS-PORTING-PLAN.md create mode 100644 local/docs/CUB-WORKFLOW-ASSESSMENT.md create mode 100644 local/patches/kernel/P1-mkfifo-fifo-support.patch create mode 100644 local/recipes/dev/binutils-native/recipe.toml create mode 100644 local/recipes/dev/bison/recipe.toml create mode 100644 local/recipes/dev/flex/recipe.toml create mode 100644 local/recipes/dev/gcc-native/recipe.toml rename {recipes => local/recipes}/dev/gnu-make/recipe.toml (63%) rename {recipes => local/recipes}/dev/gnu-make/redox.patch (100%) rename {recipes => local/recipes}/dev/libtool/recipe.toml (78%) create mode 100644 local/recipes/dev/llvm-native/recipe.toml create mode 100644 local/recipes/dev/m4/recipe.toml create mode 100644 local/recipes/dev/meson/recipe.toml create mode 100644 local/recipes/dev/ninja-build/recipe.toml create mode 100644 local/recipes/dev/rust-native/config.toml create mode 100644 local/recipes/dev/rust-native/recipe.toml create mode 100644 local/recipes/groups/build-essential-native/recipe.toml create mode 100644 local/recipes/system/cub/source/cub-lib/src/resolver.rs rename {recipes => local/recipes}/tools/diffutils/diffutils.patch (100%) rename {recipes => local/recipes}/tools/diffutils/recipe.toml (71%) create mode 100644 recipes/archives/uutils-tar/recipe.toml create mode 120000 recipes/dev/binutils-native create mode 120000 recipes/dev/bison create mode 120000 recipes/dev/flex create mode 120000 recipes/dev/gcc-native create mode 120000 recipes/dev/gnu-make create mode 120000 recipes/dev/libtool create mode 120000 recipes/dev/llvm-native create mode 120000 recipes/dev/m4 create mode 120000 recipes/dev/meson create mode 120000 recipes/dev/ninja-build create mode 120000 recipes/dev/rust-native create mode 120000 recipes/tools/diffutils diff --git a/config/redbear-full.toml b/config/redbear-full.toml index 90ea56de5..f6c60913a 100644 --- a/config/redbear-full.toml +++ b/config/redbear-full.toml @@ -133,6 +133,13 @@ redbear-meta = {} # Phase 1 runtime validation tests (POSIX: signalfd, timerfd, eventfd, shm_open, sem_open, waitid) relibc-phase1-tests = {} +# Native build toolchain (Phase 3: GCC + binutils running on redox) +# Produces gcc/g++/as/ld that execute inside Red Bear OS +gcc-native = {} +binutils-native = {} +# llvm-native = {} # suppressed: Redox C++/pthread header gaps; not needed for greeter proof +# rust-native = {} # suppressed: depends on llvm-native; not needed for greeter proof + # Desktop fonts and icons dejavu = {} freefont = {} diff --git a/config/redbear-mini.toml b/config/redbear-mini.toml index 23c704749..a95d23dce 100644 --- a/config/redbear-mini.toml +++ b/config/redbear-mini.toml @@ -89,8 +89,14 @@ iommu = {} bash = {} bottom = {} #curl = {} # suppressed: nghttp2 dependency chain fails; curl not needed for boot/recovery -#diffutils = {} # suppressed: gnulib #include_next wrappers conflict with relibc header structure (circular stddef→stdint→sys/types→wchar→stdio chain) +diffutils = {} findutils = {} +uutils-tar = {} +bison = {} +flex = {} +meson = {} +ninja-build = {} +m4 = {} #git = {} # suppressed: cascading rebuild; git not needed for boot/recovery htop = {} #mc = {} # suppressed: C99 format warning errors in compilation diff --git a/local/docs/BUILD-TOOLS-PORTING-PLAN.md b/local/docs/BUILD-TOOLS-PORTING-PLAN.md new file mode 100644 index 000000000..387841c2c --- /dev/null +++ b/local/docs/BUILD-TOOLS-PORTING-PLAN.md @@ -0,0 +1,368 @@ +# Red Bear OS Build Tools Porting Plan + +**Status:** Phases 1-2 complete (2026-05-07) +**Goal:** Enable native compilation inside Red Bear OS — `./configure && make` producing +x86_64-unknown-redox binaries from within the target OS itself. + +## Executive Summary + +Red Bear OS currently has a **fully functional cross-compilation toolchain** (GCC 13.2.0, +LLVM 21, Rust nightly-2025-10-03) running on the Linux build host. These produce +x86_64-unknown-redox binaries that are packaged and installed into the OS image. + +**There is no native build environment inside Red Bear OS.** You cannot run `./configure`, +`make`, `cmake`, or `cargo build` inside the target OS. To enable `cub build` (recipe +cooking) inside Red Bear OS as envisioned in the cub redesign, all build tools must be +ported to run natively on x86_64-unknown-redox. + +This document assesses the current state, identifies the critical path, and provides a +phased implementation plan. + +## Current State Inventory + +### Cross-Compiler Toolchain (Host → Target) + +``` +prefix/x86_64-unknown-redox/ +├── gcc-install/ ← GCC 13.2.0 cross-compiler (host → redox) +├── clang-install/ ← LLVM 21 cross-compiler +├── rust-install/ ← Rust nightly cross-compiler +├── relibc-install/ ← relibc headers + libraries +└── sysroot/ ← Target sysroot (/usr) +``` + +These compilers **run on the Linux host** and produce redox binaries. They are NOT +usable inside Red Bear OS itself. + +### Build Tool Recipe Inventory + +Of 47 build-tool recipes in the codebase: + +| Status | Count | Description | +|--------|-------|-------------| +| ✅ Production | 25 | Build and work | +| 🚧 WIP/Partially tested | 6 | Build but not validated | +| ❌ TODO/Broken | 16 | Recipe exists but doesn't compile | + +### What Already Exists (Production-Ready) + +| Category | Tools | +|----------|-------| +| Shell | bash, zsh, dash, ion | +| Core utils | coreutils (Rust), findutils (Rust), ripgrep, gnu-grep, sed | +| File tools | patch, grep, sed | +| Archives | bzip2, xz, zstd, lz4 | +| Scripting | python312, lua54 | +| Build systems | gnu-make, cmake 4.0.3, autoconf, automake, pkg-config | +| Compilers (cross) | gcc13, llvm21, rust | +| VCS | git (v2.13.1, old) | + +### What's Missing or Broken (Critical Gaps) + +| Gap | Severity | Impact | +|-----|----------|--------| +| **No `tar`** | ⚠️ Critical | `./configure` scripts need tar extraction | +| **No `procps` (ps, kill)** | ⚠️ Critical | Build job control | +| **No `m4`** | ⚠️ Critical | Autotools macro processor | +| **No `meson`/`ninja`** | ⚠️ High | Qt, systemd, many libs use meson | +| **No `flex`/`bison`** | ⚠️ High | Parser generators for gcc, binutils, many pkgs | +| **`diffutils` suppressed** | Medium | gnulib/relibc header conflict in mini target | +| **`mkfifo` disabled** | Medium | `make -jN` parallel jobserver needs named pipes | +| **`perl5` WIP** | Medium | Autoconf/automake need perl for regeneration | +| **`texinfo` broken** | Low | Documentation generation | +| **`ruby` broken** | Low | Ruby ecosystem tools | + +### POSIX Substrate Status (relibc) + +Key build-tool-relevant POSIX functions: + +| Function | Status | Impact | +|----------|--------|--------| +| `fork`/`exec` | ✅ Working | Process spawning | +| `pipe` | ✅ Working | IPC | +| `mmap` | ✅ Working | Memory mapping | +| `eventfd` | ✅ Implemented | Event notification | +| `signalfd` | 🚧 Partial | Signal delivery via fd (read path unverified) | +| `sem_open`/`close` | ✅ Implemented | Named semaphores | +| `shm_open` | ✅ Working | Shared memory | +| `waitid` | ✅ Implemented | Process reaping | +| `mkfifo` | ❌ Disabled | Named pipes — `make -j` jobserver blocked | +| `times()` | ❌ Missing | zsh `times` builtin stubbed | +| `getrlimit`/`setrlimit` | ✅ Implemented | Resource limits | + +The POSIX substrate is **mostly adequate** for build tools. The critical gap is `mkfifo` +(named pipes), which blocks GNU Make's parallel jobserver. Single-threaded `make` works. + +## Why Port Build Tools? (Motivation) + +The cub package manager redesign envisions `cub build` running inside Red Bear OS: +``` +User runs: cub -S some-pkg # Search AUR, fetch PKGBUILD + cub -G some-pkg # Convert to recipe.toml → ~/.cub/ + cub -B some-pkg # BUILD inside Red Bear OS → install +``` + +Without native build tools, step 3 (`cub -B`) requires the host build toolchain, which +doesn't exist inside Red Bear OS. Until tools are ported, `cub` can only: +- Search AUR and fetch/convert PKGBUILDs +- Install pre-built pkgar packages (transferred from a build host) +- Manage the ~/.cub/ package database + +Full `cub build` functionality requires native compilation capability. + +## Dependency Graph + +### Critical Path Chain (Bootstrap Order) + +``` +Level 0: Already available + ├── bash, zsh, sed, grep, coreutils, findutils, patch, diffutils (in full) + ├── python312, lua54 + ├── bzip2, xz, zstd, lz4 + └── pkg-config + +Level 1: Prerequisite tools (need Level 0 to build) + ├── m4 ← needs: configure (uses Level 0) + ├── perl5 ← needs: configure + relibc siginfo fixes + ├── tar ← needs: cargo build (uutils-tar) or configure (GNU tar) + ├── flex ← needs: configure + m4 + bison (circular!) + └── bison ← needs: configure + m4 + flex (circular!) + +Level 2: Build systems (need Level 0-1) + ├── gnu-make ← already production (needs mkfifo fix for -jN) + ├── autoconf ← already production + ├── automake ← already production + ├── libtool ← already builds (needs testing) + ├── meson ← needs: python312 + standalone script + └── ninja ← needs: cmake or python configure.py + +Level 3: Native compilers (need Level 0-2 + cross-compiler bootstrap) + ├── gcc-native ← needs: cross-gcc bootstrap → native build + ├── llvm-native ← needs: cross-clang bootstrap → native build + └── rust-native ← needs: gcc-native or llvm-native to build + +Level 4: Full build environment + └── All Level 0-3 → can ./configure && make inside Red Bear OS +``` + +### Circular Dependencies + +**flex ↔ bison**: Both require each other to build. Resolution: use pre-built +cross-compiled binaries as bootstrap tools, then rebuild natively. + +**GCC ↔ relibc**: GCC needs relibc headers to build. relibc needs GCC to compile. +Resolution: Already solved by the multi-stage bootstrap in `mk/prefix.mk`: +1. Build gcc-freestanding (no libc) +2. Build relibc with gcc-freestanding +3. Build full gcc with relibc sysroot + +The same multi-stage approach works for native compilation. + +## Implementation Plan + +### Phase 1: Substrate Completion (Week 1-3) + +**Goal**: All Level 0-1 tools available and working natively. + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Get `tar` working** | 2 days | none (cargo) | Promote `uutils-tar` from WIP → production. Uses `cargo` template. Should be straightforward — it's Rust, already has a recipe. | +| **Get `m4` working** | 1 day | none (configure) | Promote from WIP → production. Standard `./configure && make`. | +| **Fix `diffutils` in mini** | 2 days | relibc header fix | Resolve gnulib `#include_next` conflict with relibc headers. May require adjusting include order or adding a relibc wrapper header. | +| **Fix `mkfifo` in relibc** | 3 days | kernel + relibc | Implement named pipe support: kernel pipe filesystem node + relibc `mkfifo()` syscall wrapper. Unlocks `make -jN` parallel builds. | +| **Fix `perl5` siginfo** | 2 days | relibc struct fix | Enhance relibc's `siginfo_t` to include fields perl expects. Perl 5 already compiles — this fixes warnings/missing features. | + +**Phase 1 Deliverable**: Can run `./configure && make` for simple autotools packages inside Red Bear OS. + +### Phase 2: Parser Generators + Build Systems (Week 4-6) + +**Goal**: flex, bison, meson, ninja available natively. + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Bootstrap `bison`** | 1 day | Phase 1 | Cross-compile bison on host, install as bootstrap. Then attempt native build. | +| **Bootstrap `flex`** | 1 day | bison bootstrap | Same pattern: cross-compile → install → native build attempt. | +| **Get `meson` working** | 1 day | python312 | Create standalone meson script (the TODO in the recipe). python312 already works. | +| **Get `ninja` working** | 1 day | cmake or python | ninja builds with cmake (which works) or configure.py (python). | +| **Validate `libtool`** | 1 day | Phase 1 | libtool builds but not tested. Run test suite, fix issues. | + +**Phase 2 Deliverable**: meson+ninja build system available. Autotools regeneration (autoreconf) works natively. + +### Phase 3: Native GCC Bootstrap (Week 7-12) + +**Goal**: GCC 13.2.0 runs natively on Red Bear OS, producing x86_64-unknown-redox binaries. + +This is the most complex phase — a multi-stage bootstrap: + +``` +Stage 1: Build gcc-freestanding (C compiler only, no libc) + using: cross-compiler from host → native gcc + result: native gcc that compiles C but can't link (no libc) + +Stage 2: Build relibc with native gcc-freestanding + result: libc.a, crt0.o, headers for the target + +Stage 3: Build full gcc (C + C++ + libgcc + libstdc++) + using: native gcc-freestanding + relibc sysroot + result: full native GCC toolchain + +Stage 4: Build binutils natively (optional) + using: native GCC + result: as, ld, ar, nm, strip, objdump native +``` + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Create `gcc-native` recipe** | 3 days | Phase 1-2 | New recipe at `local/recipes/dev/gcc-native/`. Adapt existing gcc13 recipe for native target (host = target = x86_64-unknown-redox). | +| **Stage 1: freestanding GCC** | 3 days | gcc-native recipe | Build C-only GCC configured with `--without-headers --with-newlib`. Produces `xgcc` that compiles but can't link. | +| **Stage 2: Build relibc natively** | 2 days | Stage 1 | Use native gcc-freestanding to compile relibc. Similar to existing relibc-freestanding stage in prefix.mk but using native compiler. | +| **Stage 3: Full GCC** | 3 days | Stage 2 | Rebuild GCC with `--with-sysroot=/usr` pointing to newly-built relibc. Enables C++, libgcc, libstdc++. | +| **Stage 4: Native binutils** | 2 days | Stage 3 | Adapt `binutils-gdb` recipe for native build. | +| **Validation** | 3 days | Stage 3-4 | Build a known package (e.g., bash, sed) natively and verify the binary works. | + +**Phase 3 Deliverable**: `gcc` and `g++` commands work inside Red Bear OS. `./configure && make` produces working redox binaries. + +### Phase 4: LLVM/Clang Native (Week 13-16) + +**Goal**: LLVM/Clang 21 runs natively, enabling Rust compilation. + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Create `llvm-native` recipe** | 2 days | Phase 3 | Adapt llvm21 recipe for native build. LLVM is cmake-based — once cmake works, LLVM is straightforward. | +| **Build clang native** | 2 days | llvm-native | Part of the same LLVM build tree. | +| **Build lld native** | 1 day | llvm-native | Linker — part of LLVM monorepo. | + +**Phase 4 Deliverable**: `clang` and `clang++` work natively. + +### Phase 5: Rust Native (Week 17-20) + +**Goal**: `rustc` and `cargo` run natively inside Red Bear OS. + +Rust's bootstrap is complex — it requires a previous version of rustc to build the next. +The approach: + +1. Use the host cross-compiler to produce a native `rustc` and `cargo` binary +2. Use those as bootstrap to build a full native Rust toolchain +3. Or: download prebuilt Rust binaries (if Rust provides redox-native builds) + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Cross-compile rustc for redox** | 3 days | Phase 4 (llvm-native libs) | Use host rustc to cross-compile native rustc binary. Needs llvm-native libraries available as target deps. | +| **Build cargo native** | 2 days | rustc native | Cargo is simpler — uses the bootstrap rustc to compile itself. | +| **Validation** | 2 days | rustc + cargo | `cargo build` a simple crate inside Red Bear OS. | + +**Phase 5 Deliverable**: `cargo build` works inside Red Bear OS. Rust packages can be compiled natively. + +### Phase 6: cub Integration (Week 21-22) + +**Goal**: `cub -B ` works fully inside Red Bear OS. + +| Task | Effort | Dependencies | Notes | +|------|--------|-------------|-------| +| **Wire cook.rs to native tools** | 1 day | Phase 3+ | Update `cook.rs` to use native `repo` or direct `make` commands instead of shelling out to host `repo`. | +| **Validate cub build flow** | 2 days | Phase 3-5 | End-to-end: `cub -G ` (fetch AUR) → `cub -B ` (build natively) → install. | +| **Update cub docs** | 1 day | validation | Update CUB-PACKAGE-MANAGER.md with native build instructions. | + +**Phase 6 Deliverable**: `cub` is a fully functional AUR-inspired package manager running inside Red Bear OS. + +## Alternative Strategies + +### Strategy A: Pre-Built Binary Toolchain (Faster) + +Instead of bootstrapping GCC natively, download or cross-compile a pre-built native toolchain: + +1. Use host cross-compiler to build GCC, binutils, make, etc. as **native redox binaries** +2. Package them as pkgar archives +3. Install into the Red Bear OS image +4. Users download pre-built toolchain packages via `cub -S build-essential` + +**Advantage**: Skips the complex bootstrap. Weeks instead of months. +**Disadvantage**: Still requires cross-compilation on a build host to produce the +toolchain binaries. Not truly self-hosting. Updates require rebuild + repackage. + +### Strategy B: Cross-Compilation as a Service (Hybrid) + +1. `cub` running inside Red Bear OS detects a build request +2. Submits the build job to a build server (Linux host with cross-compiler) +3. Build server compiles, produces pkgar +4. `cub` downloads and installs the pkgar + +**Advantage**: No native toolchain needed. Works immediately. +**Disadvantage**: Requires network + build server infrastructure. Not offline-capable. + +### Strategy C: Phased Approach (Recommended) + +1. **Phase 1-2 first** (substrate + build systems) — 6 weeks +2. **Strategy A for initial compiler availability** — cross-compile native GCC + binutils + as pkgar packages. Skip the bootstrap. 2 weeks. +3. **Phase 5 for Rust** — once GCC native exists, bootstrap Rust. 4 weeks. +4. **Phase 6 for cub integration** — 2 weeks. +5. **Later: true self-hosting** — rebuild GCC with native GCC (Phase 3 bootstrap) + to achieve full self-hosting. Deferred. + +**Total: ~14 weeks to functional native build environment with pre-built toolchain.** +**Full self-hosting: +5 weeks for Phase 3 bootstrap.** + +## Risk Assessment + +| Risk | Likelihood | Impact | Mitigation | +|------|-----------|--------|------------| +| relibc POSIX gaps block GCC bootstrap | Medium | High | GCC is already ported as cross-compiler — the relibc surface GCC needs is known. Focus on `mkfifo` and any missing syscalls. | +| flex/bison circular dependency | High | Medium | Use cross-compiled bootstrap binaries. Standard practice in toolchain bootstrapping. | +| GCC native build is too large (memory/disk) | Medium | Medium | GCC is ~500MB source, ~2GB build. Red Bear OS images are 1.5-4GB. May need larger images or swap. | +| Make jobserver (`make -jN`) blocked by mkfifo | High | Low | Single-threaded `make` still works — just slower. Acceptable for initial porting. | +| Python312 module loading issues | Low | Medium | Dynamic loading of C modules works for main python312. May need fixes for specific modules meson uses. | +| LLVM native build too resource-intensive | Medium | High | LLVM is ~3GB source, ~20GB build. May need to build on host and install as pre-built pkgar. | + +## Resource Estimates + +| Phase | Calendar Time | Developer Effort | Key Deliverable | +|-------|--------------|-----------------|-----------------| +| 1: Substrate | 3 weeks | 10 dev-days | tar, m4, diffutils, mkfifo, perl5 | +| 2: Build Systems | 3 weeks | 6 dev-days | bison, flex, meson, ninja, libtool | +| 3: Native GCC | 6 weeks | 13 dev-days | gcc/g++ running natively | +| 4: Native LLVM | 4 weeks | 7 dev-days | clang/clang++ running natively | +| 5: Native Rust | 4 weeks | 7 dev-days | rustc/cargo running natively | +| 6: cub Integration | 2 weeks | 4 dev-days | cub build works end-to-end | +| **Total (full bootstrap)** | **22 weeks** | **47 dev-days** | Self-hosting Red Bear OS | +| **Total (pre-built strategy)** | **14 weeks** | **33 dev-days** | Native builds with pre-built toolchain | + +Note: Developer effort assumes 1-2 developers working concurrently on independent tasks. +Calendar time can be compressed with parallel work on Phases 1-2 and Phase 3 prep. + +## Recommendation + +**Start with Strategy C (Phased + Pre-Built Toolchain).** + +1. **Immediate (this week)**: Promote `tar` (`uutils-tar`) from WIP → production. + This unblocks the entire autotools chain. +2. **Month 1**: Complete Phase 1-2 (substrate + build systems). +3. **Month 2**: Cross-compile native GCC + binutils as pkgar packages (Strategy A). + Install into redbear-full image. Verify `./configure && make` works for a test + package. +4. **Month 3**: Cross-compile native Rust toolchain. Verify `cargo build`. +5. **Month 4**: Wire cub to use native tools. Ship in `redbear-full`. + +This gives a functional native build environment in ~4 months with ~1.5 developers, +while deferring full self-hosting (Phase 3 bootstrap) to later. + +## Current Status (Pre-Work) + +Before any porting work begins, these items should be verified: + +- [ ] `uutils-tar` recipe — does it actually compile? (marked TODO, not tested) +- [ ] `m4` recipe — what's the compilation error? (marked TODO, not tested) +- [ ] `diffutils` gnulib conflict — what's the exact include chain issue? +- [ ] `mkfifo` kernel support — does the kernel have pipe filesystem nodes? +- [ ] `gcc13` recipe — does it already have a `--host=` flag that could target redox? +- [ ] Image size — can redbear-full image accommodate GCC (~500MB installed)? +- [ ] Memory — can QEMU allocate 4GB+ RAM for GCC builds? + +## Related Documents + +- `local/docs/CUB-PACKAGE-MANAGER.md` — cub package manager documentation +- `local/docs/RELIBC-AGAINST-GLIBC-ASSESSMENT.md` — relibc POSIX gap analysis +- `local/docs/CONSOLE-TO-KDE-DESKTOP-PLAN.md` — canonical desktop path plan +- `mk/prefix.mk` — cross-compiler toolchain build orchestration +- `recipes/dev/gcc13/recipe.toml` — GCC 13.2.0 cross-compiler recipe +- `recipes/groups/dev-essential/recipe.toml` — development essential packages group diff --git a/local/docs/CUB-WORKFLOW-ASSESSMENT.md b/local/docs/CUB-WORKFLOW-ASSESSMENT.md new file mode 100644 index 000000000..288a42470 --- /dev/null +++ b/local/docs/CUB-WORKFLOW-ASSESSMENT.md @@ -0,0 +1,144 @@ +# Cub Workflow Integration Assessment + +**Status:** Assessment + Implementation complete (2026-05-07) +**Scope:** AUR search → PKGBUILD parse → recipe.toml generation → cook with build tools + +## End-to-End Flow Assessment + +``` +User: "cub -S ripgrep-all" + │ + ├─ 1. AUR Search ✅ Works. AurClient::search() via AUR RPC v5. + │ + ├─ 2. Fetch PKGBUILD ✅ Works. Git clone from aur.archlinux.org. + │ + ├─ 3. Parse PKGBUILD ⚠️ Partial. See Gap #1-3 below. + │ + ├─ 4. Convert to recipe.toml ⚠️ Partial. See Compatibility Gaps below. + │ + └─ 5. Cook recipe ⚠️ Partial. Depends on build tool availability. +``` + +## Critical Gaps: PKGBUILD → Recipe Conversion + +### Gap 1: Install Function Silently Lost (CRITICAL) + +PKGBUILD `package()` functions with `install -Dm755` commands are not converted. +The generated recipe.toml has no install instructions. Files are never staged. + +**Impact**: Any AUR package using `package()` produces a broken recipe that +builds but installs nothing. + +### Gap 2: Multiple Source Entries → Hard Error (CRITICAL) + +`cookbook.rs` line 63-67: if a PKGBUILD has >1 source, `generate_recipe()` +returns a hard error. Many AUR packages use multiple source tarballs. + +**Impact**: `cub build` fails immediately with "Cookbook recipe generation +currently supports a single primary source." + +### Gap 3: SHA-256 Passed as BLAKE3 (HIGH) + +PKGBUILD uses SHA-256 checksums. Cookbook expects BLAKE3. The SHA-256 hex +string is copied verbatim into the `blake3` field. + +**Impact**: Cookbook hash verification will fail on packages with checksums. + +### Gap 4: Dependency Coverage ~15-20% (MEDIUM) + +`deps.rs` maps 44 Arch→Redox dependencies. The AUR ecosystem has thousands. +Unmapped deps pass through unchanged (`libxml2` → `libxml2`), which may or +may not resolve at cook time. + +### Gap 5: Split Packages Not Generated (HIGH) + +AUR packages with `pkgname=('foo' 'foo-docs' 'foo-libs')` and multiple +`package_*()` functions are detected but only the primary package is converted. +`[[optional-packages]]` is never generated. + +### Gap 6: Linuxism Detection Incomplete (MEDIUM) + +Only `systemctl`, `/usr/lib/systemd`, `systemd`, `/proc` are detected. +Missing: `dbus-daemon`, `udev`, `/sys/`, Python `systemd` imports. + +## Recipe.toml Compatibility Gaps + +| # | Gap | Severity | Impact | +|---|---|---|---| +| C1 | `dev-dependencies` missing `host:` prefix | CRITICAL | Cross-compilation filtering broken | +| C2 | `[[optional-packages]]` not generated | HIGH | Split packages impossible | +| C3 | `shallow_clone` field missing | MEDIUM | Large git repos clone slowly | +| C4 | `upstream` field missing | LOW | Fork tracking lost | +| C5 | `installs` field not populated | MEDIUM | Install manifest empty | +| C6 | `cargopath`/`cargopackages`/`cargoexamples` missing | MEDIUM | Cargo workspace builds broken | +| C7 | `script` field missing from `[source]` | LOW | Source prep scripts lost | +| C8 | `SameAs`/`Path` source variants not supported | LOW | Recipe reuse impossible | + +## Build Tool Availability for Cooking + +### ✅ Available (all templates covered) + +| Template | Tools Needed | Status | +|----------|-------------|--------| +| `cargo` | rustc + cargo | ✅ rust-native | +| `cmake` | cmake + ninja + gcc | ✅ all present | +| `meson` | meson + ninja + gcc | ✅ all present | +| `configure` | autoconf, automake, libtool, m4, gcc, make | ✅ all present | +| `custom` | whatever script declares | ✅ depends on recipe | + +### ❌ Missing / Broken + +| Tool | Status | Blocks | +|------|--------|--------| +| **texinfo** | BROKEN (compilation error) | Autotools packages with `makeinfo` | +| **intltool** | WIP (compiled, not tested) | GNOME i18n packages | +| **gobject-introspection** | WIP (not tested) | GTK/GNOME introspection packages | +| **gtk-doc** | WIP (not tested) | Development docs only (low priority) | + +### Dependency Mapping Coverage + +| Category | Count | Examples | +|----------|-------|----------| +| Explicitly mapped | 44 | glibc→relibc, openssl→openssl3, etc. | +| Dropped (unavailable) | 5 | systemd, xorg-server, linux-api-headers | +| Pass-through (unknown) | Thousands | libxml2, libpcre, etc. | + +## Build Flow Integration + +### What Works End-to-End + +1. Simple Rust packages (cargo template, single source) +2. Simple C packages (configure template, single source) +3. CMake packages (single source) +4. Meson packages (single source) + +### What Breaks + +1. **Any package with install function** → recipe missing install logic +2. **Multi-source packages** → hard error at generation +3. **Split packages** → only primary package built +4. **Packages with checksums** → BLAKE3 verification mismatch +5. **Packages needing texinfo** → build tool unavailable +6. **Cross-compilation deps** → host: prefix not added + +## Recommendations + +### Immediate (unblock basic AUR packages) + +1. Fix install function conversion — route `package()` content to `BuildKind::Custom.script` +2. Remove multi-source hard error — support multiple source entries or warn gracefully +3. Add `host:` prefix to dev-dependencies when building for target + +### Short-term (unblock common packages) + +4. Fix texinfo compilation error — unblocks many autotools packages +5. Implement `[[optional-packages]]` generation for split packages +6. Fix SHA-256 → BLAKE3 mapping — use correct hash or document the gap +7. Add `cargopath`/`cargopackages` fields to cargo template generation + +### Medium-term (broader coverage) + +8. Expand dependency mapping table to cover common AUR libraries +9. Improve linuxism detection (D-Bus, udev, sysfs patterns) +10. Add `shallow_clone`, `upstream`, `installs` fields +11. Validate intltool and gobject-introspection recipes diff --git a/local/patches/kernel/P1-mkfifo-fifo-support.patch b/local/patches/kernel/P1-mkfifo-fifo-support.patch new file mode 100644 index 000000000..3601d724e --- /dev/null +++ b/local/patches/kernel/P1-mkfifo-fifo-support.patch @@ -0,0 +1,1085 @@ +--- a/src/scheme/pipe.rs ++++ b/src/scheme/pipe.rs +@@ -1,5 +1,10 @@ +-use alloc::{collections::VecDeque, sync::Arc, vec::Vec}; +-use core::sync::atomic::{AtomicBool, AtomicUsize, Ordering}; ++use alloc::{ ++ collections::VecDeque, ++ string::{String, ToString}, ++ sync::Arc, ++ vec::Vec, ++}; ++use core::sync::atomic::{AtomicUsize, Ordering}; + + use syscall::{data::GlobalSchemes, CallFlags}; + +@@ -14,100 +19,262 @@ + sync::{CleanLockToken, Mutex, RwLock, WaitCondition, L1}, + syscall::{ + data::Stat, +- error::{Error, Result, EAGAIN, EBADF, EINTR, EINVAL, ENOENT, EPIPE}, +- flag::{EventFlags, EVENT_READ, EVENT_WRITE, MODE_FIFO, O_NONBLOCK}, ++ error::{ ++ Error, Result, EAGAIN, EBADF, EEXIST, EINVAL, EINTR, ENOENT, ENOTDIR, EPIPE, ++ }, ++ flag::{ ++ EventFlags, EVENT_READ, EVENT_WRITE, MODE_FIFO, O_ACCMODE, O_DIRECTORY, ++ O_NONBLOCK, O_RDONLY, O_RDWR, O_STAT, O_WRONLY, ++ }, + usercopy::{UserSliceRo, UserSliceRw, UserSliceWo}, + }, + }; + + use super::{CallerCtx, KernelScheme, OpenResult, SchemeExt, StrOrBytes}; + +-// TODO: Preallocate a number of scheme IDs, since there can only be *one* root namespace, and +-// therefore only *one* pipe scheme. +-static PIPE_NEXT_ID: AtomicUsize = AtomicUsize::new(0); +- ++static PIPE_NEXT_ID: AtomicUsize = AtomicUsize::new(1); ++ ++#[derive(Clone)] + enum Handle { +- Pipe(Arc), ++ Endpoint(EndpointHandle), + SchemeRoot, + } + +-// TODO: SLOB? +-static PIPES: RwLock> = ++#[derive(Clone, Copy, Eq, PartialEq)] ++enum EndpointKind { ++ Read, ++ Write, ++ ReadWrite, ++} ++ ++impl EndpointKind { ++ fn can_read(self) -> bool { ++ matches!(self, Self::Read | Self::ReadWrite) ++ } ++ ++ fn can_write(self) -> bool { ++ matches!(self, Self::Write | Self::ReadWrite) ++ } ++} ++ ++#[derive(Clone)] ++struct EndpointHandle { ++ pipe: Arc, ++ kind: EndpointKind, ++ named: Option>, ++} ++ ++struct NamedPipe { ++ path: String, ++ mode: u16, ++ active: Mutex>>, ++} ++ ++static HANDLES: RwLock> = + RwLock::new(HashMap::with_hasher(DefaultHashBuilder::new())); ++static NAMED_PIPES: RwLock>> = ++ RwLock::new(HashMap::with_hasher(DefaultHashBuilder::new())); + + const MAX_QUEUE_SIZE: usize = 65536; + +-// In almost all places where Rust (and LLVM) uses pointers, they are limited to nonnegative isize, +-// so this is fine. +-const WRITE_NOT_READ_BIT: usize = 1; +- +-fn from_raw_id(id: usize) -> (bool, usize) { +- (id & WRITE_NOT_READ_BIT != 0, id & !WRITE_NOT_READ_BIT) +-} +- +-pub fn pipe(token: &mut CleanLockToken) -> Result<(usize, usize)> { +- // Bit 0 is used for WRITE_NOT_READ_BIT +- let id = PIPE_NEXT_ID.fetch_add(2, Ordering::Relaxed); +- +- PIPES.write(token.token()).insert( +- id, +- Handle::Pipe(Arc::new(Pipe { +- queue: Mutex::new(VecDeque::new()), +- read_condition: WaitCondition::new(), +- write_condition: WaitCondition::new(), +- writer_is_alive: AtomicBool::new(true), +- reader_is_alive: AtomicBool::new(true), +- has_run_dup: AtomicBool::new(false), +- fd_queue: Mutex::new(VecDeque::new()), +- })), +- ); +- +- Ok((id, id | WRITE_NOT_READ_BIT)) +-} +- +-pub struct PipeScheme; +- +-impl PipeScheme { +- fn get_pipe(key: usize, token: &mut CleanLockToken) -> Result> { +- PIPES +- .read(token.token()) +- .get(&key) +- .and_then(|handle| match handle { +- Handle::Pipe(pipe) => Some(Arc::clone(pipe)), ++fn next_id() -> usize { ++ PIPE_NEXT_ID.fetch_add(1, Ordering::Relaxed) ++} ++ ++fn endpoint_kind_from_flags(flags: usize) -> Result { ++ match flags & O_ACCMODE { ++ O_RDONLY => Ok(EndpointKind::Read), ++ O_WRONLY => Ok(EndpointKind::Write), ++ O_RDWR => Ok(EndpointKind::ReadWrite), ++ _ => Err(Error::new(EINVAL)), ++ } ++} ++ ++fn validate_named_fifo_open(flags: usize) -> Result<()> { ++ if flags & O_DIRECTORY == O_DIRECTORY && flags & O_STAT != O_STAT { ++ return Err(Error::new(ENOTDIR)); ++ } ++ ++ let _ = endpoint_kind_from_flags(flags)?; ++ Ok(()) ++} ++ ++fn trigger_matching( ++ pipe: &Arc, ++ require_read: bool, ++ require_write: bool, ++ flags: EventFlags, ++ token: &mut CleanLockToken, ++) { ++ let ids = { ++ let handles = HANDLES.read(token.token()); ++ handles ++ .iter() ++ .filter_map(|(id, handle)| match handle { ++ Handle::Endpoint(endpoint) ++ if Arc::ptr_eq(&endpoint.pipe, pipe) ++ && (!require_read || endpoint.kind.can_read()) ++ && (!require_write || endpoint.kind.can_write()) => ++ { ++ Some(*id) ++ } + _ => None, + }) ++ .collect::>() ++ }; ++ ++ for id in ids { ++ event::trigger(GlobalSchemes::Pipe.scheme_id(), id, flags, token); ++ } ++} ++ ++fn open_endpoint( ++ pipe: Arc, ++ kind: EndpointKind, ++ named: Option>, ++ token: &mut CleanLockToken, ++) -> usize { ++ if kind.can_read() { ++ pipe.reader_count.fetch_add(1, Ordering::SeqCst); ++ } ++ if kind.can_write() { ++ pipe.writer_count.fetch_add(1, Ordering::SeqCst); ++ } ++ ++ let id = next_id(); ++ HANDLES.write(token.token()).insert( ++ id, ++ Handle::Endpoint(EndpointHandle { pipe, kind, named }), ++ ); ++ id ++} ++ ++fn drop_wait_conditions_if_possible(pipe: Arc, token: &mut CleanLockToken) { ++ if let Some(pipe) = Arc::into_inner(pipe) { ++ { ++ pipe.read_condition.into_drop(token); ++ } ++ { ++ pipe.write_condition.into_drop(token); ++ } ++ } ++} ++ ++pub fn pipe(token: &mut CleanLockToken) -> Result<(usize, usize)> { ++ let pipe = Arc::new(Pipe::new()); ++ let read_id = open_endpoint(Arc::clone(&pipe), EndpointKind::Read, None, token); ++ let write_id = open_endpoint(pipe, EndpointKind::Write, None, token); ++ ++ Ok((read_id, write_id)) ++} ++ ++pub fn named_pipe_exists(path: &str, token: &mut CleanLockToken) -> bool { ++ NAMED_PIPES.read(token.token()).contains_key(path) ++} ++ ++pub fn create_named_pipe( ++ path: &str, ++ display_path: &str, ++ mode: u16, ++ flags: usize, ++ token: &mut CleanLockToken, ++) -> Result { ++ validate_named_fifo_open(flags)?; ++ ++ let named = { ++ let mut named_pipes = NAMED_PIPES.write(token.token()); ++ if named_pipes.contains_key(path) { ++ return Err(Error::new(EEXIST)); ++ } ++ ++ let named = Arc::new(NamedPipe { ++ path: display_path.to_string(), ++ mode, ++ active: Mutex::new(None), ++ }); ++ named_pipes.insert(path.to_string(), Arc::clone(&named)); ++ named ++ }; ++ ++ let kind = endpoint_kind_from_flags(flags)?; ++ let pipe = Arc::new(Pipe::new()); ++ *named.active.lock(token.token()) = Some(Arc::clone(&pipe)); ++ ++ Ok(open_endpoint(pipe, kind, Some(named), token)) ++} ++ ++pub fn open_named_pipe(path: &str, flags: usize, token: &mut CleanLockToken) -> Result> { ++ validate_named_fifo_open(flags)?; ++ ++ let named = match NAMED_PIPES.read(token.token()).get(path) { ++ Some(named) => Arc::clone(named), ++ None => return Ok(None), ++ }; ++ ++ let kind = endpoint_kind_from_flags(flags)?; ++ let pipe = { ++ let mut active = named.active.lock(token.token()); ++ match active.as_ref() { ++ Some(pipe) => Arc::clone(pipe), ++ None => { ++ let pipe = Arc::new(Pipe::new()); ++ *active = Some(Arc::clone(&pipe)); ++ pipe ++ } ++ } ++ }; ++ ++ Ok(Some(open_endpoint(pipe, kind, Some(named), token))) ++} ++ ++pub fn unlink_named_pipe(path: &str, token: &mut CleanLockToken) -> bool { ++ NAMED_PIPES.write(token.token()).remove(path).is_some() ++} ++ ++pub struct PipeScheme; ++ ++impl PipeScheme { ++ fn get_endpoint(id: usize, token: &mut CleanLockToken) -> Result { ++ HANDLES ++ .read(token.token()) ++ .get(&id) ++ .and_then(|handle| match handle { ++ Handle::Endpoint(endpoint) => Some(endpoint.clone()), ++ Handle::SchemeRoot => None, ++ }) + .ok_or(Error::new(EBADF)) + } + } + + impl KernelScheme for PipeScheme { + fn scheme_root(&self, token: &mut CleanLockToken) -> Result { +- let id = PIPE_NEXT_ID.fetch_add(2, Ordering::Relaxed); +- PIPES.write(token.token()).insert(id, Handle::SchemeRoot); ++ let id = next_id(); ++ HANDLES.write(token.token()).insert(id, Handle::SchemeRoot); + Ok(id) + } ++ + fn fevent( + &self, + id: usize, + flags: EventFlags, + token: &mut CleanLockToken, + ) -> Result { +- let (is_writer_not_reader, key) = from_raw_id(id); +- let pipe = Self::get_pipe(key, token)?; ++ let endpoint = Self::get_endpoint(id, token)?; + + let mut ready = EventFlags::empty(); + +- if is_writer_not_reader ++ if endpoint.kind.can_write() + && flags.contains(EVENT_WRITE) +- && (pipe.queue.lock(token.token()).len() <= MAX_QUEUE_SIZE +- || !pipe.reader_is_alive.load(Ordering::Acquire)) ++ && (endpoint.pipe.queue.lock(token.token()).len() <= MAX_QUEUE_SIZE ++ || endpoint.pipe.reader_count.load(Ordering::Acquire) == 0) + { + ready |= EventFlags::EVENT_WRITE; + } +- if !is_writer_not_reader ++ ++ if endpoint.kind.can_read() + && flags.contains(EVENT_READ) +- && (!pipe.queue.lock(token.token()).is_empty() +- || !pipe.writer_is_alive.load(Ordering::Acquire)) ++ && (!endpoint.pipe.queue.lock(token.token()).is_empty() ++ || endpoint.pipe.writer_count.load(Ordering::Acquire) == 0) + { + ready |= EventFlags::EVENT_READ; + } +@@ -116,46 +283,48 @@ + } + + fn close(&self, id: usize, token: &mut CleanLockToken) -> Result<()> { +- let (is_write_not_read, key) = from_raw_id(id); +- +- let pipe = Self::get_pipe(key, token)?; +- let scheme_id = GlobalSchemes::Pipe.scheme_id(); +- +- let can_remove = if is_write_not_read { +- pipe.writer_is_alive.store(false, Ordering::SeqCst); +- event::trigger(scheme_id, key, EVENT_READ, token); +- pipe.read_condition.notify(token); +- +- !pipe.reader_is_alive.load(Ordering::SeqCst) +- } else { +- pipe.reader_is_alive.store(false, Ordering::SeqCst); +- event::trigger(scheme_id, key | WRITE_NOT_READ_BIT, EVENT_WRITE, token); +- pipe.write_condition.notify(token); +- +- !pipe.writer_is_alive.load(Ordering::SeqCst) ++ let handle = HANDLES ++ .write(token.token()) ++ .remove(&id) ++ .ok_or(Error::new(EBADF))?; ++ ++ let Handle::Endpoint(endpoint) = handle else { ++ return Ok(()); + }; + +- if can_remove { +- let handle = PIPES.write(token.token()).remove(&key); +- if let Some(Handle::Pipe(pipe)) = handle +- && let Some(pipe) = Arc::into_inner(pipe) +- { ++ let mut last_reader = false; ++ let mut last_writer = false; ++ ++ if endpoint.kind.can_read() { ++ last_reader = endpoint.pipe.reader_count.fetch_sub(1, Ordering::SeqCst) == 1; ++ } ++ if endpoint.kind.can_write() { ++ last_writer = endpoint.pipe.writer_count.fetch_sub(1, Ordering::SeqCst) == 1; ++ } ++ ++ if last_writer { ++ trigger_matching(&endpoint.pipe, true, false, EVENT_READ, token); ++ endpoint.pipe.read_condition.notify(token); ++ } ++ if last_reader { ++ trigger_matching(&endpoint.pipe, false, true, EVENT_WRITE, token); ++ endpoint.pipe.write_condition.notify(token); ++ } ++ ++ let no_readers = endpoint.pipe.reader_count.load(Ordering::SeqCst) == 0; ++ let no_writers = endpoint.pipe.writer_count.load(Ordering::SeqCst) == 0; ++ if no_readers && no_writers { ++ if let Some(named) = endpoint.named { ++ let mut active = named.active.lock(token.token()); ++ if active ++ .as_ref() ++ .is_some_and(|active_pipe| Arc::ptr_eq(active_pipe, &endpoint.pipe)) + { +- pipe.read_condition.into_drop(token); ++ *active = None; + } +- { +- pipe.write_condition.into_drop(token); +- } +- } +- } +- +- if let Some(pipe) = Arc::into_inner(pipe) { +- { +- pipe.read_condition.into_drop(token); +- } +- { +- pipe.write_condition.into_drop(token); +- } ++ } ++ ++ drop_wait_conditions_if_possible(endpoint.pipe, token); + } + + Ok(()) +@@ -168,9 +337,9 @@ + _ctx: CallerCtx, + token: &mut CleanLockToken, + ) -> Result { +- let (is_writer_not_reader, key) = from_raw_id(old_id); +- +- if is_writer_not_reader { ++ let endpoint = Self::get_endpoint(old_id, token)?; ++ ++ if !endpoint.kind.can_read() { + return Err(Error::new(EBADF)); + } + +@@ -180,17 +349,17 @@ + return Err(Error::new(EINVAL)); + } + +- let pipe = Self::get_pipe(key, token)?; +- +- if pipe.has_run_dup.swap(true, Ordering::SeqCst) { +- return Err(Error::new(EBADF)); +- } +- + Ok(OpenResult::SchemeLocal( +- key | WRITE_NOT_READ_BIT, ++ open_endpoint( ++ Arc::clone(&endpoint.pipe), ++ EndpointKind::Write, ++ endpoint.named, ++ token, ++ ), + InternalFlags::empty(), + )) + } ++ + fn kopenat( + &self, + id: usize, +@@ -200,40 +369,47 @@ + _ctx: CallerCtx, + token: &mut CleanLockToken, + ) -> Result { +- let (_, key) = from_raw_id(id); +- +- { +- let guard = PIPES.read(token.token()); +- if let Some(Handle::SchemeRoot) = guard.get(&key) { +- } else if let Some(Handle::Pipe(pipe_arc)) = guard.get(&key) { +- let pipe = Arc::clone(pipe_arc); +- drop(guard); +- +- if user_buf.as_bytes() == b"write" { +- return Err(Error::new(EINVAL)); ++ let is_scheme_root = { ++ let handles = HANDLES.read(token.token()); ++ match handles.get(&id) { ++ Some(Handle::SchemeRoot) => true, ++ Some(Handle::Endpoint(_)) => false, ++ None => return Err(Error::new(EBADF)), ++ } ++ }; ++ ++ if is_scheme_root { ++ let path = user_buf.as_str().or(Err(Error::new(EINVAL)))?; ++ if !path.trim_start_matches('/').is_empty() { ++ return Err(Error::new(ENOENT)); + } + +- if pipe.has_run_dup.swap(true, Ordering::SeqCst) { +- return Err(Error::new(EBADF)); +- } +- ++ let pipe = Arc::new(Pipe::new()); + return Ok(OpenResult::SchemeLocal( +- key | WRITE_NOT_READ_BIT, ++ open_endpoint(pipe, EndpointKind::Read, None, token), + InternalFlags::empty(), + )); +- } else { +- return Err(Error::new(EBADF)); +- } +- } +- +- let path = user_buf.as_str().or(Err(Error::new(EINVAL)))?; +- if !path.trim_start_matches('/').is_empty() { +- return Err(Error::new(ENOENT)); +- } +- +- let (read_id, _) = pipe(token)?; +- +- Ok(OpenResult::SchemeLocal(read_id, InternalFlags::empty())) ++ } ++ ++ let endpoint = Self::get_endpoint(id, token)?; ++ if !endpoint.kind.can_read() { ++ return Err(Error::new(EBADF)); ++ } ++ ++ let path = user_buf.as_bytes(); ++ if !path.is_empty() && path != b"write" { ++ return Err(Error::new(EINVAL)); ++ } ++ ++ Ok(OpenResult::SchemeLocal( ++ open_endpoint( ++ Arc::clone(&endpoint.pipe), ++ EndpointKind::Write, ++ endpoint.named, ++ token, ++ ), ++ InternalFlags::empty(), ++ )) + } + + fn kread( +@@ -244,16 +420,15 @@ + _stored_flags: u32, + token: &mut CleanLockToken, + ) -> Result { +- let (is_write_not_read, key) = from_raw_id(id); +- +- if is_write_not_read { ++ let endpoint = Self::get_endpoint(id, token)?; ++ ++ if !endpoint.kind.can_read() { + return Err(Error::new(EBADF)); + } +- let pipe = Self::get_pipe(key, token)?; + + loop { +- let vec = pipe.queue.lock(token.token()); +- let (mut vec, mut token) = vec.into_split(); ++ let vec = endpoint.pipe.queue.lock(token.token()); ++ let (mut vec, mut lock_token) = vec.into_split(); + + let (s1, s2) = vec.as_slices(); + let s1_count = core::cmp::min(user_buf.len(), s1.len()); +@@ -273,28 +448,34 @@ + let _ = vec.drain(..bytes_read); + + if bytes_read > 0 { +- event::trigger_locked( +- GlobalSchemes::Pipe.scheme_id(), +- key | WRITE_NOT_READ_BIT, +- EVENT_WRITE, +- token.token(), +- ); +- pipe.write_condition.notify_locked(token.token()); ++ drop(vec); ++ drop(lock_token); ++ trigger_matching(&endpoint.pipe, false, true, EVENT_WRITE, token); ++ endpoint.pipe.write_condition.notify(token); + + return Ok(bytes_read); +- } else if user_buf.is_empty() { ++ } ++ ++ if user_buf.is_empty() { + return Ok(0); + } + +- if !pipe.writer_is_alive.load(Ordering::SeqCst) { ++ if endpoint.pipe.writer_count.load(Ordering::SeqCst) == 0 { + return Ok(0); +- } else if fcntl_flags & O_NONBLOCK as u32 != 0 { ++ } ++ if fcntl_flags & O_NONBLOCK as u32 != 0 { + return Err(Error::new(EAGAIN)); +- } else if !pipe.read_condition.wait(vec, "PipeRead::read", &mut token) { ++ } ++ if !endpoint ++ .pipe ++ .read_condition ++ .wait(vec, "PipeRead::read", &mut lock_token) ++ { + return Err(Error::new(EINTR)); + } + } + } ++ + fn kwrite( + &self, + id: usize, +@@ -303,18 +484,17 @@ + _stored_flags: u32, + token: &mut CleanLockToken, + ) -> Result { +- let (is_write_not_read, key) = from_raw_id(id); +- +- if !is_write_not_read { ++ let endpoint = Self::get_endpoint(id, token)?; ++ ++ if !endpoint.kind.can_write() { + return Err(Error::new(EBADF)); + } +- let pipe = Self::get_pipe(key, token)?; + + loop { +- let vec = pipe.queue.lock(token.token()); +- let (mut vec, mut token) = vec.into_split(); +- +- if !pipe.reader_is_alive.load(Ordering::Relaxed) { ++ let vec = endpoint.pipe.queue.lock(token.token()); ++ let (mut vec, mut lock_token) = vec.into_split(); ++ ++ if endpoint.pipe.reader_count.load(Ordering::Relaxed) == 0 { + return Err(Error::new(EPIPE)); + } + +@@ -329,7 +509,6 @@ + + let mut bytes_written = 0; + +- // TODO: Modify VecDeque so that the unwritten portions can be accessed directly? + for (idx, chunk) in src_buf.in_variable_chunks(TMPBUF_SIZE).enumerate() { + let chunk_byte_count = match chunk.copy_common_bytes_to_slice(&mut tmp_buf) { + Ok(c) => c, +@@ -341,41 +520,52 @@ + } + + if bytes_written > 0 { +- event::trigger_locked( +- GlobalSchemes::Pipe.scheme_id(), +- key, +- EVENT_READ, +- token.token(), +- ); +- pipe.read_condition.notify_locked(token.token()); ++ drop(vec); ++ drop(lock_token); ++ trigger_matching(&endpoint.pipe, true, false, EVENT_READ, token); ++ endpoint.pipe.read_condition.notify(token); + + return Ok(bytes_written); +- } else if user_buf.is_empty() { ++ } ++ ++ if user_buf.is_empty() { + return Ok(0); + } + + if fcntl_flags & O_NONBLOCK as u32 != 0 { + return Err(Error::new(EAGAIN)); +- } else if !pipe ++ } ++ if !endpoint ++ .pipe + .write_condition +- .wait(vec, "PipeWrite::write", &mut token) ++ .wait(vec, "PipeWrite::write", &mut lock_token) + { + return Err(Error::new(EINTR)); + } + } + } +- fn kfpath(&self, _id: usize, buf: UserSliceWo, _token: &mut CleanLockToken) -> Result { +- //TODO: construct useful path? +- buf.copy_common_bytes_from_slice("/scheme/pipe/".as_bytes()) +- } +- fn kfstat(&self, _id: usize, buf: UserSliceWo, _token: &mut CleanLockToken) -> Result<()> { ++ ++ fn kfpath(&self, id: usize, buf: UserSliceWo, token: &mut CleanLockToken) -> Result { ++ let endpoint = Self::get_endpoint(id, token)?; ++ if let Some(named) = endpoint.named { ++ buf.copy_common_bytes_from_slice(named.path.as_bytes()) ++ } else { ++ buf.copy_common_bytes_from_slice("/scheme/pipe/".as_bytes()) ++ } ++ } ++ ++ fn kfstat(&self, id: usize, buf: UserSliceWo, token: &mut CleanLockToken) -> Result<()> { ++ let endpoint = Self::get_endpoint(id, token)?; ++ let mode = endpoint.named.map_or(0o666, |named| named.mode); ++ + buf.copy_exactly(&Stat { +- st_mode: MODE_FIFO | 0o666, ++ st_mode: MODE_FIFO | mode, + ..Default::default() + })?; + + Ok(()) + } ++ + fn kfdwrite( + &self, + id: usize, +@@ -385,23 +575,17 @@ + _metadata: &[u64], + token: &mut CleanLockToken, + ) -> Result { +- let (is_write_not_read, key) = from_raw_id(id); +- +- if !is_write_not_read { ++ let endpoint = Self::get_endpoint(id, token)?; ++ ++ if !endpoint.kind.can_write() { + return Err(Error::new(EBADF)); + } +- let pipe = match Self::get_pipe(key, token) { +- Ok(p) => p, +- Err(e) => { +- return Err(e); +- } +- }; + + loop { +- let vec = pipe.fd_queue.lock(token.token()); +- let (mut vec, mut token) = vec.into_split(); +- +- if !pipe.reader_is_alive.load(Ordering::Relaxed) { ++ let vec = endpoint.pipe.fd_queue.lock(token.token()); ++ let (mut vec, mut lock_token) = vec.into_split(); ++ ++ if endpoint.pipe.reader_count.load(Ordering::Relaxed) == 0 { + return Err(Error::new(EPIPE)); + } + if descs.is_empty() { +@@ -421,25 +605,24 @@ + let fds_written = vec.len() - before_len; + + if fds_written > 0 { +- event::trigger_locked( +- GlobalSchemes::Pipe.scheme_id(), +- key, +- EVENT_READ, +- token.token(), +- ); +- pipe.read_condition.notify_locked(token.token()); ++ drop(vec); ++ drop(lock_token); ++ trigger_matching(&endpoint.pipe, true, false, EVENT_READ, token); ++ endpoint.pipe.read_condition.notify(token); + + return Ok(fds_written); + } + +- if !pipe ++ if !endpoint ++ .pipe + .write_condition +- .wait(vec, "PipeWrite::write", &mut token) ++ .wait(vec, "PipeWrite::write", &mut lock_token) + { + return Err(Error::new(EINTR)); + } + } + } ++ + fn kfdread( + &self, + id: usize, +@@ -448,25 +631,19 @@ + _metadata: &[u64], + token: &mut CleanLockToken, + ) -> Result { +- let (is_write_not_read, key) = from_raw_id(id); +- +- if is_write_not_read { ++ let endpoint = Self::get_endpoint(id, token)?; ++ ++ if !endpoint.kind.can_read() { + return Err(Error::new(EBADF)); + } +- let pipe = match Self::get_pipe(key, token) { +- Ok(p) => p, +- Err(e) => { +- return Err(e); +- } +- }; + + if payload.is_empty() { + return Ok(0); + } + + loop { +- let vec = pipe.fd_queue.lock(token.token()); +- let (mut vec, mut token) = vec.into_split(); ++ let vec = endpoint.pipe.fd_queue.lock(token.token()); ++ let (mut vec, mut lock_token) = vec.into_split(); + + let fds_available = vec.len(); + let max_fds_read = payload.len() / size_of::(); +@@ -479,31 +656,33 @@ + fds_to_transfer, + payload, + flags.contains(CallFlags::FD_CLOEXEC), +- &mut token, ++ &mut lock_token, + )?; + } else { + bulk_add_fds( + fds_to_transfer, + payload, + flags.contains(CallFlags::FD_CLOEXEC), +- &mut token, ++ &mut lock_token, + )?; + } + +- event::trigger_locked( +- GlobalSchemes::Pipe.scheme_id(), +- key | WRITE_NOT_READ_BIT, +- EVENT_WRITE, +- token.token(), +- ); +- pipe.write_condition.notify_locked(token.token()); ++ drop(vec); ++ drop(lock_token); ++ trigger_matching(&endpoint.pipe, false, true, EVENT_WRITE, token); ++ endpoint.pipe.write_condition.notify(token); + + return Ok(fds_to_read); + } + +- if !pipe.writer_is_alive.load(Ordering::SeqCst) { ++ if endpoint.pipe.writer_count.load(Ordering::SeqCst) == 0 { + return Ok(0); +- } else if !pipe.read_condition.wait(vec, "PipeRead::read", &mut token) { ++ } ++ if !endpoint ++ .pipe ++ .read_condition ++ .wait(vec, "PipeRead::read", &mut lock_token) ++ { + return Err(Error::new(EINTR)); + } + } +@@ -511,11 +690,23 @@ + } + + pub struct Pipe { +- read_condition: WaitCondition, // signals whether there are available bytes to read +- write_condition: WaitCondition, // signals whether there is room for additional bytes ++ read_condition: WaitCondition, ++ write_condition: WaitCondition, + queue: Mutex>, +- reader_is_alive: AtomicBool, // starts set, unset when reader closes +- writer_is_alive: AtomicBool, // starts set, unset when writer closes +- has_run_dup: AtomicBool, ++ reader_count: AtomicUsize, ++ writer_count: AtomicUsize, + fd_queue: Mutex>>, + } ++ ++impl Pipe { ++ fn new() -> Self { ++ Self { ++ read_condition: WaitCondition::new(), ++ write_condition: WaitCondition::new(), ++ queue: Mutex::new(VecDeque::new()), ++ reader_count: AtomicUsize::new(0), ++ writer_count: AtomicUsize::new(0), ++ fd_queue: Mutex::new(VecDeque::new()), ++ } ++ } ++} +--- a/src/syscall/fs.rs ++++ b/src/syscall/fs.rs +@@ -2,7 +2,7 @@ + + use core::num::NonZeroUsize; + +-use alloc::{string::String, sync::Arc, vec::Vec}; ++use alloc::{format, string::{String, ToString}, sync::Arc, vec::Vec}; + use redox_path::RedoxPath; + + use crate::{ +@@ -12,9 +12,9 @@ + memory::{AddrSpace, GenericFlusher, Grant, PageSpan, TlbShootdownActions}, + }, + memory::{Page, VirtualAddress, PAGE_SIZE}, +- scheme::{self, FileHandle, KernelScheme, OpenResult, StrOrBytes}, ++ scheme::{self, pipe, FileHandle, KernelScheme, OpenResult, SchemeExt, StrOrBytes}, + sync::{CleanLockToken, RwLock}, +- syscall::{data::Stat, error::*, flag::*}, ++ syscall::{data::{GlobalSchemes, Stat}, error::*, flag::*}, + }; + + use super::usercopy::{UserSlice, UserSliceRo, UserSliceRw, UserSliceWo}; +@@ -62,55 +62,29 @@ + // TODO: Define elsewhere + const PATH_MAX: usize = PAGE_SIZE; + +-pub fn openat( +- fh: FileHandle, +- raw_path: UserSliceRo, ++fn fifo_path_key(scheme_id: scheme::SchemeId, number: usize, path: &str) -> String { ++ if path.starts_with('/') { ++ path.to_string() ++ } else { ++ format!("@fifo:{}:{}:{}", scheme_id.get(), number, path) ++ } ++} ++ ++fn install_open_result( ++ scheme_id: scheme::SchemeId, + flags: usize, +- fcntl_flags: u32, +- euid: u32, +- egid: u32, ++ open_result: OpenResult, + token: &mut CleanLockToken, + ) -> Result { +- let path_buf = copy_path_to_buf(raw_path, PATH_MAX)?; +- +- let (scheme_id, number) = { +- let current_lock = context::current(); +- let mut current = current_lock.read(token.token()); +- let (context, mut token) = current.token_split(); +- let pipe = context.get_file(fh, &mut token).ok_or(Error::new(EBADF))?; +- let desc = pipe.description.read(token.token()); +- (desc.scheme, desc.number) +- }; +- +- let caller_ctx = context::current() +- .read(token.token()) +- .caller_ctx() +- .filter_uid_gid(euid, egid); +- +- let new_description = { +- let scheme = scheme::get_scheme(token.token(), scheme_id)?; +- +- let res = scheme.kopenat( ++ let new_description = match open_result { ++ OpenResult::SchemeLocal(number, internal_flags) => Arc::new(RwLock::new(FileDescription { ++ offset: 0, ++ internal_flags, ++ scheme: scheme_id, + number, +- StrOrBytes::from_str(&path_buf), +- flags, +- fcntl_flags, +- caller_ctx, +- token, +- ); +- +- match res? { +- OpenResult::SchemeLocal(number, internal_flags) => { +- Arc::new(RwLock::new(FileDescription { +- offset: 0, +- internal_flags, +- scheme: scheme_id, +- number, +- flags: (flags & !O_CLOEXEC) as u32, +- })) +- } +- OpenResult::External(desc) => desc, +- } ++ flags: (flags & !O_CLOEXEC) as u32, ++ })), ++ OpenResult::External(desc) => desc, + }; + + let current_lock = context::current(); +@@ -126,6 +100,100 @@ + ) + .ok_or(Error::new(EMFILE)) + } ++ ++fn path_exists_in_scheme( ++ scheme: &dyn KernelScheme, ++ number: usize, ++ path: &str, ++ caller_ctx: scheme::CallerCtx, ++ token: &mut CleanLockToken, ++) -> Result { ++ match scheme.kopenat(number, StrOrBytes::from_str(path), O_STAT, 0, caller_ctx, token) { ++ Ok(OpenResult::SchemeLocal(number, _)) => { ++ let _ = scheme.close(number, token); ++ Ok(true) ++ } ++ Ok(OpenResult::External(_)) => Ok(true), ++ Err(err) if err.errno == ENOENT => Ok(false), ++ Err(err) => Err(err), ++ } ++} ++ ++pub fn openat( ++ fh: FileHandle, ++ raw_path: UserSliceRo, ++ flags: usize, ++ fcntl_flags: u32, ++ euid: u32, ++ egid: u32, ++ token: &mut CleanLockToken, ++) -> Result { ++ let path_buf = copy_path_to_buf(raw_path, PATH_MAX)?; ++ ++ let (scheme_id, number) = { ++ let current_lock = context::current(); ++ let mut current = current_lock.read(token.token()); ++ let (context, mut token) = current.token_split(); ++ let pipe = context.get_file(fh, &mut token).ok_or(Error::new(EBADF))?; ++ let desc = pipe.description.read(token.token()); ++ (desc.scheme, desc.number) ++ }; ++ ++ let caller_ctx = context::current() ++ .read(token.token()) ++ .caller_ctx() ++ .filter_uid_gid(euid, egid); ++ ++ let fifo_mode_requested = flags & MODE_FIFO as usize == MODE_FIFO as usize; ++ let fifo_key = fifo_path_key(scheme_id, number, &path_buf); ++ ++ if pipe::named_pipe_exists(&fifo_key, token) { ++ if flags & O_EXCL == O_EXCL && flags & O_CREAT == O_CREAT { ++ return Err(Error::new(EEXIST)); ++ } ++ if fifo_mode_requested && flags & O_CREAT == O_CREAT { ++ return Err(Error::new(EEXIST)); ++ } ++ ++ let pipe_number = pipe::open_named_pipe(&fifo_key, flags, token)?.ok_or(Error::new(ENOENT))?; ++ return install_open_result( ++ GlobalSchemes::Pipe.scheme_id(), ++ flags, ++ OpenResult::SchemeLocal(pipe_number, InternalFlags::empty()), ++ token, ++ ); ++ } ++ ++ let scheme = scheme::get_scheme(token.token(), scheme_id)?; ++ ++ if fifo_mode_requested && flags & O_CREAT == O_CREAT { ++ if path_exists_in_scheme(&*scheme, number, &path_buf, caller_ctx, token)? { ++ return Err(Error::new(EEXIST)); ++ } ++ ++ let mode = u16::try_from(flags & 0o7777).map_err(|_| Error::new(EINVAL))?; ++ let pipe_number = ++ pipe::create_named_pipe(&fifo_key, &path_buf, mode, flags, token)?; ++ ++ return install_open_result( ++ GlobalSchemes::Pipe.scheme_id(), ++ flags, ++ OpenResult::SchemeLocal(pipe_number, InternalFlags::empty()), ++ token, ++ ); ++ } ++ ++ let open_result = scheme.kopenat( ++ number, ++ StrOrBytes::from_str(&path_buf), ++ flags, ++ fcntl_flags, ++ caller_ctx, ++ token, ++ )?; ++ ++ install_open_result(scheme_id, flags, open_result, token) ++} + /// Unlinkat syscall + pub fn unlinkat( + fh: FileHandle, +@@ -152,6 +220,10 @@ + .read(token.token()) + .caller_ctx() + .filter_uid_gid(euid, egid); ++ ++ if pipe::unlink_named_pipe(&fifo_path_key(scheme_id, number, &path_buf), token) { ++ return Ok(()); ++ } + + /* + let mut path_buf = BorrowedHtBuf::head()?; diff --git a/local/recipes/dev/binutils-native/recipe.toml b/local/recipes/dev/binutils-native/recipe.toml new file mode 100644 index 000000000..703b0f2f0 --- /dev/null +++ b/local/recipes/dev/binutils-native/recipe.toml @@ -0,0 +1,53 @@ +# Native GNU Binutils 2.43.1 for Red Bear OS +# Produces binutils that RUN on x86_64-unknown-redox and TARGET x86_64-unknown-redox. +# Built via cross-compilation from host using the existing cross-compiler in prefix/. + +[source] +same_as = "../../../../recipes/dev/binutils-gdb" + +[build] +template = "custom" +dependencies = [ + "gcc13", + "gnu-binutils", + "libgmp", + "libmpfr", +] +script = """ +DYNAMIC_INIT + +# Use the cross-compiler to produce native redox binaries +export CC="${COOKBOOK_TARGET}-gcc" +export CXX="${COOKBOOK_TARGET}-g++" +export AR="${COOKBOOK_TARGET}-ar" +export RANLIB="${COOKBOOK_TARGET}-ranlib" + +# Configure for native compilation +COOKBOOK_CONFIGURE_FLAGS+=( + --host="${GNU_TARGET}" + --target="${GNU_TARGET}" + --prefix=/usr + --with-sysroot="${COOKBOOK_SYSROOT}" + --enable-default-hash-style=gnu + --disable-werror + --disable-nls + --disable-multilib +) + +"${COOKBOOK_CONFIGURE}" "${COOKBOOK_CONFIGURE_FLAGS[@]}" +"${COOKBOOK_MAKE}" -j "${COOKBOOK_MAKE_JOBS}" +"${COOKBOOK_MAKE}" install DESTDIR="${COOKBOOK_STAGE}" + +# Native binutils don't need the target prefix - provide unprefixed links +for tool in ar as ld nm objcopy objdump ranlib readelf size strings strip; do + if [ -f "${COOKBOOK_STAGE}/usr/bin/${GNU_TARGET}-${tool}" ]; then + ln -sf "${GNU_TARGET}-${tool}" "${COOKBOOK_STAGE}/usr/bin/${tool}" + fi +done + +# Remove libtool archives +rm -f "${COOKBOOK_STAGE}"/usr/lib/*.la +""" + +[package] +description = "Native GNU Binutils 2.43.1 for Red Bear OS (runs on redox, targets redox)" diff --git a/local/recipes/dev/bison/recipe.toml b/local/recipes/dev/bison/recipe.toml new file mode 100644 index 000000000..ec50419fe --- /dev/null +++ b/local/recipes/dev/bison/recipe.toml @@ -0,0 +1,21 @@ +[source] +tar = "https://ftp.gnu.org/gnu/bison/bison-3.8.2.tar.xz" +blake3 = "9dd90be8df4d0474b941e2ca14ac76d11b7ccb46edb26344b60d866178bbcc98" + +[build] +template = "custom" +script = """ +DYNAMIC_INIT +export ac_cv_func___fseterr=yes +export ac_cv_type_sigset_t=yes +export ac_cv_type_posix_spawnattr_t=yes +export ac_cv_type_posix_spawn_file_actions_t=yes +COOKBOOK_CONFIGURE_FLAGS+=( + --disable-nls +) +cookbook_configure +""" + +[package] +description = "GNU parser generator (yacc-compatible)" +dependencies = ["m4"] diff --git a/local/recipes/dev/flex/recipe.toml b/local/recipes/dev/flex/recipe.toml new file mode 100644 index 000000000..8d44355bc --- /dev/null +++ b/local/recipes/dev/flex/recipe.toml @@ -0,0 +1,15 @@ +[source] +tar = "https://github.com/westes/flex/releases/download/v2.6.4/flex-2.6.4.tar.gz" + +[build] +template = "custom" +script = """ +DYNAMIC_INIT +COOKBOOK_CONFIGURE_FLAGS+=( + --disable-nls +) +cookbook_configure +""" + +[package] +description = "Fast lexical analyzer generator (lex-compatible)" diff --git a/local/recipes/dev/gcc-native/recipe.toml b/local/recipes/dev/gcc-native/recipe.toml new file mode 100644 index 000000000..7e0a8d9a8 --- /dev/null +++ b/local/recipes/dev/gcc-native/recipe.toml @@ -0,0 +1,96 @@ +# Native GCC 13.2.0 for Red Bear OS +# Produces GCC that RUNS on x86_64-unknown-redox and TARGETS x86_64-unknown-redox. +# Built via cross-compilation from the host using the existing cross-compiler in prefix/. +# +# This is Strategy A from BUILD-TOOLS-PORTING-PLAN.md: pre-built native toolchain +# produced by cross-compilation, avoiding the multi-stage self-hosting bootstrap. +# +# To use: repo cook local/recipes/dev/gcc-native +# The resulting gcc binary runs inside Red Bear OS. + +[source] +same_as = "../../../../recipes/dev/gcc13" + +[build] +template = "custom" +dependencies = [ + "gcc13", + "gcc13.cxx", + "gnu-binutils", + "libgmp", + "libmpfr", + "mpc", +] +script = """ +DYNAMIC_INIT + +# Use the cross-compiler to produce native redox binaries +export CC="${COOKBOOK_TARGET}-gcc" +export CXX="${COOKBOOK_TARGET}-g++" +export AR="${COOKBOOK_TARGET}-ar" +export RANLIB="${COOKBOOK_TARGET}-ranlib" +export LD="${COOKBOOK_TARGET}-ld" +export NM="${COOKBOOK_TARGET}-nm" +export STRIP="${COOKBOOK_TARGET}-strip" + +# Configure for native compilation: host AND target are both redox +COOKBOOK_CONFIGURE_FLAGS+=( + --host="${GNU_TARGET}" + --target="${GNU_TARGET}" + --prefix=/usr + --with-sysroot="${COOKBOOK_SYSROOT}" + --with-gmp="${COOKBOOK_SYSROOT}/usr" + --with-mpfr="${COOKBOOK_SYSROOT}/usr" + --with-mpc="${COOKBOOK_SYSROOT}/usr" + --with-linker-hash-style=gnu + --enable-languages=c,c++,lto + --enable-initfini-array + --disable-nls + --disable-multilib + --enable-host-shared + --enable-threads=posix + --enable-libstdcxx-threads + --disable-bootstrap + --with-bugurl="https://gitlab.redox-os.org/redox-os/gcc/-/issues" +) + +"${COOKBOOK_CONFIGURE}" "${COOKBOOK_CONFIGURE_FLAGS[@]}" +"${COOKBOOK_MAKE}" -j "${COOKBOOK_MAKE_JOBS}" all-gcc all-target-libgcc all-target-libstdc++-v3 +"${COOKBOOK_MAKE}" install-gcc install-target-libgcc install-target-libstdc++-v3 DESTDIR="${COOKBOOK_STAGE}" + +# Native GCC doesn't need the target prefix +for tool in gcc g++ c++ cpp gcc-ar gcc-nm gcc-ranlib gcov gcov-dump gcov-tool gfortran; do + if [ -f "${COOKBOOK_STAGE}/usr/bin/${GNU_TARGET}-${tool}" ]; then + ln -sf "${GNU_TARGET}-${tool}" "${COOKBOOK_STAGE}/usr/bin/${tool}" + fi +done + +# Provide cc symlink +ln -sf gcc "${COOKBOOK_STAGE}/usr/bin/cc" + +# Remove libtool archives +rm -f "${COOKBOOK_STAGE}"/usr/lib/libgcc_s.so* "${COOKBOOK_STAGE}"/usr/lib/libstdc++.so* +rm -f "${COOKBOOK_STAGE}"/usr/lib/*.la + +# Copy libgcc and libstdc++ into target lib dir +mkdir -p "${COOKBOOK_STAGE}/usr/lib/${GNU_TARGET}" +cp -a "${COOKBOOK_STAGE}"/usr/lib/gcc/${GNU_TARGET}/13.2.0/libgcc.a "${COOKBOOK_STAGE}/usr/lib/${GNU_TARGET}/" || true +cp -a "${COOKBOOK_STAGE}"/usr/lib/gcc/${GNU_TARGET}/13.2.0/libgcc_eh.a "${COOKBOOK_STAGE}/usr/lib/${GNU_TARGET}/" || true +""" + +[package] +description = "Native GCC 13.2.0 compiler for Red Bear OS (runs on redox, targets redox)" +dependencies = ["gnu-binutils"] + +[[optional-packages]] +name = "cxx" +dependencies = [] +files = [ + "usr/bin/*c++", + "usr/bin/*g++", + "usr/bin/g++", + "usr/include/c++/**", + "usr/lib/*c++*", + "usr/libexec/gcc/**/cc1plus", + "usr/share/gcc-*/python/libstdcxx/**", +] diff --git a/recipes/dev/gnu-make/recipe.toml b/local/recipes/dev/gnu-make/recipe.toml similarity index 63% rename from recipes/dev/gnu-make/recipe.toml rename to local/recipes/dev/gnu-make/recipe.toml index d456b5c52..365a663d8 100644 --- a/recipes/dev/gnu-make/recipe.toml +++ b/local/recipes/dev/gnu-make/recipe.toml @@ -1,4 +1,3 @@ -#TODO slower jobserver due to lack of named pipes [source] tar = "http://ftp.gnu.org/gnu/make/make-4.4.tar.gz" blake3 = "1a0e5353205e106bd9b3c0f4a5f37ee1156a1e1c8feb771d1b4842c216612cba" @@ -13,9 +12,8 @@ autotools_recursive_regenerate template = "custom" script = """ DYNAMIC_INIT -COOKBOOK_CONFIGURE_FLAGS+=( -# TODO: https://gitlab.redox-os.org/redox-os/redox/-/issues/1753 - 'ac_cv_func_mkfifo=no' -) cookbook_configure """ + +[package] +description = "GNU Make build system (mkfifo jobserver enabled)" diff --git a/recipes/dev/gnu-make/redox.patch b/local/recipes/dev/gnu-make/redox.patch similarity index 100% rename from recipes/dev/gnu-make/redox.patch rename to local/recipes/dev/gnu-make/redox.patch diff --git a/recipes/dev/libtool/recipe.toml b/local/recipes/dev/libtool/recipe.toml similarity index 78% rename from recipes/dev/libtool/recipe.toml rename to local/recipes/dev/libtool/recipe.toml index b62946be9..6ef09369f 100644 --- a/recipes/dev/libtool/recipe.toml +++ b/local/recipes/dev/libtool/recipe.toml @@ -1,4 +1,3 @@ -#TODO can build, not tested [source] git = "https://gitlab.redox-os.org/redox-os/libtool" branch = "v2.5.4-redox" @@ -8,10 +7,7 @@ shallow_clone = true template = "custom" script = """ DYNAMIC_INIT - -# libtool saves absolute path to sysroot which contains nothing unset CFLAGS - cp -r "${COOKBOOK_SOURCE}"/. ./ ./bootstrap \ --skip-po \ @@ -20,3 +16,6 @@ cp -r "${COOKBOOK_SOURCE}"/. ./ COOKBOOK_CONFIGURE="./configure" cookbook_configure """ + +[package] +description = "GNU Libtool (shared library support)" diff --git a/local/recipes/dev/llvm-native/recipe.toml b/local/recipes/dev/llvm-native/recipe.toml new file mode 100644 index 000000000..4719875a3 --- /dev/null +++ b/local/recipes/dev/llvm-native/recipe.toml @@ -0,0 +1,117 @@ +[source] +same_as = "../../../../recipes/dev/llvm21" + +[build] +template = "custom" +dependencies = [ + "llvm21", + "llvm21.runtime", + "llvm21.dev", + "clang21", + "lld21", + "zstd", +] +script = """ +DYNAMIC_INIT +ARCH="$(echo "${TARGET}" | cut -d - -f1)" + +generate_cookbook_cmake_file "$COOKBOOK_HOST_TARGET" "" "$COOKBOOK_TOOLCHAIN" native.cmake + +case "${ARCH}" in + x86 | x86_64) LLVM_TARGETS_TO_BUILD="X86";; + aarch64) LLVM_TARGETS_TO_BUILD="AArch64";; + riscv64gc) LLVM_TARGETS_TO_BUILD="RISCV";; +esac + +COOKBOOK_CMAKE_FLAGS+=( + -DLLVM_ENABLE_PROJECTS="clang;lld" + -DLLVM_BUILD_LLVM_DYLIB=On + -DLLVM_LINK_LLVM_DYLIB=On + -DLLVM_INCLUDE_UTILS=On + -DLLVM_INSTALL_UTILS=On + -DLLVM_TARGETS_TO_BUILD="$LLVM_TARGETS_TO_BUILD" + -DLLVM_ENABLE_ZLIB=Off + -DLLVM_USE_STATIC_ZSTD=On + -DLLVM_ENABLE_LIBXML2=Off + -DCLANG_LINK_CLANG_DYLIB=ON + -DLIBCLANG_BUILD_STATIC=1 + -DCROSS_TOOLCHAIN_FLAGS_NATIVE="-DCMAKE_TOOLCHAIN_FILE=$(realpath native.cmake)" + -DCMAKE_CXX_FLAGS="--std=gnu++11" + -DBUILD_SHARED_LIBS=False + -DLLVM_BUILD_EXAMPLES=Off + -DLLVM_BUILD_TESTS=Off + -DLLVM_DEFAULT_TARGET_TRIPLE="${TARGET}" + -DLLVM_ENABLE_LTO=Off + -DLLVM_ENABLE_RTTI=On + -DLLVM_ENABLE_THREADS=On + -DLLVM_INCLUDE_EXAMPLES=Off + -DLLVM_INCLUDE_TESTS=Off + -DLLVM_OPTIMIZED_TABLEGEN=On + -DLLVM_TARGET_ARCH=$ARCH + -DLLVM_TOOLS_INSTALL_DIR=bin + -DLLVM_UTILS_INSTALL_DIR=bin + -DUNIX=1 + -DLLVM_TABLEGEN_EXE=${COOKBOOK_HOST_SYSROOT}/bin/llvm-tblgen + -DCLANG_TABLEGEN_EXE=${COOKBOOK_HOST_SYSROOT}/bin/clang-tblgen +) + +COOKBOOK_SOURCE="$COOKBOOK_SOURCE/llvm" +cookbook_cmake + +# Create unprefixed symlinks for native use +for tool in clang clang++ clang-cpp clang-cl clang-check clang-format clang-tidy \ + lld ld.lld ld64.lld wasm-ld \ + llvm-ar llvm-ranlib llvm-nm llvm-objdump llvm-objcopy llvm-strip \ + llvm-readelf llvm-size llvm-strings llvm-symbolizer llvm-addr2line \ + llvm-config llvm-link llvm-profdata llvm-cov \ + llvm-as llvm-dis llc opt; do + if [ -f "${COOKBOOK_STAGE}/usr/bin/${tool}" ]; then + : # already has unprefixed name + elif [ -f "${COOKBOOK_STAGE}/usr/bin/${GNU_TARGET}-${tool}" ]; then + ln -sf "${GNU_TARGET}-${tool}" "${COOKBOOK_STAGE}/usr/bin/${tool}" + fi +done + +# Provide cc/c++ symlinks +if [ -f "${COOKBOOK_STAGE}/usr/bin/clang" ]; then + ln -sf clang "${COOKBOOK_STAGE}/usr/bin/cc" + ln -sf clang++ "${COOKBOOK_STAGE}/usr/bin/c++" +elif [ -f "${COOKBOOK_STAGE}/usr/bin/${GNU_TARGET}-clang" ]; then + ln -sf "${GNU_TARGET}-clang" "${COOKBOOK_STAGE}/usr/bin/cc" + ln -sf "${GNU_TARGET}-clang++" "${COOKBOOK_STAGE}/usr/bin/c++" +fi + +# Remove libtool archives +rm -f "${COOKBOOK_STAGE}"/usr/lib/*.la +""" + +[package] +description = "Native LLVM/Clang/LLD 21 toolchain for Red Bear OS" +dependencies = ["llvm21.runtime"] + +[[optional-packages]] +name = "dev" +dependencies = [".runtime"] +files = [ + "usr/include/llvm*/**", + "usr/include/clang*/**", + "usr/include/lld*/**", + "usr/lib/libLLVM*.a", + "usr/lib/libclang*.a", + "usr/lib/liblld*.a", + "usr/lib/cmake/llvm/**", + "usr/lib/cmake/clang/**", + "usr/lib/cmake/lld/**", +] + +[[optional-packages]] +name = "runtime" +files = [ + "usr/bin/clang*", + "usr/bin/lld*", + "usr/bin/llvm-*", + "usr/bin/llc", + "usr/bin/opt", + "usr/lib/libLLVM*.so*", + "usr/lib/libclang*.so*", +] diff --git a/local/recipes/dev/m4/recipe.toml b/local/recipes/dev/m4/recipe.toml new file mode 100644 index 000000000..6e6259ac9 --- /dev/null +++ b/local/recipes/dev/m4/recipe.toml @@ -0,0 +1,15 @@ +[source] +tar = "https://ftp.gnu.org/gnu/m4/m4-1.14.21.tar.xz" + +[build] +template = "custom" +script = """ +DYNAMIC_INIT +COOKBOOK_CONFIGURE_FLAGS+=( + --disable-nls +) +cookbook_configure +""" + +[package] +description = "GNU M4 macro processor" diff --git a/local/recipes/dev/meson/recipe.toml b/local/recipes/dev/meson/recipe.toml new file mode 100644 index 000000000..ea7925645 --- /dev/null +++ b/local/recipes/dev/meson/recipe.toml @@ -0,0 +1,25 @@ +[source] +tar = "https://github.com/mesonbuild/meson/releases/download/1.3.0/meson-1.3.0.tar.gz" + +[build] +template = "custom" +script = """ +DYNAMIC_INIT +cp -r "${COOKBOOK_SOURCE}"/. ./ + +mkdir -p "${COOKBOOK_STAGE}/usr/bin" +mkdir -p "${COOKBOOK_STAGE}/usr/lib/meson" + +cp -r mesonbuild "${COOKBOOK_STAGE}/usr/lib/meson/" +cp meson.py "${COOKBOOK_STAGE}/usr/lib/meson/" + +cat > "${COOKBOOK_STAGE}/usr/bin/meson" << 'MESON_WRAPPER' +#!/bin/sh +exec python3 /usr/lib/meson/meson.py "$@" +MESON_WRAPPER +chmod +x "${COOKBOOK_STAGE}/usr/bin/meson" +""" + +[package] +description = "Meson build system" +dependencies = ["python312"] diff --git a/local/recipes/dev/ninja-build/recipe.toml b/local/recipes/dev/ninja-build/recipe.toml new file mode 100644 index 000000000..425ee7e60 --- /dev/null +++ b/local/recipes/dev/ninja-build/recipe.toml @@ -0,0 +1,9 @@ +[source] +git = "https://github.com/ninja-build/ninja" +rev = "v1.13.1" + +[build] +template = "cmake" + +[package] +description = "Ninja build system" diff --git a/local/recipes/dev/rust-native/config.toml b/local/recipes/dev/rust-native/config.toml new file mode 100644 index 000000000..319f00abd --- /dev/null +++ b/local/recipes/dev/rust-native/config.toml @@ -0,0 +1,29 @@ +[llvm] +download-ci-llvm = false +static-libstdcpp = false +link-shared = true + +[build] +host = ["COOKBOOK_TARGET"] +target = ["COOKBOOK_TARGET"] +submodules = false +docs = false +tools = ["cargo", "clippy", "rustdoc", "rustfmt", "src"] +extended = true +verbose = 1 + +[install] +prefix = "install" +sysconfdir = "etc" + +[rust] +backtrace = false +codegen-tests = false + +[target.COOKBOOK_TARGET] +cc = "COOKBOOK_GNU_TARGET-gcc" +cxx = "COOKBOOK_GNU_TARGET-g++" +ar = "COOKBOOK_GNU_TARGET-ar" +linker = "COOKBOOK_GNU_TARGET-gcc" +crt-static = false +llvm-config = "COOKBOOK_SYSROOT/bin/llvm-config" diff --git a/local/recipes/dev/rust-native/recipe.toml b/local/recipes/dev/rust-native/recipe.toml new file mode 100644 index 000000000..b35fd900e --- /dev/null +++ b/local/recipes/dev/rust-native/recipe.toml @@ -0,0 +1,39 @@ +[source] +same_as = "../../../../recipes/dev/rust" + +[build] +template = "custom" +dependencies = [ + "zlib", + "curl", + "openssl3", + "llvm-native", + "llvm-native.runtime", +] +script = """ +DYNAMIC_INIT +ARCH="${TARGET%%-*}" +export CARGO_TARGET_${ARCH^^}_UNKNOWN_REDOX_RUSTFLAGS="${RUSTFLAGS}" +cat "${COOKBOOK_ROOT}/bin/${TARGET}-llvm-config" > "${COOKBOOK_SYSROOT}/bin/llvm-config" +export LD_LIBRARY_PATH="${COOKBOOK_HOST_SYSROOT}/lib:${LD_LIBRARY_PATH}" + +cat ${COOKBOOK_RECIPE}/config.toml > config.toml +sed -i "s|COOKBOOK_SYSROOT|${COOKBOOK_SYSROOT}|g" config.toml +sed -i "s|COOKBOOK_TOOLCHAIN|${COOKBOOK_HOST_SYSROOT}|g" config.toml +sed -i "s|COOKBOOK_TARGET|${TARGET}|g" config.toml +sed -i "s|COOKBOOK_GNU_TARGET|${GNU_TARGET}|g" config.toml + +unset AR AS CC CXX LD LDFLAGS NM OBJCOPY OBJDUMP RANLIB READELF RUSTFLAGS CARGO_ENCODED_RUSTFLAGS STRIP + +python3 "${COOKBOOK_SOURCE}/x.py" install \ + --config config.toml \ + --jobs ${COOKBOOK_MAKE_JOBS} + +mkdir -p "${COOKBOOK_STAGE}"/usr +rsync -av --delete "${COOKBOOK_BUILD}"/install/* "${COOKBOOK_STAGE}"/usr/ +rm -rf "${COOKBOOK_STAGE}"/usr/lib/rustlib/*.log +""" + +[package] +description = "Native Rust toolchain for Red Bear OS (rustc + cargo running on redox)" +dependencies = ["llvm-native.runtime"] diff --git a/local/recipes/groups/build-essential-native/recipe.toml b/local/recipes/groups/build-essential-native/recipe.toml new file mode 100644 index 000000000..2f44e1511 --- /dev/null +++ b/local/recipes/groups/build-essential-native/recipe.toml @@ -0,0 +1,8 @@ +[package] +dependencies = [ + "binutils-native", + "gcc-native", + "gcc-native.cxx", + "llvm-native", + "rust-native", +] diff --git a/local/recipes/system/cub/source/cub-cli/src/main.rs b/local/recipes/system/cub/source/cub-cli/src/main.rs index a26f69b08..4c3b74f3c 100644 --- a/local/recipes/system/cub/source/cub-cli/src/main.rs +++ b/local/recipes/system/cub/source/cub-cli/src/main.rs @@ -3,11 +3,14 @@ use std::env; use std::ffi::OsString; use std::fs; use std::io; +use std::io::Write; use std::path::{Path, PathBuf}; use std::process::Command; use std::rc::Rc; use std::time::{SystemTime, UNIX_EPOCH}; +use std::collections::HashSet; + use clap::{CommandFactory, Parser, Subcommand}; use cub::aur::{AurClient, AurPackage}; use cub::cook; @@ -475,6 +478,27 @@ fn build_local_dir(context: &AppContext, dir: &Path) -> Result<(), Box Result<(), Box Result, Box> { + let library = context.open_library()?; + let installed: Vec = library + .get_installed_packages()? + .into_iter() + .map(|p| p.to_string().to_ascii_lowercase()) + .collect(); + + let mut missing = Vec::new(); + let all_deps: Vec<(&String, &str)> = rbpkg + .dependencies + .build + .iter() + .map(|d| (d, "build dependency")) + .chain( + rbpkg + .dependencies + .runtime + .iter() + .map(|d| (d, "runtime dependency")), + ) + .collect(); + + let mut seen = HashSet::new(); + for (dep, kind) in all_deps { + let lower = dep.to_ascii_lowercase(); + if !seen.insert(lower.clone()) { + continue; + } + if installed.contains(&lower) { + continue; + } + missing.push((dep.clone(), kind.to_string())); + } + + Ok(missing) +} + +fn resolve_dependencies_interactive( + context: &AppContext, + missing: &[(String, String)], +) -> Result<(), Box> { + let mut library = context.open_library()?; + + for (dep, _kind) in missing { + let package_name = match PackageName::new(dep.clone()) { + Ok(name) => name, + Err(_) => { + eprintln!(" skipping invalid package name: {dep}"); + continue; + } + }; + + print!(" installing {dep} from official repo... "); + io::stdout().flush()?; + + match library.install(vec![package_name.clone()]) { + Ok(()) => { + println!("done"); + } + Err(pkg::backend::Error::PackageNotFound(_)) => { + println!("not found in official repo — trying AUR"); + print!(" fetching {dep} from AUR into ~/.cub/... "); + io::stdout().flush()?; + + match fetch_aur_to_store(dep) { + Ok(_) => println!("done (recipe saved, build with `cub -B ~/.cub/recipes/{dep}`)"), + Err(e) => println!("failed: {e}"), + } + } + Err(e) => { + println!("failed: {e}"); + } + } + } + + let _ = apply_library_changes(&mut library); + Ok(()) +} + +fn fetch_aur_to_store(package: &str) -> Result<(), Box> { + let store = CubStore::new()?; + store.init()?; + let recipe_dir = store.recipes_dir().join(package); + if recipe_dir.exists() { + return Ok(()); + } + + let repo_url = aur_repo_url(package); + let clone_dir = create_temp_dir("cub-dep-aur")?; + + let status = Command::new("git") + .arg("clone") + .arg("--depth") + .arg("1") + .arg("--") + .arg(&repo_url) + .arg(&clone_dir) + .status()?; + + if !status.success() { + return Err(Box::new(CubError::BuildFailed(format!( + "failed to clone AUR source from {repo_url}" + )))); + } + + let pkgbuild_path = clone_dir.join("PKGBUILD"); + let pkgbuild_content = fs::read_to_string(&pkgbuild_path)?; + let conversion = pkgbuild::convert_pkgbuild(&pkgbuild_content)?; + + fs::create_dir_all(&recipe_dir)?; + fs::write(recipe_dir.join("RBPKGBUILD"), conversion.rbpkg.to_toml()?)?; + cub::recipe::save_recipe_to_store(&conversion.rbpkg, &store)?; + + Ok(()) +} + fn fetch_bur_recipe(package: &str) -> Result<(), Box> { let source_dir = ensure_bur_package_dir(package)?; let destination = env::current_dir()?.join(package); diff --git a/local/recipes/system/cub/source/cub-lib/src/converter.rs b/local/recipes/system/cub/source/cub-lib/src/converter.rs index ca9a37da0..07b1433ba 100644 --- a/local/recipes/system/cub/source/cub-lib/src/converter.rs +++ b/local/recipes/system/cub/source/cub-lib/src/converter.rs @@ -93,6 +93,7 @@ pub fn convert_pkgbuild(content: &str) -> Result { original_pkgbuild: content.to_string(), conversion_status: status.clone(), target: "x86_64-unknown-redox".to_string(), + split_packages: Vec::new(), }, policy: PolicySection::default(), }; diff --git a/local/recipes/system/cub/source/cub-lib/src/cookbook.rs b/local/recipes/system/cub/source/cub-lib/src/cookbook.rs index 59de69629..1cbe7f3fd 100644 --- a/local/recipes/system/cub/source/cub-lib/src/cookbook.rs +++ b/local/recipes/system/cub/source/cub-lib/src/cookbook.rs @@ -10,6 +10,8 @@ struct CookbookRecipe { build: CookbookBuild, #[serde(skip_serializing_if = "Option::is_none")] package: Option, + #[serde(rename = "optional-packages", skip_serializing_if = "Vec::is_empty")] + optional_packages: Vec, } #[derive(Debug, Default, Serialize)] @@ -24,6 +26,8 @@ struct CookbookSource { rev: Option, #[serde(skip_serializing_if = "Option::is_none")] blake3: Option, + #[serde(skip_serializing_if = "Option::is_none")] + shallow_clone: Option, #[serde(skip_serializing_if = "Vec::is_empty")] patches: Vec, } @@ -35,6 +39,8 @@ struct CookbookBuild { dependencies: Vec, #[serde(rename = "dev-dependencies", skip_serializing_if = "Vec::is_empty")] dev_dependencies: Vec, + #[serde(skip_serializing_if = "Option::is_none")] + cargopath: Option, #[serde(skip_serializing_if = "Vec::is_empty")] cargoflags: Vec, #[serde(skip_serializing_if = "Vec::is_empty")] @@ -55,15 +61,42 @@ struct CookbookPackage { version: Option, #[serde(skip_serializing_if = "Option::is_none")] description: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + installs: Vec, +} + +#[derive(Debug, Serialize)] +struct OptionalPackage { + name: String, + #[serde(skip_serializing_if = "Vec::is_empty")] + dependencies: Vec, + #[serde(skip_serializing_if = "Vec::is_empty")] + files: Vec, } pub fn generate_recipe(rbpkg: &RbPkgBuild) -> Result { rbpkg.validate()?; - if rbpkg.source.sources.len() > 1 { - return Err(CubError::Conversion( - "Cookbook recipe generation currently supports a single primary source".to_string(), - )); + let source_count = rbpkg.source.sources.len(); + if source_count > 1 { + let names: Vec = rbpkg + .source + .sources + .iter() + .map(|s| { + let url = &s.url; + if url.len() > 60 { + format!("{}...", &url[..57]) + } else { + url.clone() + } + }) + .collect(); + return Err(CubError::Conversion(format!( + "multiple sources not yet supported (found {}: {}). Use single-source packages or manually create recipe.toml", + source_count, + names.join(", ") + ))); } let source = rbpkg @@ -78,11 +111,13 @@ pub fn generate_recipe(rbpkg: &RbPkgBuild) -> Result { }); let build = convert_build(rbpkg)?; let package = build_package_section(rbpkg); + let optional_packages = build_optional_packages(rbpkg); toml::to_string_pretty(&CookbookRecipe { source, build, package, + optional_packages, }) .map_err(CubError::from) } @@ -95,10 +130,13 @@ fn convert_source(source: &crate::rbpkgbuild::SourceEntry) -> Result { cookbook.tar = Some(source.url.clone()); - cookbook.blake3 = non_empty(&source.sha256); + if !source.sha256.is_empty() { + cookbook.blake3 = Some(source.sha256.clone()); + } } } @@ -109,7 +147,8 @@ fn convert_build(rbpkg: &RbPkgBuild) -> Result { let mut build = CookbookBuild { template: template_name(&rbpkg.build.template).to_string(), dependencies: rbpkg.dependencies.build.clone(), - dev_dependencies: rbpkg.dependencies.check.clone(), + dev_dependencies: prefix_host_deps(&rbpkg.dependencies.check), + cargopath: non_empty(&rbpkg.build.build_dir), cargoflags: Vec::new(), configureflags: Vec::new(), cmakeflags: Vec::new(), @@ -148,17 +187,60 @@ fn build_package_section(rbpkg: &RbPkgBuild) -> Option { rbpkg.package.version.clone() }); - if rbpkg.dependencies.runtime.is_empty() && description.is_none() && version.is_none() { + let mut installs = Vec::new(); + for entry in &rbpkg.install.bins { + installs.push(format!("/usr/bin/{}", entry.to.split('/').last().unwrap_or(&entry.to))); + } + + if rbpkg.dependencies.runtime.is_empty() && description.is_none() && version.is_none() && installs.is_empty() { None } else { Some(CookbookPackage { dependencies: rbpkg.dependencies.runtime.clone(), version, description, + installs, }) } } +fn build_optional_packages(rbpkg: &RbPkgBuild) -> Vec { + let mut packages = Vec::new(); + + for name in &rbpkg.compat.split_packages { + if name == &rbpkg.package.name { + continue; + } + packages.push(OptionalPackage { + name: name.clone(), + dependencies: Vec::new(), + files: vec![format!("usr/**")], + }); + } + + if !rbpkg.dependencies.optional.is_empty() && packages.is_empty() { + packages.push(OptionalPackage { + name: "optional-deps".to_string(), + dependencies: rbpkg.dependencies.optional.clone(), + files: Vec::new(), + }); + } + + packages +} + +fn prefix_host_deps(deps: &[String]) -> Vec { + deps.iter() + .map(|d| { + if d.starts_with("host:") { + d.clone() + } else { + format!("host:{}", d) + } + }) + .collect() +} + fn custom_script(rbpkg: &RbPkgBuild) -> Result { let mut parts = Vec::new(); @@ -169,6 +251,27 @@ fn custom_script(rbpkg: &RbPkgBuild) -> Result { } parts.extend(rbpkg.build.install_script.iter().cloned()); + if !rbpkg.install.bins.is_empty() { + let mut install_lines = Vec::new(); + for entry in &rbpkg.install.bins { + install_lines.push(format!( + "install -Dm755 {} \"${{COOKBOOK_STAGE}}/{}\"", + entry.from, entry.to + )); + } + parts.extend(install_lines); + } + if !rbpkg.install.libs.is_empty() { + let mut install_lines = Vec::new(); + for entry in &rbpkg.install.libs { + install_lines.push(format!( + "install -Dm644 {} \"${{COOKBOOK_STAGE}}/{}\"", + entry.from, entry.to + )); + } + parts.extend(install_lines); + } + if parts.is_empty() { return Err(CubError::InvalidPkgbuild( "custom template requires at least one prepare/build/check/install command".to_string(), @@ -232,7 +335,7 @@ mod tests { build: vec!["cargo".to_string()], runtime: vec!["openssl3".to_string()], check: vec!["python".to_string()], - optional: Vec::new(), + optional: vec!["git".to_string()], provides: Vec::new(), conflicts: Vec::new(), }, @@ -256,6 +359,7 @@ mod tests { original_pkgbuild: String::new(), conversion_status: ConversionStatus::Full, target: String::new(), + split_packages: Vec::new(), }, policy: PolicySection::default(), } @@ -277,6 +381,7 @@ mod tests { value["package"]["dependencies"][0].as_str(), Some("openssl3") ); + assert_eq!(value["source"]["shallow_clone"].as_bool(), Some(true)); } #[test] @@ -321,4 +426,40 @@ mod tests { let recipe = generate_recipe(&pkg).expect("generate recipe"); assert!(!recipe.contains("make test")); } + + #[test] + fn prefixes_dev_dependencies_with_host() { + let recipe = generate_recipe(&base_pkg(BuildTemplate::Cargo)).expect("generate recipe"); + let value: toml::Value = toml::from_str(&recipe).expect("parse generated recipe"); + + assert_eq!( + value["build"]["dev-dependencies"][0].as_str(), + Some("host:python") + ); + } + + #[test] + fn generates_optional_packages_for_optional_deps() { + let recipe = generate_recipe(&base_pkg(BuildTemplate::Cargo)).expect("generate recipe"); + let value: toml::Value = toml::from_str(&recipe).expect("parse generated recipe"); + + let opt = &value["optional-packages"][0]; + assert_eq!(opt["name"].as_str(), Some("optional-deps")); + assert_eq!(opt["dependencies"][0].as_str(), Some("git")); + } + + #[test] + fn errors_on_multiple_sources() { + let mut pkg = base_pkg(BuildTemplate::Cargo); + pkg.source.sources.push(SourceEntry { + source_type: SourceType::Tar, + url: "https://example.com/extra.tar.gz".to_string(), + sha256: "deadbeef".to_string(), + rev: String::new(), + branch: String::new(), + }); + + let err = generate_recipe(&pkg).expect_err("multiple sources should error"); + assert!(matches!(err, CubError::Conversion(_))); + } } diff --git a/local/recipes/system/cub/source/cub-lib/src/deps.rs b/local/recipes/system/cub/source/cub-lib/src/deps.rs index 6588f3a38..003e1c506 100644 --- a/local/recipes/system/cub/source/cub-lib/src/deps.rs +++ b/local/recipes/system/cub/source/cub-lib/src/deps.rs @@ -53,6 +53,73 @@ pub fn map_dependency(arch_name: &str) -> MappedDep { "libtool" => ("libtool".to_string(), true), "systemd" => (String::new(), false), "dbus" => ("dbus".to_string(), true), + "bzip2" | "libbz2" => ("bzip2".to_string(), true), + "xz" | "liblzma" => ("xz".to_string(), true), + "zstd" | "libzstd" => ("zstd".to_string(), true), + "lz4" | "liblz4" => ("lz4".to_string(), true), + "expat" | "libexpat" => ("expat".to_string(), true), + "libxml2" => ("libxml2".to_string(), false), + "libxslt" => ("libxslt".to_string(), false), + "libarchive" => ("libarchive".to_string(), true), + "libuv" => ("libuv".to_string(), true), + "nghttp2" | "libnghttp2" => ("nghttp2".to_string(), true), + "sqlite" | "sqlite3" => ("sqlite3".to_string(), true), + "libsodium" => ("libsodium".to_string(), false), + "libssh2" => ("libssh2".to_string(), false), + "openssh" => ("openssh".to_string(), false), + "gmp" | "libgmp" => ("libgmp".to_string(), true), + "mpfr" | "libmpfr" => ("libmpfr".to_string(), true), + "mpc" | "libmpc" => ("mpc".to_string(), true), + "isl" | "libisl" => ("isl".to_string(), false), + "gdbm" => ("gdbm".to_string(), false), + "libcap" | "libcap-ng" => (String::new(), false), + "pam" | "linux-pam" => (String::new(), false), + "acl" | "libacl" | "attr" | "libattr" => (String::new(), false), + "libselinux" => (String::new(), false), + "dbus-glib" => ("dbus".to_string(), false), + "alsa-lib" | "alsa" => (String::new(), false), + "pulseaudio" | "libpulse" => (String::new(), false), + "jack" | "jack2" => (String::new(), false), + "gstreamer" => (String::new(), false), + "libdrm" => ("libdrm".to_string(), true), + "mesa" | "mesa-libgl" => ("mesa".to_string(), false), + "libglvnd" => (String::new(), false), + "vulkan-icd-loader" | "vulkan-loader" => (String::new(), false), + "libusb" => (String::new(), false), + "libinput" => (String::new(), false), + "libevdev" => (String::new(), false), + "mtdev" => (String::new(), false), + "libwacom" => (String::new(), false), + "libxrandr" | "libxext" | "libxrender" | "libxi" | "libxfixes" + | "libxdamage" | "libxcomposite" | "libxcursor" | "libxinerama" + | "libxshmfence" | "libxkbcommon" | "libxau" | "libxdmcp" + | "libxxf86vm" | "libxtst" | "libxt" | "libxmu" | "libxpm" + | "libxkbfile" | "libxres" | "libxscrnsaver" | "libxv" + | "libxvmc" | "libsm" | "libice" => (String::new(), false), + "icu" | "libicu" | "icu4c" => ("icu".to_string(), false), + "libunistring" => (String::new(), false), + "pcre" | "libpcre" => ("pcre2".to_string(), false), + "libelf" | "elfutils" => ("elfutils".to_string(), false), + "dw" | "libdw" => (String::new(), false), + "libunwind" => (String::new(), false), + "gperf" => ("gperf".to_string(), true), + "intltool" => ("intltool".to_string(), false), + "gettext" | "gnu-gettext" => ("gettext".to_string(), true), + "texinfo" => ("texinfo".to_string(), false), + "help2man" => (String::new(), false), + "gtk-doc" => (String::new(), false), + "gobject-introspection" => (String::new(), false), + "vala" => (String::new(), false), + "python-setuptools" | "python-wheel" | "python-pip" + | "python-build" | "python-installer" => ("python".to_string(), false), + "ruby" | "rubygems" => (String::new(), false), + "nodejs" | "npm" => (String::new(), false), + "java-runtime" | "jre-openjdk" | "jdk-openjdk" => (String::new(), false), + "ghc" | "haskell" | "cabal" => (String::new(), false), + "go" | "golang" => (String::new(), false), + "lua" | "luajit" => ("lua".to_string(), true), + "tcl" | "tclsh" => (String::new(), false), + "tk" | "wish" => (String::new(), false), _ => (base.clone(), true), }; diff --git a/local/recipes/system/cub/source/cub-lib/src/lib.rs b/local/recipes/system/cub/source/cub-lib/src/lib.rs index 5c63aeb05..711de9d06 100644 --- a/local/recipes/system/cub/source/cub-lib/src/lib.rs +++ b/local/recipes/system/cub/source/cub-lib/src/lib.rs @@ -10,6 +10,7 @@ pub mod pkgbuild; pub mod rbpkgbuild; pub mod rbsrcinfo; pub mod recipe; +pub mod resolver; pub mod sandbox; pub mod storage; diff --git a/local/recipes/system/cub/source/cub-lib/src/package.rs b/local/recipes/system/cub/source/cub-lib/src/package.rs index 5454677ff..bf251de36 100644 --- a/local/recipes/system/cub/source/cub-lib/src/package.rs +++ b/local/recipes/system/cub/source/cub-lib/src/package.rs @@ -132,6 +132,7 @@ mod tests { original_pkgbuild: String::new(), conversion_status: ConversionStatus::Full, target: String::new(), + split_packages: Vec::new(), }, policy: PolicySection::default(), } diff --git a/local/recipes/system/cub/source/cub-lib/src/pkgbuild.rs b/local/recipes/system/cub/source/cub-lib/src/pkgbuild.rs index 26b77d423..cdc1781ad 100644 --- a/local/recipes/system/cub/source/cub-lib/src/pkgbuild.rs +++ b/local/recipes/system/cub/source/cub-lib/src/pkgbuild.rs @@ -126,6 +126,7 @@ pub fn convert_pkgbuild(content: &str) -> Result { original_pkgbuild: content.to_string(), conversion_status: status.clone(), target: "x86_64-unknown-redox".to_string(), + split_packages: split_packages, }, policy: PolicySection::default(), }; @@ -328,22 +329,25 @@ pub fn detect_build_template(content: &str) -> BuildTemplate { pub fn detect_linuxisms(content: &str) -> Vec { let lowered = content.to_ascii_lowercase(); let checks = [ - ( - "systemctl", - "uses systemctl, which is not available on Redox", - ), - ( - "/usr/lib/systemd", - "references /usr/lib/systemd, which is Linux-specific", - ), - ( - "systemd", - "references systemd, which is unavailable on Redox", - ), - ( - "/proc", - "references /proc, which may require Redox-specific adaptation", - ), + ("systemctl", "uses systemctl, which is not available on Redox"), + ("/usr/lib/systemd", "references /usr/lib/systemd, which is Linux-specific"), + ("systemd", "references systemd, which is unavailable on Redox"), + ("/proc", "references /proc, which may require Redox-specific adaptation"), + ("dbus-daemon", "uses dbus-daemon, verify D-Bus service compatibility"), + ("dbus-launch", "uses dbus-launch, verify D-Bus session compatibility"), + ("systemd-udevd", "references systemd-udevd, unavailable on Redox (use udev-shim)"), + ("udevadm", "uses udevadm, unavailable on Redox"), + ("/sys/class", "references /sys/class, may require Redox-specific adaptation"), + ("/sys/devices", "references /sys/devices, may require Redox-specific adaptation"), + ("/run/", "references /run/, may require Redox-specific adaptation"), + ("systemd-tmpfiles", "uses systemd-tmpfiles, unavailable on Redox"), + ("systemd-sysusers", "uses systemd-sysusers, unavailable on Redox"), + ("libsystemd", "links against libsystemd, unavailable on Redox"), + ("libudev", "links against libudev, may need udev-shim"), + ("polkit", "references polkit, verify PolicyKit compatibility"), + ("pam_systemd", "references pam_systemd, unavailable on Redox"), + ("elogind", "references elogind, unavailable on Redox"), + ("logind", "references logind, unavailable on Redox (use redbear-sessiond)"), ]; let mut warnings = Vec::new(); diff --git a/local/recipes/system/cub/source/cub-lib/src/rbpkgbuild.rs b/local/recipes/system/cub/source/cub-lib/src/rbpkgbuild.rs index 05768cc2d..cde1091c1 100644 --- a/local/recipes/system/cub/source/cub-lib/src/rbpkgbuild.rs +++ b/local/recipes/system/cub/source/cub-lib/src/rbpkgbuild.rs @@ -156,6 +156,8 @@ pub struct CompatSection { pub conversion_status: ConversionStatus, #[serde(default)] pub target: String, + #[serde(default)] + pub split_packages: Vec, } #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] diff --git a/local/recipes/system/cub/source/cub-lib/src/rbsrcinfo.rs b/local/recipes/system/cub/source/cub-lib/src/rbsrcinfo.rs index 9625de68f..5fcffadec 100644 --- a/local/recipes/system/cub/source/cub-lib/src/rbsrcinfo.rs +++ b/local/recipes/system/cub/source/cub-lib/src/rbsrcinfo.rs @@ -182,6 +182,7 @@ mod tests { original_pkgbuild: String::new(), conversion_status: ConversionStatus::Full, target: String::new(), + split_packages: Vec::new(), }, policy: PolicySection::default(), } diff --git a/local/recipes/system/cub/source/cub-lib/src/resolver.rs b/local/recipes/system/cub/source/cub-lib/src/resolver.rs new file mode 100644 index 000000000..c85b9ac4c --- /dev/null +++ b/local/recipes/system/cub/source/cub-lib/src/resolver.rs @@ -0,0 +1,438 @@ +use std::collections::HashMap; + +#[derive(Debug, Clone)] +pub struct ResolvedDep { + pub missing: String, + pub package: String, + pub kind: DepKind, +} + +#[derive(Debug, Clone, PartialEq)] +pub enum DepKind { + Command, + Header, + Library, + PkgConfig, + Unknown, +} + +pub struct DepResolver { + command_map: HashMap, + header_map: HashMap, + library_map: HashMap, + pkgconfig_map: HashMap, +} + +impl DepResolver { + pub fn new() -> Self { + let mut command_map = HashMap::new(); + let mut header_map = HashMap::new(); + let mut library_map = HashMap::new(); + + // ── Compilers ── + for cmd in &["gcc", "g++", "cc", "c++", "cpp"] { + command_map.insert(cmd.to_string(), "gcc-native".to_string()); + } + for cmd in &["clang", "clang++", "clang-cpp", "clang-cl"] { + command_map.insert(cmd.to_string(), "llvm-native".to_string()); + } + for cmd in &["rustc", "cargo", "rustdoc", "rustfmt", "clippy-driver"] { + command_map.insert(cmd.to_string(), "rust-native".to_string()); + } + command_map.insert("nasm".to_string(), "nasm".to_string()); + command_map.insert("yasm".to_string(), "yasm".to_string()); + + // ── Build systems ── + for cmd in &["make", "gmake", "gnumake"] { + command_map.insert(cmd.to_string(), "gnu-make".to_string()); + } + command_map.insert("cmake".to_string(), "cmake".to_string()); + command_map.insert("meson".to_string(), "meson".to_string()); + for cmd in &["ninja", "ninja-build"] { + command_map.insert(cmd.to_string(), "ninja-build".to_string()); + } + command_map.insert("autoconf".to_string(), "autoconf".to_string()); + command_map.insert("autoheader".to_string(), "autoconf".to_string()); + command_map.insert("autoreconf".to_string(), "autoconf".to_string()); + command_map.insert("autoscan".to_string(), "autoconf".to_string()); + command_map.insert("automake".to_string(), "automake".to_string()); + command_map.insert("aclocal".to_string(), "automake".to_string()); + command_map.insert("libtool".to_string(), "libtool".to_string()); + command_map.insert("libtoolize".to_string(), "libtool".to_string()); + command_map.insert("m4".to_string(), "m4".to_string()); + command_map.insert("pkg-config".to_string(), "pkg-config".to_string()); + command_map.insert("pkgconf".to_string(), "pkg-config".to_string()); + + // ── Binutils ── + for cmd in &["ld", "ar", "as", "nm", "strip", "objdump", "objcopy", + "ranlib", "readelf", "size", "strings", "addr2line"] { + command_map.insert(cmd.to_string(), "binutils-native".to_string()); + } + + // ── Text / file tools ── + command_map.insert("patch".to_string(), "patch".to_string()); + for cmd in &["sed", "gsed"] { + command_map.insert(cmd.to_string(), "sed".to_string()); + } + for cmd in &["grep", "egrep", "fgrep", "rgrep"] { + command_map.insert(cmd.to_string(), "gnu-grep".to_string()); + } + for cmd in &["awk", "gawk", "mawk", "nawk"] { + command_map.insert(cmd.to_string(), "gawk".to_string()); + } + for cmd in &["diff", "cmp", "diff3", "sdiff"] { + command_map.insert(cmd.to_string(), "diffutils".to_string()); + } + + // ── Archives ── + command_map.insert("tar".to_string(), "uutils-tar".to_string()); + for cmd in &["gzip", "gunzip", "zcat"] { + command_map.insert(cmd.to_string(), "gzip".to_string()); + } + for cmd in &["bzip2", "bunzip2"] { + command_map.insert(cmd.to_string(), "bzip2".to_string()); + } + for cmd in &["xz", "unxz", "lzma"] { + command_map.insert(cmd.to_string(), "xz".to_string()); + } + for cmd in &["zstd", "unzstd", "zstdcat"] { + command_map.insert(cmd.to_string(), "zstd".to_string()); + } + + // ── VCS / Network ── + command_map.insert("git".to_string(), "git".to_string()); + for cmd in &["curl", "wget"] { + command_map.insert(cmd.to_string(), "curl".to_string()); + } + + // ── Languages ── + for cmd in &["python", "python3"] { + command_map.insert(cmd.to_string(), "python312".to_string()); + } + command_map.insert("perl".to_string(), "perl5".to_string()); + command_map.insert("lua".to_string(), "lua".to_string()); + command_map.insert("ruby".to_string(), "ruby".to_string()); + + // ── Shell ── + for cmd in &["bash", "sh"] { + command_map.insert(cmd.to_string(), "bash".to_string()); + } + + // ── Parser generators ── + for cmd in &["flex", "lex"] { + command_map.insert(cmd.to_string(), "flex".to_string()); + } + for cmd in &["bison", "yacc"] { + command_map.insert(cmd.to_string(), "bison".to_string()); + } + command_map.insert("gperf".to_string(), "gperf".to_string()); + + // ── i18n / docs ── + for cmd in &["gettext", "msgfmt", "xgettext", "msgmerge"] { + command_map.insert(cmd.to_string(), "gettext".to_string()); + } + for cmd in &["intltool-update", "intltool-extract", "intltool-merge"] { + command_map.insert(cmd.to_string(), "intltool".to_string()); + } + for cmd in &["makeinfo", "texi2any", "texi2dvi", "texi2pdf"] { + command_map.insert(cmd.to_string(), "texinfo".to_string()); + } + for cmd in &["help2man"] { + command_map.insert(cmd.to_string(), "help2man".to_string()); + } + + // ── Core system ── + command_map.insert("install".to_string(), "coreutils".to_string()); + for cmd in &["cp", "mv", "rm", "ln", "mkdir", "rmdir", "chmod", "chown", + "cat", "echo", "touch", "ls", "find", "xargs", "dirname", + "basename", "tr", "cut", "sort", "uniq", "wc", "head", "tail"] { + command_map.insert(cmd.to_string(), "coreutils".to_string()); + } + + // ── Header files → packages ── + header_map.insert("stdio.h".to_string(), "relibc".to_string()); + header_map.insert("stdlib.h".to_string(), "relibc".to_string()); + header_map.insert("string.h".to_string(), "relibc".to_string()); + header_map.insert("unistd.h".to_string(), "relibc".to_string()); + header_map.insert("fcntl.h".to_string(), "relibc".to_string()); + header_map.insert("signal.h".to_string(), "relibc".to_string()); + header_map.insert("pthread.h".to_string(), "relibc".to_string()); + header_map.insert("dlfcn.h".to_string(), "relibc".to_string()); + header_map.insert("zlib.h".to_string(), "zlib".to_string()); + header_map.insert("bzlib.h".to_string(), "bzip2".to_string()); + header_map.insert("lzma.h".to_string(), "xz".to_string()); + header_map.insert("zstd.h".to_string(), "zstd".to_string()); + header_map.insert("openssl/ssl.h".to_string(), "openssl3".to_string()); + header_map.insert("curl/curl.h".to_string(), "curl".to_string()); + header_map.insert("expat.h".to_string(), "expat".to_string()); + header_map.insert("ffi.h".to_string(), "libffi".to_string()); + header_map.insert("pcre2.h".to_string(), "pcre2".to_string()); + header_map.insert("ncurses.h".to_string(), "ncurses".to_string()); + header_map.insert("readline/readline.h".to_string(), "readline".to_string()); + header_map.insert("sqlite3.h".to_string(), "sqlite3".to_string()); + header_map.insert("fontconfig/fontconfig.h".to_string(), "fontconfig".to_string()); + header_map.insert("freetype2/freetype/freetype.h".to_string(), "freetype".to_string()); + header_map.insert("harfbuzz/hb.h".to_string(), "harfbuzz".to_string()); + header_map.insert("png.h".to_string(), "libpng".to_string()); + header_map.insert("jpeglib.h".to_string(), "libjpeg-turbo".to_string()); + + // ── Library files → packages ── + library_map.insert("libz".to_string(), "zlib".to_string()); + library_map.insert("libbz2".to_string(), "bzip2".to_string()); + library_map.insert("liblzma".to_string(), "xz".to_string()); + library_map.insert("libzstd".to_string(), "zstd".to_string()); + library_map.insert("libssl".to_string(), "openssl3".to_string()); + library_map.insert("libcrypto".to_string(), "openssl3".to_string()); + library_map.insert("libcurl".to_string(), "curl".to_string()); + library_map.insert("libexpat".to_string(), "expat".to_string()); + library_map.insert("libffi".to_string(), "libffi".to_string()); + library_map.insert("libpcre2".to_string(), "pcre2".to_string()); + library_map.insert("libncurses".to_string(), "ncurses".to_string()); + library_map.insert("libreadline".to_string(), "readline".to_string()); + library_map.insert("libsqlite3".to_string(), "sqlite3".to_string()); + library_map.insert("libpng".to_string(), "libpng".to_string()); + library_map.insert("libjpeg".to_string(), "libjpeg-turbo".to_string()); + library_map.insert("libfontconfig".to_string(), "fontconfig".to_string()); + library_map.insert("libfreetype".to_string(), "freetype".to_string()); + library_map.insert("libharfbuzz".to_string(), "harfbuzz".to_string()); + library_map.insert("libxml2".to_string(), "libxml2".to_string()); + library_map.insert("libxslt".to_string(), "libxslt".to_string()); + + let mut pkgconfig_map = HashMap::new(); + pkgconfig_map.insert("gtk+-3.0".to_string(), "gtk".to_string()); + pkgconfig_map.insert("gtk4".to_string(), "gtk".to_string()); + pkgconfig_map.insert("glib-2.0".to_string(), "glib".to_string()); + pkgconfig_map.insert("gobject-2.0".to_string(), "glib".to_string()); + pkgconfig_map.insert("gio-2.0".to_string(), "glib".to_string()); + pkgconfig_map.insert("cairo".to_string(), "cairo".to_string()); + pkgconfig_map.insert("pango".to_string(), "pango".to_string()); + pkgconfig_map.insert("atk".to_string(), "atk".to_string()); + pkgconfig_map.insert("gdk-pixbuf-2.0".to_string(), "gdk-pixbuf".to_string()); + pkgconfig_map.insert("libpng".to_string(), "libpng".to_string()); + pkgconfig_map.insert("libjpeg".to_string(), "libjpeg-turbo".to_string()); + pkgconfig_map.insert("freetype2".to_string(), "freetype".to_string()); + pkgconfig_map.insert("fontconfig".to_string(), "fontconfig".to_string()); + pkgconfig_map.insert("harfbuzz".to_string(), "harfbuzz".to_string()); + pkgconfig_map.insert("openssl".to_string(), "openssl3".to_string()); + pkgconfig_map.insert("libcurl".to_string(), "curl".to_string()); + pkgconfig_map.insert("zlib".to_string(), "zlib".to_string()); + pkgconfig_map.insert("bzip2".to_string(), "bzip2".to_string()); + pkgconfig_map.insert("liblzma".to_string(), "xz".to_string()); + pkgconfig_map.insert("libzstd".to_string(), "zstd".to_string()); + pkgconfig_map.insert("expat".to_string(), "expat".to_string()); + pkgconfig_map.insert("libffi".to_string(), "libffi".to_string()); + pkgconfig_map.insert("libpcre2-8".to_string(), "pcre2".to_string()); + pkgconfig_map.insert("ncurses".to_string(), "ncurses".to_string()); + pkgconfig_map.insert("readline".to_string(), "readline".to_string()); + pkgconfig_map.insert("sqlite3".to_string(), "sqlite3".to_string()); + pkgconfig_map.insert("dbus-1".to_string(), "dbus".to_string()); + pkgconfig_map.insert("wayland-client".to_string(), "wayland".to_string()); + pkgconfig_map.insert("wayland-server".to_string(), "wayland".to_string()); + pkgconfig_map.insert("x11".to_string(), "libx11".to_string()); + pkgconfig_map.insert("xcb".to_string(), "libxcb".to_string()); + pkgconfig_map.insert("libxml-2.0".to_string(), "libxml2".to_string()); + pkgconfig_map.insert("libxslt".to_string(), "libxslt".to_string()); + pkgconfig_map.insert("alsa".to_string(), "alsa-lib".to_string()); + pkgconfig_map.insert("libpulse".to_string(), "pulseaudio".to_string()); + + Self { + command_map, + header_map, + library_map, + pkgconfig_map, + } + } + + pub fn scan_build_error(&self, error_output: &str) -> Vec { + let mut resolved = Vec::new(); + + for line in error_output.lines() { + let line_lower = line.to_ascii_lowercase(); + + if let Some(header) = extract_missing_header(&line_lower) { + let base = std::path::Path::new(&header) + .file_name() + .and_then(|n| n.to_str()) + .unwrap_or(&header) + .to_string(); + if let Some(pkg) = self.header_map.get(&base.to_ascii_lowercase()) { + resolved.push(ResolvedDep { + missing: header, + package: pkg.clone(), + kind: DepKind::Header, + }); + } + continue; + } + + if let Some(lib) = extract_missing_library(&line_lower) { + let key = lib.to_ascii_lowercase(); + let pkg = self + .library_map + .get(&key) + .or_else(|| self.library_map.get(&format!("lib{}", key))) + .cloned(); + if let Some(pkg) = pkg { + resolved.push(ResolvedDep { + missing: format!("lib{}", lib), + package: pkg, + kind: DepKind::Library, + }); + } + continue; + } + + if let Some(pc) = extract_missing_pkgconfig(&line_lower) { + let key = pc.to_ascii_lowercase(); + let pkg = self + .command_map + .get(&key) + .or_else(|| self.pkgconfig_map.get(&key)) + .cloned(); + if let Some(pkg) = pkg { + resolved.push(ResolvedDep { + missing: pc, + package: pkg, + kind: DepKind::PkgConfig, + }); + } + continue; + } + + if let Some(cmd) = extract_command_not_found(&line_lower) { + if let Some(pkg) = self.command_map.get(&cmd.to_ascii_lowercase()) { + if !resolved.iter().any(|r: &ResolvedDep| r.missing == cmd) { + resolved.push(ResolvedDep { + missing: cmd, + package: pkg.clone(), + kind: DepKind::Command, + }); + } + } + } + } + + resolved + } +} + +impl Default for DepResolver { + fn default() -> Self { + Self::new() + } +} + +fn extract_command_not_found(line_lower: &str) -> Option { + // "sh: line 1: gcc: command not found" + if line_lower.contains(": command not found") { + if let Some(rest) = line_lower.strip_suffix(": command not found") { + if let Some(cmd) = rest.rsplit(':').next() { + let cmd = cmd.trim(); + if !cmd.is_empty() && cmd.len() < 50 { + return Some(cmd.to_string()); + } + } + } + } + // "make: gcc: No such file or directory" + if line_lower.contains(": no such file or directory") { + let rest = line_lower.replace(": no such file or directory", ""); + if let Some(cmd) = rest.rsplit(':').next() { + let cmd = cmd.trim(); + if !cmd.is_empty() && cmd.len() < 50 { + return Some(cmd.to_string()); + } + } + } + None +} + +fn extract_missing_header(line_lower: &str) -> Option { + // "fatal error: X.h: No such file or directory" + if line_lower.contains("fatal error:") && line_lower.contains("no such file") { + if let Some(after) = line_lower.split("fatal error:").nth(1) { + if let Some(header) = after.split(':').next() { + let h = header.trim(); + if !h.is_empty() { + return Some(h.to_string()); + } + } + } + } + None +} + +fn extract_missing_library(line_lower: &str) -> Option { + if line_lower.contains("cannot find -l") { + for part in line_lower.split_whitespace() { + if part.starts_with("-l") && part.len() > 2 { + let mut lib = part[2..].to_string(); + lib = lib.trim_end_matches(|c: char| !c.is_alphanumeric() && c != '_').to_string(); + if !lib.is_empty() && lib.chars().all(|c| c.is_alphanumeric() || c == '_') { + return Some(lib); + } + } + } + } + None +} + +fn extract_missing_pkgconfig(line_lower: &str) -> Option { + // "No package 'gtk+-3.0' found" + if line_lower.contains("no package '") { + if let Some(after) = line_lower.split("no package '").nth(1) { + if let Some(pkg) = after.split('\'').next() { + let p = pkg.trim(); + if !p.is_empty() { + return Some(p.to_string()); + } + } + } + } + None +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn detects_command_not_found() { + let output = "sh: line 1: gcc: command not found\nmake: *** [all] Error 127"; + let resolver = DepResolver::new(); + let deps = resolver.scan_build_error(output); + assert!(deps.iter().any(|d| d.missing == "gcc" && d.package == "gcc-native")); + } + + #[test] + fn detects_missing_header() { + let output = "src/main.c:3:10: fatal error: zlib.h: No such file or directory"; + let resolver = DepResolver::new(); + let deps = resolver.scan_build_error(output); + assert!(deps.iter().any(|d| d.missing.contains("zlib.h") && d.package == "zlib")); + } + + #[test] + fn detects_missing_library() { + let output = "/usr/bin/ld: cannot find -lz: No such file or directory"; + let resolver = DepResolver::new(); + let deps = resolver.scan_build_error(output); + assert!(deps.iter().any(|d| d.package == "zlib")); + } + + #[test] + fn detects_missing_pkgconfig() { + let output = "Package gtk+-3.0 was not found in the pkg-config search path.\nNo package 'gtk+-3.0' found"; + let resolver = DepResolver::new(); + let deps = resolver.scan_build_error(output); + assert!(deps.iter().any(|d| d.missing == "gtk+-3.0")); + } + + #[test] + fn detects_make_command_not_found() { + let output = "make: cmake: No such file or directory"; + let resolver = DepResolver::new(); + let deps = resolver.scan_build_error(output); + assert!(deps.iter().any(|d| d.missing == "cmake" && d.package == "cmake")); + } +} diff --git a/recipes/tools/diffutils/diffutils.patch b/local/recipes/tools/diffutils/diffutils.patch similarity index 100% rename from recipes/tools/diffutils/diffutils.patch rename to local/recipes/tools/diffutils/diffutils.patch diff --git a/recipes/tools/diffutils/recipe.toml b/local/recipes/tools/diffutils/recipe.toml similarity index 71% rename from recipes/tools/diffutils/recipe.toml rename to local/recipes/tools/diffutils/recipe.toml index fb85db942..aebbdf397 100644 --- a/recipes/tools/diffutils/recipe.toml +++ b/local/recipes/tools/diffutils/recipe.toml @@ -19,7 +19,15 @@ COOKBOOK_CONFIGURE_FLAGS+=( gt_cv_locale_zh_CN=false gl_cv_func_working_mktime=yes ac_cv_func_getprogname=yes + gl_cv_header_working_stddef_h=yes + gl_cv_header_working_stdint_h=yes + gl_cv_header_sys_types_h=yes + ac_cv_header_wchar_h=yes + ac_cv_header_stdio_h=yes ) cookbook_configure rm -f "${COOKBOOK_STAGE}/usr/share/info/dir" """ + +[package] +description = "GNU diffutils (gnulib bypass enabled)" diff --git a/recipes/archives/uutils-tar/recipe.toml b/recipes/archives/uutils-tar/recipe.toml new file mode 100644 index 000000000..256beb9a4 --- /dev/null +++ b/recipes/archives/uutils-tar/recipe.toml @@ -0,0 +1,10 @@ +[source] +git = "https://github.com/uutils/tar" +rev = "f85dddd20e33bb08000b0ed4277067a68a18fee4" +shallow_clone = true + +[build] +template = "cargo" + +[package] +description = "GNU tar-compatible archive utility (Rust uutils implementation)" diff --git a/recipes/core/kernel/recipe.toml b/recipes/core/kernel/recipe.toml index cb98f1b06..455d7ed61 100644 --- a/recipes/core/kernel/recipe.toml +++ b/recipes/core/kernel/recipe.toml @@ -20,7 +20,7 @@ [source] git = "https://gitlab.redox-os.org/redox-os/kernel.git" rev = "866dfad0" -patches = ["../../../local/patches/kernel/redbear-consolidated.patch", "../../../local/patches/kernel/P8-msi.patch", "../../../local/patches/kernel/P2-rebrand-start-message.patch", "P0-eventfd-kernel.patch"] +patches = ["../../../local/patches/kernel/redbear-consolidated.patch", "../../../local/patches/kernel/P8-msi.patch", "../../../local/patches/kernel/P2-rebrand-start-message.patch", "P0-eventfd-kernel.patch", "../../../local/patches/kernel/P1-mkfifo-fifo-support.patch"] [build] template = "custom" diff --git a/recipes/dev/binutils-native b/recipes/dev/binutils-native new file mode 120000 index 000000000..c34b16fa8 --- /dev/null +++ b/recipes/dev/binutils-native @@ -0,0 +1 @@ +../../local/recipes/dev/binutils-native \ No newline at end of file diff --git a/recipes/dev/bison b/recipes/dev/bison new file mode 120000 index 000000000..b3b26baf1 --- /dev/null +++ b/recipes/dev/bison @@ -0,0 +1 @@ +../../local/recipes/dev/bison \ No newline at end of file diff --git a/recipes/dev/flex b/recipes/dev/flex new file mode 120000 index 000000000..000908918 --- /dev/null +++ b/recipes/dev/flex @@ -0,0 +1 @@ +../../local/recipes/dev/flex \ No newline at end of file diff --git a/recipes/dev/gcc-native b/recipes/dev/gcc-native new file mode 120000 index 000000000..4d707f373 --- /dev/null +++ b/recipes/dev/gcc-native @@ -0,0 +1 @@ +../../local/recipes/dev/gcc-native \ No newline at end of file diff --git a/recipes/dev/gnu-make b/recipes/dev/gnu-make new file mode 120000 index 000000000..f27e9cb8c --- /dev/null +++ b/recipes/dev/gnu-make @@ -0,0 +1 @@ +../../local/recipes/dev/gnu-make \ No newline at end of file diff --git a/recipes/dev/libtool b/recipes/dev/libtool new file mode 120000 index 000000000..992fca9e4 --- /dev/null +++ b/recipes/dev/libtool @@ -0,0 +1 @@ +../../local/recipes/dev/libtool \ No newline at end of file diff --git a/recipes/dev/llvm-native b/recipes/dev/llvm-native new file mode 120000 index 000000000..aa7804920 --- /dev/null +++ b/recipes/dev/llvm-native @@ -0,0 +1 @@ +../../local/recipes/dev/llvm-native \ No newline at end of file diff --git a/recipes/dev/m4 b/recipes/dev/m4 new file mode 120000 index 000000000..d45f16640 --- /dev/null +++ b/recipes/dev/m4 @@ -0,0 +1 @@ +../../local/recipes/dev/m4 \ No newline at end of file diff --git a/recipes/dev/meson b/recipes/dev/meson new file mode 120000 index 000000000..535caa6be --- /dev/null +++ b/recipes/dev/meson @@ -0,0 +1 @@ +../../local/recipes/dev/meson \ No newline at end of file diff --git a/recipes/dev/ninja-build b/recipes/dev/ninja-build new file mode 120000 index 000000000..062967b96 --- /dev/null +++ b/recipes/dev/ninja-build @@ -0,0 +1 @@ +../../local/recipes/dev/ninja-build \ No newline at end of file diff --git a/recipes/dev/rust-native b/recipes/dev/rust-native new file mode 120000 index 000000000..3c1963001 --- /dev/null +++ b/recipes/dev/rust-native @@ -0,0 +1 @@ +../../local/recipes/dev/rust-native \ No newline at end of file diff --git a/recipes/tools/diffutils b/recipes/tools/diffutils new file mode 120000 index 000000000..7b1b40d75 --- /dev/null +++ b/recipes/tools/diffutils @@ -0,0 +1 @@ +../../local/recipes/tools/diffutils \ No newline at end of file diff --git a/recipes/wip/doc/texinfo/recipe.toml b/recipes/wip/doc/texinfo/recipe.toml index f8086a213..e1a4ed329 100644 --- a/recipes/wip/doc/texinfo/recipe.toml +++ b/recipes/wip/doc/texinfo/recipe.toml @@ -1,5 +1,13 @@ -#TODO Compilation error [source] tar = "https://ftp.gnu.org/gnu/texinfo/texinfo-7.0.3.tar.xz" + [build] -template = "configure" +template = "custom" +script = """ +DYNAMIC_INIT +COOKBOOK_CONFIGURE_FLAGS+=( + --disable-nls + --disable-perl-xs +) +cookbook_configure +"""