Add CUB package builder and include in all Red Bear OS configs

CUB (Red Bear OS Package Builder) is a Rust CLI tool that combines package management and building:
- RBPKGBUILD parser (TOML format) with full spec support
- Cookbook adapter converting RBPKGBUILD to recipe.toml
- PKGBUILD (Arch AUR) to RBPKGBUILD conversion with Linuxism detection
- Dependency mapping (Arch to Redox names)
- pkgar package creation integration
- Build environment setup with Cookbook env vars
- CLI with pacman-style shortcuts: -S, -Ss, -B, -G, -Pi, -Sua, -Sc, --import-aur

28 cub-lib tests passing. cub-cli compiles with local pkgutils.
Added cub = {} to redbear-desktop, redbear-full, redbear-minimal configs.
Created recipe symlink and updated integrate-redbear.sh.

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-openagent)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
This commit is contained in:
2026-04-12 23:51:48 +01:00
parent ca13795f06
commit 59d4ba5dcf
19 changed files with 2789 additions and 0 deletions
+3
View File
@@ -15,3 +15,6 @@ redbear-release = {}
# Terminal file manager (Midnight Commander port)
mc = {}
# Package builder (cub -S/-B/-G CLI)
cub = {}
+3
View File
@@ -30,6 +30,9 @@ firmware-loader = {}
evdevd = {}
udev-shim = {}
# Package builder (cub -S/-B/-G CLI)
cub = {}
# RBOS meta-package (dependencies, default config)
redbear-meta = {}
+3
View File
@@ -15,6 +15,9 @@ redbear-release = {}
# Terminal file manager
mc = {}
# Package builder
cub = {}
# Firmware loading
firmware-loader = {}
+9
View File
@@ -0,0 +1,9 @@
[source]
path = "source"
[build]
template = "cargo"
cargopath = "cub-cli"
[package]
dependencies = ["pkgutils"]
@@ -0,0 +1,28 @@
[workspace]
resolver = "2"
members = [
"cub-lib",
"cub-cli",
]
default-members = [
"cub-cli",
]
[workspace.package]
version = "0.1.0"
description = "Red Bear OS Package Builder"
license = "MIT"
authors = ["Red Bear OS Contributors"]
repository = "https://gitlab.redox-os.org/redox-os/redox"
edition = "2021"
[workspace.dependencies]
serde = "1"
serde_derive = "1"
toml = "0.8.2"
thiserror = "2"
clap = { version = "4.3", features = ["cargo", "derive"] }
[patch.crates-io]
ring = { git = "https://gitlab.redox-os.org/redox-os/ring.git", branch = "redox-0.17.8" }
cc-11 = { git = "https://github.com/tea/cc-rs", branch="riscv-abi-arch-fix", package = "cc" }
@@ -0,0 +1,18 @@
[package]
name = "cub-cli"
default-run = "cub"
description = "Red Bear OS Package Builder CLI"
version.workspace = true
edition.workspace = true
license.workspace = true
[[bin]]
name = "cub"
path = "src/main.rs"
[dependencies]
cub-lib = { path = "../cub-lib" }
redox-pkg = { path = "../../../../../../recipes/core/pkgutils/source/pkg-lib", default-features = false, features = ["indicatif"] }
clap = { workspace = true }
termion = "4.0.6"
@@ -0,0 +1,801 @@
use std::cell::RefCell;
use std::env;
use std::ffi::OsString;
use std::fs;
use std::io;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::rc::Rc;
use std::time::{SystemTime, UNIX_EPOCH};
use clap::{Parser, Subcommand};
use cub::converter::{self, ConversionReport, ConversionResult};
use cub::error::CubError;
use cub::rbpkgbuild::RbPkgBuild;
use cub::rbsrcinfo::RbSrcInfo;
use cub::sandbox::SandboxConfig;
use pkg::callback::IndicatifCallback;
use pkg::{Library, PackageName};
/// Target triple used when none is supplied via the `TARGET` env var.
const DEFAULT_TARGET: &str = "x86_64-unknown-redox";
/// Install root when running on a non-Redox development host.
const HOST_INSTALL_PATH: &str = "/tmp/pkg_install";
/// Install root when running natively on Redox.
const REDOX_INSTALL_PATH: &str = "/";
/// Scratch directory used by the pkg backend for downloads.
const PKG_DOWNLOAD_DIR: &str = "/tmp/pkg_download/";
/// Scratch directory for cub's own caches (holds the BUR checkout).
const CUB_CACHE_DIR: &str = "/tmp/cub_cache/";
/// Default git remote for the BUR community recipe repository.
const DEFAULT_BUR_REPO_URL: &str = "https://gitlab.redox-os.org/redox-os/bur.git";
/// Base URL used to derive AUR clone URLs from bare package names.
const DEFAULT_AUR_BASE_URL: &str = "https://aur.archlinux.org";
/// File name of the pkgar public key inside a key directory.
const PUBLIC_KEY_FILE: &str = "id_ed25519.pub.toml";
/// File name of the pkgar secret key searched in standard locations.
const DEFAULT_SECRET_KEY_FILE: &str = "id_ed25519.toml";
/// Facade over `cub::cookbook` for materializing a Cookbook recipe
/// directory from a parsed RBPKGBUILD.
struct CookbookAdapter;

impl CookbookAdapter {
    /// Create `recipe_dir` (and parents) and write a generated
    /// `recipe.toml` for `rbpkg` into it.
    fn write_recipe_dir(rbpkg: &RbPkgBuild, recipe_dir: &Path) -> Result<(), CubError> {
        fs::create_dir_all(recipe_dir)?;
        let recipe = cub::cookbook::generate_recipe(rbpkg)?;
        fs::write(recipe_dir.join("recipe.toml"), recipe)?;
        Ok(())
    }
}
/// Facade over `cub::converter` for PKGBUILD -> RBPKGBUILD conversion.
struct PkgbuildConverter;

impl PkgbuildConverter {
    /// Convert raw PKGBUILD text; the result carries the converted package
    /// plus a report of warnings and manual actions required.
    fn convert(content: &str) -> Result<ConversionResult, CubError> {
        converter::convert_pkgbuild(content)
    }
}
/// Facade over `cub::package` for producing pkgar archives and their
/// repo-side metadata TOML.
struct PackageCreator;

impl PackageCreator {
    /// Sign and pack the contents of `stage_dir` into a `.pkgar` archive at
    /// `output_path`, using the secret key at `secret_key_path`.
    fn create_pkgar(
        stage_dir: &Path,
        output_path: &Path,
        secret_key_path: &Path,
    ) -> Result<(), CubError> {
        cub::package::PackageCreator::create_from_stage(stage_dir, output_path, secret_key_path)
    }

    /// Render the `<name>.toml` repository metadata for `rbpkg`.
    fn generate_package_toml(rbpkg: &RbPkgBuild) -> String {
        cub::package::PackageCreator::generate_package_toml(rbpkg)
    }
}
/// Top-level clap definition for the `cub` binary; with no arguments, clap
/// prints help instead of running (arg_required_else_help).
#[derive(Debug, Parser)]
#[command(name = "cub")]
#[command(version)]
#[command(about = "Red Bear OS Package Builder")]
#[command(arg_required_else_help = true)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}
/// Subcommands. Pacman-style shortcuts (`-S`, `-Ss`, `-B`, ...) are rewritten
/// into these long forms by `rewrite_shortcut_args` before clap parses them.
#[derive(Debug, Subcommand)]
enum Commands {
    /// Install a package from the official repo or BUR
    Install { package: String },
    /// Search packages in the official repo and cached BUR
    Search { query: String },
    /// Build and install a local RBPKGBUILD directory
    Build { dir: String },
    /// Fetch a BUR recipe into the current directory
    Get { package: String },
    /// Inspect an installed package or local RBPKGBUILD
    Inspect { target: String },
    /// Convert an AUR PKGBUILD into an RBPKGBUILD tree
    ImportAur { target: String },
    /// Update all installed packages
    UpdateAll,
    /// Remove cub and pkg download caches
    CleanCache,
}
/// Per-invocation environment: where packages install to and which target
/// triple the package library operates on.
struct AppContext {
    // Root directory packages are installed under ("/" on native Redox).
    install_path: PathBuf,
    // Target triple, e.g. "x86_64-unknown-redox".
    target: String,
}
impl AppContext {
    /// Derive install root and target from the runtime platform: native
    /// Redox installs to "/" and honours the `TARGET` env var; other hosts
    /// use a scratch root and the compiled-in default target.
    fn new() -> Self {
        let install_path = if cfg!(target_os = "redox") {
            PathBuf::from(REDOX_INSTALL_PATH)
        } else {
            PathBuf::from(HOST_INSTALL_PATH)
        };
        let target = if cfg!(target_os = "redox") {
            env::var("TARGET").unwrap_or_else(|_| DEFAULT_TARGET.to_string())
        } else {
            DEFAULT_TARGET.to_string()
        };
        Self {
            install_path,
            target,
        }
    }

    /// Open the package library against the configured remote repositories.
    fn open_library(&self) -> Result<Library, pkg::backend::Error> {
        let callback = new_pkg_callback();
        Library::new(&self.install_path, &self.target, callback)
    }

    /// Open the package library against a local on-disk repository
    /// (`source_dir`), verifying signatures with keys from `pubkey_dir`.
    fn open_local_library(
        &self,
        source_dir: &Path,
        pubkey_dir: &Path,
    ) -> Result<Library, pkg::backend::Error> {
        let callback = new_pkg_callback();
        Library::new_local(
            source_dir,
            pubkey_dir,
            &self.install_path,
            &self.target,
            callback,
        )
    }
}
/// Entry point: expand pacman-style shortcut flags, parse the CLI, and
/// dispatch to the matching command handler.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let args = rewrite_shortcut_args(env::args_os())?;
    let cli = Cli::parse_from(args);
    let context = AppContext::new();
    match cli.command {
        Commands::Install { package } => install_package(&context, &package)?,
        Commands::Search { query } => search_packages(&context, &query)?,
        Commands::Build { dir } => build_local_dir(&context, Path::new(&dir))?,
        Commands::Get { package } => fetch_bur_recipe(&package)?,
        Commands::Inspect { target } => inspect_target(&context, &target)?,
        Commands::ImportAur { target } => import_aur_target(&target)?,
        Commands::UpdateAll => update_all(&context)?,
        Commands::CleanCache => clean_cache()?,
    }
    Ok(())
}
/// Translate a leading pacman-style shortcut flag (e.g. `-S pkg`) into the
/// equivalent long-form subcommand invocation (`install pkg`). Only the
/// first argument after the binary name is considered; any other input is
/// returned untouched for clap to handle.
fn rewrite_shortcut_args(
    args: impl IntoIterator<Item = OsString>,
) -> Result<Vec<OsString>, Box<dyn std::error::Error>> {
    let original: Vec<OsString> = args.into_iter().collect();
    if original.len() <= 1 {
        // Nothing beyond the binary name — no shortcut to expand.
        return Ok(original);
    }
    let binary = original[0].clone();
    let tail = &original[1..];
    let Some(flag) = tail.first().and_then(|value| value.to_str()) else {
        // First argument is not valid UTF-8, so it cannot be a shortcut.
        return Ok(original);
    };
    match flag {
        "-S" => rewrite_value_command(binary, tail, "install", "package"),
        "-Ss" => rewrite_value_command(binary, tail, "search", "query"),
        "-B" => rewrite_value_command(binary, tail, "build", "dir"),
        "-G" => rewrite_value_command(binary, tail, "get", "package"),
        "-Pi" => rewrite_value_command(binary, tail, "inspect", "target"),
        "--import-aur" => rewrite_value_command(binary, tail, "import-aur", "target"),
        "-Sua" => rewrite_flag_command(binary, tail, "update-all"),
        "-Sc" => rewrite_flag_command(binary, tail, "clean-cache"),
        _ => Ok(original),
    }
}
/// Expand a value-taking shortcut (`-S foo ...`) into `<subcommand> foo ...`,
/// keeping any trailing arguments. Errors when the shortcut flag has no
/// following value; `value_name` names the missing operand in the message.
fn rewrite_value_command(
    binary: OsString,
    rest: &[OsString],
    subcommand: &str,
    value_name: &str,
) -> Result<Vec<OsString>, Box<dyn std::error::Error>> {
    let value = match rest.get(1) {
        Some(value) => value.clone(),
        None => {
            let message = format!("missing {value_name} for {}", rest[0].to_string_lossy());
            return Err(io::Error::new(io::ErrorKind::InvalidInput, message).into());
        }
    };
    let mut rewritten = Vec::with_capacity(rest.len() + 1);
    rewritten.push(binary);
    rewritten.push(OsString::from(subcommand));
    rewritten.push(value);
    rewritten.extend(rest.iter().skip(2).cloned());
    Ok(rewritten)
}
/// Expand a no-value shortcut (`-Sua ...`) into `<subcommand> ...`,
/// carrying over every argument after the flag itself.
fn rewrite_flag_command(
    binary: OsString,
    rest: &[OsString],
    subcommand: &str,
) -> Result<Vec<OsString>, Box<dyn std::error::Error>> {
    let rewritten: Vec<OsString> = std::iter::once(binary)
        .chain(std::iter::once(OsString::from(subcommand)))
        .chain(rest.iter().skip(1).cloned())
        .collect();
    Ok(rewritten)
}
/// Build the shared progress-reporting callback handed to the pkg library;
/// interactive mode enables progress-bar output.
fn new_pkg_callback() -> Rc<RefCell<IndicatifCallback>> {
    let mut callback = IndicatifCallback::new();
    callback.set_interactive(true);
    Rc::new(RefCell::new(callback))
}
/// Install `package` from the official repository; when the repo does not
/// know it, fall back to fetching and building its BUR recipe locally.
fn install_package(context: &AppContext, package: &str) -> Result<(), Box<dyn std::error::Error>> {
    let package_name = PackageName::new(package.to_string())?;
    let mut library = context.open_library()?;
    match library.install(vec![package_name.clone()], false) {
        Ok(()) => {
            // Transaction queued — commit it and report how many changes applied.
            let applied = apply_library_changes(&mut library)?;
            println!(
                "Installed {} from the official repository ({} change(s)).",
                package, applied
            );
            Ok(())
        }
        Err(pkg::backend::Error::PackageNotFound(_)) => {
            // Not in the official repo: try the community BUR recipe instead.
            println!(
                "{} was not found in the official repository. Trying BUR...",
                package
            );
            let bur_dir = ensure_bur_package_dir(package)?;
            build_local_dir(context, &bur_dir)
        }
        Err(error) => Err(Box::new(error)),
    }
}
/// Search both the official repository and the locally cached BUR checkout
/// for `query`, printing the two result sets separately.
fn search_packages(context: &AppContext, query: &str) -> Result<(), Box<dyn std::error::Error>> {
    let mut library = context.open_library()?;
    let official_matches = library.search(query)?;
    let bur_matches = search_cached_bur(query)?;
    if official_matches.is_empty() {
        println!("Official repo: no matches for {query:?}");
    } else {
        println!("Official repo:");
        // `score` is the library-provided match relevance for this name.
        for (name, score) in official_matches {
            println!("  {} ({score:.2})", name);
        }
    }
    if bur_matches.is_empty() {
        println!("Cached BUR: no matches for {query:?}");
    } else {
        println!("Cached BUR:");
        for entry in bur_matches {
            if let Some(description) = &entry.description {
                println!("  {} - {}", entry.name, description);
            } else {
                println!("  {}", entry.name);
            }
        }
    }
    Ok(())
}
/// Build the RBPKGBUILD in `dir` via Cookbook (`repo cook`) inside a
/// sandboxed temp workspace, package the stage output as a signed pkgar in
/// a throwaway local repository, and install it from there.
fn build_local_dir(context: &AppContext, dir: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let rbpkg_path = dir.join("RBPKGBUILD");
    let rbpkg = RbPkgBuild::from_file(&rbpkg_path)?;
    rbpkg.validate()?;
    // Fresh workspace per build; recipe dir is named after the package.
    let work_dir = create_temp_dir("cub-build")?;
    let recipe_dir = work_dir.join(&rbpkg.package.name);
    CookbookAdapter::write_recipe_dir(&rbpkg, &recipe_dir)?;
    let sandbox = SandboxConfig::new(&work_dir);
    sandbox.setup()?;
    // Delegate the actual compilation to Cookbook's `repo cook`.
    let mut command = Command::new("repo");
    command.arg("cook");
    command.arg(&recipe_dir);
    command.envs(sandbox.env_vars());
    let status = command.status()?;
    if !status.success() {
        return Err(Box::new(CubError::BuildFailed(format!(
            "repo cook {} failed with status {status}",
            recipe_dir.display()
        ))));
    }
    let stage_dir = find_stage_dir(&sandbox, &work_dir)?;
    let secret_key_path = resolve_secret_key_path()?;
    let public_key_dir = resolve_public_key_dir(&secret_key_path)?;
    // Lay out a minimal local repo (<repo>/<target>/<name>.pkgar + .toml)
    // so the ordinary install path can consume the freshly built package.
    let local_repo_dir = work_dir.join("local-repo");
    let target_repo_dir = local_repo_dir.join(&context.target);
    fs::create_dir_all(&target_repo_dir)?;
    let pkgar_path = target_repo_dir.join(format!("{}.pkgar", rbpkg.package.name));
    PackageCreator::create_pkgar(&stage_dir, &pkgar_path, &secret_key_path)?;
    let package_toml_path = target_repo_dir.join(format!("{}.toml", rbpkg.package.name));
    fs::write(
        package_toml_path,
        PackageCreator::generate_package_toml(&rbpkg),
    )?;
    let package_name = PackageName::new(rbpkg.package.name.clone())?;
    let mut library = context.open_local_library(&local_repo_dir, &public_key_dir)?;
    library.install(vec![package_name], false)?;
    let applied = apply_library_changes(&mut library)?;
    println!(
        "Built and installed {} successfully ({} change(s)).",
        rbpkg.package.name, applied
    );
    Ok(())
}
/// Copy the BUR recipe directory for `package` into the current working
/// directory. Refuses to overwrite an existing destination.
fn fetch_bur_recipe(package: &str) -> Result<(), Box<dyn std::error::Error>> {
    let source_dir = ensure_bur_package_dir(package)?;
    let destination = env::current_dir()?.join(package);
    if destination.exists() {
        return Err(io::Error::new(
            io::ErrorKind::AlreadyExists,
            format!("destination already exists: {}", destination.display()),
        )
        .into());
    }
    copy_dir_recursive(&source_dir, &destination)?;
    println!(
        "Fetched BUR recipe {} to {}.",
        package,
        destination.display()
    );
    Ok(())
}
/// Inspect `target`: when it names an existing path, dump the local
/// RBPKGBUILD; otherwise query the package library for an installed package.
fn inspect_target(context: &AppContext, target: &str) -> Result<(), Box<dyn std::error::Error>> {
    let path = Path::new(target);
    if path.exists() {
        inspect_rbpkgbuild_path(path)?;
        return Ok(());
    }
    let mut library = context.open_library()?;
    let info = library.info(PackageName::new(target.to_string())?)?;
    println!("{info:#?}");
    Ok(())
}
/// Clone an AUR package (by name or git URL), convert its PKGBUILD to an
/// RBPKGBUILD tree in the current directory, and record the original
/// PKGBUILD plus a conversion report under `import/`.
fn import_aur_target(target: &str) -> Result<(), Box<dyn std::error::Error>> {
    let repo_url = aur_repo_url(target);
    let clone_dir = create_temp_dir("cub-aur")?;
    // Shallow clone: only the PKGBUILD at HEAD is needed.
    let status = Command::new("git")
        .arg("clone")
        .arg("--depth")
        .arg("1")
        .arg(&repo_url)
        .arg(&clone_dir)
        .status()?;
    if !status.success() {
        return Err(Box::new(CubError::BuildFailed(format!(
            "failed to clone AUR source from {repo_url}"
        ))));
    }
    let pkgbuild_path = clone_dir.join("PKGBUILD");
    let pkgbuild = fs::read_to_string(&pkgbuild_path)?;
    let conversion = PkgbuildConverter::convert(&pkgbuild)?;
    // Output tree: RBPKGBUILD + .RBSRCINFO at the top, originals under import/.
    let output_dir = env::current_dir()?.join(&conversion.rbpkg.package.name);
    fs::create_dir_all(&output_dir)?;
    fs::create_dir_all(output_dir.join("patches"))?;
    fs::create_dir_all(output_dir.join("import"))?;
    fs::write(output_dir.join("RBPKGBUILD"), conversion.rbpkg.to_toml()?)?;
    fs::write(
        output_dir.join(".RBSRCINFO"),
        RbSrcInfo::from_rbpkgbuild(&conversion.rbpkg).to_string(),
    )?;
    fs::write(output_dir.join("import").join("PKGBUILD"), pkgbuild)?;
    let report = render_conversion_report(&conversion.report);
    fs::write(output_dir.join("import").join("report.txt"), &report)?;
    println!("Imported AUR package into {}", output_dir.display());
    println!("{report}");
    Ok(())
}
/// Update every installed package (an empty list means "all" to the
/// library) and commit the resulting transaction.
fn update_all(context: &AppContext) -> Result<(), Box<dyn std::error::Error>> {
    let mut library = context.open_library()?;
    library.update(Vec::new())?;
    let applied = apply_library_changes(&mut library)?;
    println!("Updated installed packages ({} change(s)).", applied);
    Ok(())
}
/// Delete both the pkg download cache and cub's own cache directory
/// (including the BUR checkout). Missing directories are ignored.
fn clean_cache() -> Result<(), Box<dyn std::error::Error>> {
    remove_dir_if_exists(Path::new(PKG_DOWNLOAD_DIR))?;
    remove_dir_if_exists(Path::new(CUB_CACHE_DIR))?;
    println!("Removed package caches from {PKG_DOWNLOAD_DIR} and {CUB_CACHE_DIR}.");
    Ok(())
}
/// Commit the library's staged transaction, returning the number of applied
/// changes. On failure the transaction is aborted best-effort before the
/// original error is propagated.
fn apply_library_changes(library: &mut Library) -> Result<usize, Box<dyn std::error::Error>> {
    match library.apply() {
        Ok(changes) => Ok(changes),
        Err(error) => {
            // Abort is best-effort: report its failure but surface the apply error.
            if let Err(abort_error) = library.abort() {
                eprintln!("Failed to abort package transaction: {abort_error}");
            }
            Err(Box::new(error))
        }
    }
}
/// Pretty-print every section of an RBPKGBUILD. `path` may be the file
/// itself or a directory containing one.
fn inspect_rbpkgbuild_path(path: &Path) -> Result<(), Box<dyn std::error::Error>> {
    let rbpkg_path = if path.is_dir() {
        path.join("RBPKGBUILD")
    } else {
        path.to_path_buf()
    };
    let rbpkg = RbPkgBuild::from_file(&rbpkg_path)?;
    println!("Package:");
    println!("  name = {}", rbpkg.package.name);
    println!("  version = {}", rbpkg.package.version);
    println!("  release = {}", rbpkg.package.release);
    println!("  description = {}", rbpkg.package.description);
    println!("  homepage = {}", rbpkg.package.homepage);
    println!("  license = {:?}", rbpkg.package.license);
    println!("  architectures = {:?}", rbpkg.package.architectures);
    println!("  maintainers = {:?}", rbpkg.package.maintainers);
    println!("Source:");
    for source in &rbpkg.source.sources {
        println!(
            "  {:?}: url={}, rev={}, branch={}, sha256={}",
            source.source_type, source.url, source.rev, source.branch, source.sha256
        );
    }
    println!("Dependencies:");
    println!("  build = {:?}", rbpkg.dependencies.build);
    println!("  runtime = {:?}", rbpkg.dependencies.runtime);
    println!("  check = {:?}", rbpkg.dependencies.check);
    println!("  optional = {:?}", rbpkg.dependencies.optional);
    println!("  provides = {:?}", rbpkg.dependencies.provides);
    println!("  conflicts = {:?}", rbpkg.dependencies.conflicts);
    println!("Build:");
    println!("  template = {:?}", rbpkg.build.template);
    println!("  release = {}", rbpkg.build.release);
    println!("  features = {:?}", rbpkg.build.features);
    println!("  args = {:?}", rbpkg.build.args);
    println!("  build_dir = {}", rbpkg.build.build_dir);
    println!("  prepare = {:?}", rbpkg.build.prepare);
    println!("  build_script = {:?}", rbpkg.build.build_script);
    println!("  check = {:?}", rbpkg.build.check);
    println!("  install_script = {:?}", rbpkg.build.install_script);
    println!("Install:");
    println!("  bins = {:?}", rbpkg.install.bins);
    println!("  libs = {:?}", rbpkg.install.libs);
    println!("  headers = {:?}", rbpkg.install.headers);
    println!("  docs = {:?}", rbpkg.install.docs);
    println!("  man = {:?}", rbpkg.install.man);
    println!("Patches:");
    println!("  files = {:?}", rbpkg.patches.files);
    println!("Compat:");
    println!("  imported_from = {}", rbpkg.compat.imported_from);
    println!("  conversion_status = {:?}", rbpkg.compat.conversion_status);
    println!("  target = {}", rbpkg.compat.target);
    println!("Policy:");
    println!("  allow_network = {}", rbpkg.policy.allow_network);
    println!("  allow_tests = {}", rbpkg.policy.allow_tests);
    println!("  review_required = {}", rbpkg.policy.review_required);
    println!("Generated .RBSRCINFO:");
    println!("{}", rbpkg.to_srcinfo().to_string());
    Ok(())
}
/// A BUR search hit: the recipe directory name plus the optional
/// description parsed from its RBPKGBUILD.
struct BurMatch {
    name: String,
    description: Option<String>,
}
/// Case-insensitively search the cached BUR checkout for `query`, matching
/// directory names, package names, and descriptions. Returns an empty list
/// (no error) when the cache has never been cloned.
fn search_cached_bur(query: &str) -> Result<Vec<BurMatch>, Box<dyn std::error::Error>> {
    let repo_dir = bur_repo_dir();
    if !repo_dir.exists() {
        return Ok(Vec::new());
    }
    let mut matches = Vec::new();
    let lowered_query = query.to_ascii_lowercase();
    for entry in fs::read_dir(repo_dir)? {
        let entry = entry?;
        let path = entry.path();
        if !path.is_dir() {
            continue;
        }
        let Some(name) = path.file_name().and_then(|value| value.to_str()) else {
            continue;
        };
        if name == ".git" {
            continue;
        }
        let rbpkg_path = path.join("RBPKGBUILD");
        let mut description = None;
        // Directory name alone can already satisfy the query.
        let mut matched = name.to_ascii_lowercase().contains(&lowered_query);
        if rbpkg_path.is_file() {
            // Unparseable RBPKGBUILDs are skipped silently — directory-name
            // matching above still applies to them.
            if let Ok(pkg) = RbPkgBuild::from_file(&rbpkg_path) {
                if pkg
                    .package
                    .name
                    .to_ascii_lowercase()
                    .contains(&lowered_query)
                    || pkg
                        .package
                        .description
                        .to_ascii_lowercase()
                        .contains(&lowered_query)
                {
                    matched = true;
                }
                if !pkg.package.description.trim().is_empty() {
                    description = Some(pkg.package.description);
                }
            }
        }
        if matched {
            matches.push(BurMatch {
                name: name.to_string(),
                description,
            });
        }
    }
    // Stable, alphabetical output order.
    matches.sort_by(|left, right| left.name.cmp(&right.name));
    Ok(matches)
}
/// Sync the BUR cache and return the directory for `package`, or a
/// PackageNotFound-style error when the recipe does not exist in BUR.
fn ensure_bur_package_dir(package: &str) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let repo_dir = sync_bur_repo()?;
    let package_dir = repo_dir.join(package);
    if package_dir.is_dir() {
        Ok(package_dir)
    } else {
        Err(Box::new(CubError::PackageNotFound(format!(
            "{package} not found in BUR cache {}",
            repo_dir.display()
        ))))
    }
}
/// Ensure the BUR cache checkout exists and is current: fast-forward pull
/// when already cloned, otherwise a fresh clone. Returns the checkout path.
fn sync_bur_repo() -> Result<PathBuf, Box<dyn std::error::Error>> {
    let repo_dir = bur_repo_dir();
    let parent = repo_dir
        .parent()
        .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidInput, "invalid BUR cache path"))?;
    fs::create_dir_all(parent)?;
    if repo_dir.join(".git").is_dir() {
        // Existing checkout: refresh with a fast-forward-only pull so a
        // locally diverged cache fails loudly instead of merging.
        let status = Command::new("git")
            .arg("pull")
            .arg("--ff-only")
            .current_dir(&repo_dir)
            .status()?;
        if !status.success() {
            return Err(Box::new(CubError::BuildFailed(format!(
                "failed to update BUR cache at {}",
                repo_dir.display()
            ))));
        }
    } else {
        let status = Command::new("git")
            .arg("clone")
            .arg(default_bur_repo_url())
            .arg(&repo_dir)
            .status()?;
        if !status.success() {
            return Err(Box::new(CubError::BuildFailed(format!(
                "failed to clone BUR repository into {}",
                repo_dir.display()
            ))));
        }
    }
    Ok(repo_dir)
}
/// BUR git URL: the `CUB_BUR_REPO_URL` env override when set, otherwise the
/// built-in default remote.
fn default_bur_repo_url() -> String {
    match env::var("CUB_BUR_REPO_URL") {
        Ok(url) => url,
        Err(_) => DEFAULT_BUR_REPO_URL.to_string(),
    }
}
/// Location of the cached BUR checkout inside cub's cache directory.
fn bur_repo_dir() -> PathBuf {
    let mut dir = PathBuf::from(CUB_CACHE_DIR);
    dir.push("bur");
    dir
}
/// Resolve an import target to a clonable git URL. Anything that already
/// looks like a URL (contains a scheme separator or ends in `.git`) passes
/// through unchanged; a bare name is treated as an AUR package name.
fn aur_repo_url(target: &str) -> String {
    let looks_like_url = target.contains("://") || target.ends_with(".git");
    if looks_like_url {
        return target.to_string();
    }
    format!("{DEFAULT_AUR_BASE_URL}/{target}.git")
}
/// Locate the pkgar secret key. Search order: the `CUB_PKGAR_SECRET_KEY`
/// env override, then `~/.pkg`, `/etc/pkg`, `/pkg`, and finally the current
/// directory. Errors when no candidate file exists.
fn resolve_secret_key_path() -> Result<PathBuf, Box<dyn std::error::Error>> {
    if let Some(path) = env::var_os("CUB_PKGAR_SECRET_KEY") {
        let candidate = PathBuf::from(path);
        if candidate.is_file() {
            return Ok(candidate);
        }
        // NOTE: a set-but-missing override silently falls through to the
        // standard locations below.
    }
    let home = env::var_os("HOME").map(PathBuf::from);
    let candidates = [
        home.as_ref()
            .map(|path| path.join(".pkg").join(DEFAULT_SECRET_KEY_FILE)),
        Some(PathBuf::from("/etc/pkg").join(DEFAULT_SECRET_KEY_FILE)),
        Some(PathBuf::from("/pkg").join(DEFAULT_SECRET_KEY_FILE)),
        Some(env::current_dir()?.join(DEFAULT_SECRET_KEY_FILE)),
    ];
    for candidate in candidates.into_iter().flatten() {
        if candidate.is_file() {
            return Ok(candidate);
        }
    }
    Err(Box::new(CubError::BuildFailed(
        "could not locate a pkgar secret key; set CUB_PKGAR_SECRET_KEY".to_string(),
    )))
}
/// Find the directory holding the pkgar public key: the
/// `CUB_PKGAR_PUBKEY_DIR` env override when it actually contains the key
/// file, otherwise the directory of the secret key.
fn resolve_public_key_dir(secret_key_path: &Path) -> Result<PathBuf, Box<dyn std::error::Error>> {
    if let Some(path) = env::var_os("CUB_PKGAR_PUBKEY_DIR") {
        let candidate = PathBuf::from(path);
        if candidate.join(PUBLIC_KEY_FILE).is_file() {
            return Ok(candidate);
        }
    }
    let Some(parent) = secret_key_path.parent() else {
        return Err(Box::new(CubError::BuildFailed(format!(
            "could not determine public key directory for {}",
            secret_key_path.display()
        ))));
    };
    if parent.join(PUBLIC_KEY_FILE).is_file() {
        Ok(parent.to_path_buf())
    } else {
        Err(Box::new(CubError::BuildFailed(format!(
            "missing {} in {}",
            PUBLIC_KEY_FILE,
            parent.display()
        ))))
    }
}
/// Locate the populated build-output ("stage") directory after a cook:
/// first the sandbox's configured stage/destdir paths and the obvious
/// `stage`/`destdir` children of `search_root`, then a depth-first walk for
/// any non-empty directory named `stage` or `destdir`.
fn find_stage_dir(
    sandbox: &SandboxConfig,
    search_root: &Path,
) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let direct_candidates = [
        sandbox.stage_dir.clone(),
        sandbox.destdir.clone(),
        search_root.join("stage"),
        search_root.join("destdir"),
    ];
    for candidate in direct_candidates {
        if directory_has_entries(&candidate)? {
            return Ok(candidate);
        }
    }
    // Fallback: walk the tree looking for a non-empty stage/destdir.
    let mut stack = vec![search_root.to_path_buf()];
    while let Some(dir) = stack.pop() {
        for entry in fs::read_dir(&dir)? {
            let entry = entry?;
            let path = entry.path();
            if !path.is_dir() {
                continue;
            }
            let Some(name) = path.file_name().and_then(|value| value.to_str()) else {
                continue;
            };
            if matches!(name, "stage" | "destdir") && directory_has_entries(&path)? {
                return Ok(path);
            }
            stack.push(path);
        }
    }
    Err(Box::new(CubError::BuildFailed(format!(
        "unable to locate a populated stage directory under {}",
        search_root.display()
    ))))
}
/// True when `path` is a directory containing at least one entry.
/// Non-directories (including missing paths) report `false` without error.
fn directory_has_entries(path: &Path) -> Result<bool, io::Error> {
    if !path.is_dir() {
        return Ok(false);
    }
    let mut entries = fs::read_dir(path)?;
    match entries.next() {
        // Surface a read error on the first entry instead of hiding it.
        Some(entry) => entry.map(|_| true),
        None => Ok(false),
    }
}
/// Render a conversion report as human-readable text: a status line, then
/// optional "Warnings" and "Actions required" bullet lists (each preceded
/// by a blank line).
fn render_conversion_report(report: &ConversionReport) -> String {
    use std::fmt::Write as _;
    let mut text = String::new();
    let _ = writeln!(text, "Conversion: {:?}", report.status);
    if !report.warnings.is_empty() {
        text.push_str("\nWarnings:\n");
        for warning in &report.warnings {
            let _ = writeln!(text, "- {warning}");
        }
    }
    if !report.actions_required.is_empty() {
        text.push_str("\nActions required:\n");
        for action in &report.actions_required {
            let _ = writeln!(text, "- {action}");
        }
    }
    text
}
/// Create a unique scratch directory under the system temp dir, named
/// `<prefix>-<pid>-<nanos>-<attempt>`. Up to 128 attempts before giving up.
///
/// Fix: the previous version checked `exists()` and then called
/// `create_dir_all`, which succeeds even when the directory already exists —
/// a TOCTOU race that could hand two callers the same directory. Using
/// `fs::create_dir` makes creation itself the atomic uniqueness check.
fn create_temp_dir(prefix: &str) -> Result<PathBuf, Box<dyn std::error::Error>> {
    let base = env::temp_dir();
    let nanos = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|duration| duration.as_nanos())
        .unwrap_or(0);
    for attempt in 0..128 {
        let candidate = base.join(format!("{prefix}-{}-{nanos}-{attempt}", std::process::id()));
        match fs::create_dir(&candidate) {
            Ok(()) => return Ok(candidate),
            // Taken by a concurrent caller (or leftover) — try the next suffix.
            Err(error) if error.kind() == io::ErrorKind::AlreadyExists => continue,
            Err(error) => return Err(error.into()),
        }
    }
    Err(io::Error::new(
        io::ErrorKind::AlreadyExists,
        format!("failed to allocate temporary directory for {prefix}"),
    )
    .into())
}
/// Recursively copy the contents of `src` into `dst`, creating `dst` (and
/// parents) first. Symlinked directories are followed — NOTE(review):
/// confirm that is acceptable for recipe trees.
fn copy_dir_recursive(src: &Path, dst: &Path) -> Result<(), Box<dyn std::error::Error>> {
    fs::create_dir_all(dst)?;
    for child in fs::read_dir(src)? {
        let child = child?;
        let source = child.path();
        let target = dst.join(child.file_name());
        if source.is_dir() {
            copy_dir_recursive(&source, &target)?;
        } else {
            fs::copy(&source, &target)?;
        }
    }
    Ok(())
}
/// Delete `path` and everything under it when present; a missing path is a
/// successful no-op.
fn remove_dir_if_exists(path: &Path) -> Result<(), io::Error> {
    if !path.exists() {
        return Ok(());
    }
    fs::remove_dir_all(path)
}
@@ -0,0 +1,33 @@
[package]
name = "cub-lib"
description = "Red Bear OS Package Builder Library"
version.workspace = true
edition.workspace = true
license.workspace = true
[lib]
name = "cub"
doctest = false
[dependencies]
serde = { workspace = true }
serde_derive = { workspace = true }
toml = { workspace = true }
thiserror = { workspace = true }
hex = "0.4"
blake3 = "1"
walkdir = "2"
tempfile = "3"
# pkgar integration for package creation
pkgar = { version = "0.2.2", optional = true }
pkgar-core = { version = "0.2.2", optional = true }
pkgar-keys = { version = "0.2.2", optional = true }
# HTTP for source fetching
reqwest = { version = "0.12", default-features = false, features = ["blocking", "rustls-tls"], optional = true }
[features]
default = ["full"]
full = ["pkgar", "pkgar-core", "pkgar-keys", "reqwest"]
@@ -0,0 +1,463 @@
use crate::deps::map_dependency;
use crate::error::CubError;
use crate::rbpkgbuild::{
BuildSection, BuildTemplate, CompatSection, ConversionStatus, DependenciesSection,
InstallSection, PackageSection, PatchesSection, PolicySection, RbPkgBuild, SourceEntry,
SourceSection, SourceType,
};
/// Outcome of a PKGBUILD conversion: the generated RBPKGBUILD plus the
/// report describing how faithful the conversion was.
pub struct ConversionResult {
    pub rbpkg: RbPkgBuild,
    pub report: ConversionReport,
}
/// Human-facing summary of a conversion: overall status, non-fatal
/// warnings, and actions the packager must perform manually.
pub struct ConversionReport {
    pub status: ConversionStatus,
    pub warnings: Vec<String>,
    pub actions_required: Vec<String>,
}
/// Convert raw Arch PKGBUILD text into an RBPKGBUILD.
///
/// Extracts the standard fields with a line-oriented shell-assignment
/// parser (no full bash evaluation — `$pkgver`-style interpolation is NOT
/// expanded), maps dependencies to Redox names, detects the build template
/// and known Linuxisms, and validates the result. The status is `Full` only
/// when no warnings or manual actions were produced.
///
/// Errors when `pkgname` or `pkgver` is missing, or when the assembled
/// RBPKGBUILD fails validation.
pub fn convert_pkgbuild(content: &str) -> Result<ConversionResult, CubError> {
    let pkgname = extract_scalar_assignment(content, "pkgname")
        .ok_or_else(|| CubError::Conversion("missing pkgname in PKGBUILD".to_string()))?;
    let pkgver = extract_scalar_assignment(content, "pkgver")
        .ok_or_else(|| CubError::Conversion("missing pkgver in PKGBUILD".to_string()))?;
    // A malformed or absent pkgrel silently defaults to 1.
    let pkgrel = extract_scalar_assignment(content, "pkgrel")
        .and_then(|value| value.parse::<u32>().ok())
        .unwrap_or(1);
    let pkgdesc = extract_scalar_assignment(content, "pkgdesc").unwrap_or_default();
    let url = extract_scalar_assignment(content, "url").unwrap_or_default();
    let licenses = extract_array_assignment(content, "license").unwrap_or_default();
    let depends = extract_array_assignment(content, "depends").unwrap_or_default();
    let makedepends = extract_array_assignment(content, "makedepends").unwrap_or_default();
    let checkdepends = extract_array_assignment(content, "checkdepends").unwrap_or_default();
    let sources = extract_array_assignment(content, "source").unwrap_or_default();
    let sha256sums = extract_array_assignment(content, "sha256sums").unwrap_or_default();
    let template = detect_build_template(content);
    let mut warnings = detect_linuxisms(content);
    let mut actions_required = Vec::new();
    let mapped_runtime = map_dep_list(&depends, &mut warnings, &mut actions_required);
    let mapped_build = map_dep_list(&makedepends, &mut warnings, &mut actions_required);
    let mapped_check = map_dep_list(&checkdepends, &mut warnings, &mut actions_required);
    if sources.is_empty() {
        warnings.push("PKGBUILD does not define any source entries".to_string());
    }
    let status = if warnings.is_empty() && actions_required.is_empty() {
        ConversionStatus::Full
    } else {
        ConversionStatus::Partial
    };
    let rbpkg = RbPkgBuild {
        format: 1,
        package: PackageSection {
            name: sanitize_pkgname(&pkgname),
            version: pkgver,
            release: pkgrel,
            description: pkgdesc,
            homepage: url,
            license: licenses,
            architectures: vec!["x86_64-unknown-redox".to_string()],
            maintainers: Vec::new(),
        },
        source: SourceSection {
            // sha256sums pairs with sources positionally, as in makepkg.
            sources: sources
                .into_iter()
                .enumerate()
                .map(|(index, source)| {
                    source_from_arch(source, sha256sums.get(index).map(String::as_str))
                })
                .collect(),
        },
        dependencies: DependenciesSection {
            build: mapped_build,
            runtime: mapped_runtime,
            check: mapped_check,
            optional: Vec::new(),
            provides: Vec::new(),
            conflicts: Vec::new(),
        },
        build: BuildSection {
            template,
            ..BuildSection::default()
        },
        install: InstallSection::default(),
        patches: PatchesSection::default(),
        compat: CompatSection {
            // Keep the original PKGBUILD verbatim for auditing the import.
            imported_from: "aur".to_string(),
            original_pkgbuild: content.to_string(),
            conversion_status: status.clone(),
            target: "x86_64-unknown-redox".to_string(),
        },
        policy: PolicySection::default(),
    };
    rbpkg.validate()?;
    // Smoke-check that .RBSRCINFO generation does not panic on this package.
    let _ = rbpkg.to_srcinfo();
    Ok(ConversionResult {
        rbpkg,
        report: ConversionReport {
            status,
            warnings,
            actions_required,
        },
    })
}
/// Map a list of Arch dependency names to Redox names, deduplicating the
/// result. Unmappable deps are dropped, adding both a warning and a manual
/// action; inexact mappings add a warning only.
fn map_dep_list(
    deps: &[String],
    warnings: &mut Vec<String>,
    actions_required: &mut Vec<String>,
) -> Vec<String> {
    let mut mapped = Vec::new();
    for dep in deps {
        let mapping = map_dependency(dep);
        if mapping.mapped.is_empty() {
            // No Redox equivalent: omit it and flag for manual porting.
            warnings.push(format!(
                "dependency '{}' has no Redox mapping and was omitted",
                mapping.original
            ));
            actions_required.push(format!(
                "port or replace dependency '{}' manually",
                mapping.original
            ));
            continue;
        }
        if !mapping.is_exact {
            warnings.push(format!(
                "dependency '{}' mapped to '{}'",
                mapping.original, mapping.mapped
            ));
        }
        if !mapped.contains(&mapping.mapped) {
            mapped.push(mapping.mapped);
        }
    }
    mapped
}
/// Guess the Cookbook build template by scanning the PKGBUILD text
/// (case-insensitively) for characteristic build-tool invocations; checks
/// are ordered, and anything unrecognized falls back to `Custom`.
fn detect_build_template(content: &str) -> BuildTemplate {
    let haystack = content.to_ascii_lowercase();
    let uses = |needle: &str| haystack.contains(needle);
    if uses("cargo build") || uses("cargo install") {
        return BuildTemplate::Cargo;
    }
    if uses("meson setup") || uses(" meson ") {
        return BuildTemplate::Meson;
    }
    if uses("cmake") {
        return BuildTemplate::Cmake;
    }
    if uses("./configure") || uses(" configure ") {
        return BuildTemplate::Configure;
    }
    BuildTemplate::Custom
}
/// Scan PKGBUILD text (case-insensitively) for constructs known not to work
/// on Redox, returning one warning per matched pattern. Note that a text
/// containing "/usr/lib/systemd" also matches the broader "systemd" check.
fn detect_linuxisms(content: &str) -> Vec<String> {
    const CHECKS: [(&str, &str); 4] = [
        (
            "systemctl",
            "uses systemctl, which is not available on Redox",
        ),
        (
            "/usr/lib/systemd",
            "references /usr/lib/systemd, which is Linux-specific",
        ),
        (
            "systemd",
            "references systemd, which is unavailable on Redox",
        ),
        (
            "/proc",
            "references /proc, which may require Redox-specific adaptation",
        ),
    ];
    let haystack = content.to_ascii_lowercase();
    CHECKS
        .iter()
        .filter(|(needle, _)| haystack.contains(needle))
        .map(|(_, warning)| warning.to_string())
        .collect()
}
/// Normalize an Arch pkgname into Redox recipe style: strip surrounding
/// double quotes, lowercase ASCII, and turn underscores into hyphens.
fn sanitize_pkgname(name: &str) -> String {
    let unquoted = name.trim_matches('"');
    unquoted
        .chars()
        .map(|ch| match ch {
            '_' => '-',
            other => other.to_ascii_lowercase(),
        })
        .collect()
}
/// Convert one Arch `source=()` entry into a SourceEntry. Git sources are
/// recognized by `git+`/`git://` prefixes or a `.git` suffix; everything
/// else is treated as a tarball and keeps its positional sha256 (git
/// sources carry no checksum).
fn source_from_arch(entry: String, sha256: Option<&str>) -> SourceEntry {
    let normalized = normalize_source_entry(&entry);
    let source_type = if normalized.starts_with("git+")
        || normalized.starts_with("git://")
        || normalized.ends_with(".git")
    {
        SourceType::Git
    } else {
        SourceType::Tar
    };
    SourceEntry {
        sha256: if matches!(source_type, SourceType::Tar) {
            sha256.unwrap_or_default().to_string()
        } else {
            String::new()
        },
        url: normalized,
        // rev/branch pinning is not derived from PKGBUILD fragments here.
        source_type,
        rev: String::new(),
        branch: String::new(),
    }
}
fn normalize_source_entry(entry: &str) -> String {
let stripped = entry
.split_once("::")
.map(|(_, value)| value)
.unwrap_or(entry)
.trim();
stripped
.strip_prefix("git+")
.unwrap_or(stripped)
.to_string()
}
/// Extract `name=value` from PKGBUILD text and parse the value as a scalar
/// (comment-stripped, trimmed, unquoted). `None` when the name is absent.
fn extract_scalar_assignment(content: &str, name: &str) -> Option<String> {
    extract_assignment(content, name).map(|raw| parse_scalar(&raw))
}
/// Extract `name=(...)` from PKGBUILD text and parse the value as a shell
/// array. `None` when the name is absent.
fn extract_array_assignment(content: &str, name: &str) -> Option<Vec<String>> {
    extract_assignment(content, name).map(|raw| parse_array(&raw))
}
/// Find the first `name=value` assignment in PKGBUILD text and return its
/// raw right-hand side. A value opening with `(` is accumulated across
/// lines until the parentheses balance; other values honour
/// trailing-backslash continuation. Quoting is not considered while
/// balancing parens — NOTE(review): a literal `(` inside a quoted value
/// will miscount; confirm this is acceptable for real-world PKGBUILDs.
fn extract_assignment(content: &str, name: &str) -> Option<String> {
    let needle = format!("{name}=");
    let mut remaining = content.lines();
    while let Some(line) = remaining.next() {
        let candidate = line.trim_start();
        if !candidate.starts_with(&needle) {
            continue;
        }
        let mut raw = candidate[needle.len()..].trim().to_string();
        if raw.starts_with('(') {
            // Array form: pull lines until every '(' has a matching ')'.
            let mut open = paren_balance(&raw);
            while open > 0 {
                match remaining.next() {
                    Some(continuation) => {
                        raw.push('\n');
                        raw.push_str(continuation.trim());
                        open += paren_balance(continuation);
                    }
                    None => break,
                }
            }
        } else {
            // Scalar form: a trailing backslash joins the next line.
            while raw.ends_with('\\') {
                raw.pop();
                match remaining.next() {
                    Some(continuation) => {
                        raw.push(' ');
                        raw.push_str(continuation.trim());
                    }
                    None => break,
                }
            }
        }
        return Some(raw);
    }
    None
}
/// Net count of '(' minus ')' in `input`; positive means unclosed parens
/// remain open at the end of the string.
fn paren_balance(input: &str) -> i32 {
    input.chars().fold(0, |balance, ch| match ch {
        '(' => balance + 1,
        ')' => balance - 1,
        _ => balance,
    })
}
/// Parse a scalar PKGBUILD value: drop any unquoted trailing comment, trim
/// whitespace, then remove one level of matching surrounding quotes.
fn parse_scalar(raw: &str) -> String {
    let without_comment = strip_unquoted_comment(raw);
    let trimmed = without_comment.trim();
    match unquote(trimmed) {
        Some(inner) => inner,
        None => trimmed.to_string(),
    }
}
/// Parse a raw array right-hand side: drop an unquoted comment, peel the
/// outer parentheses when both are present, and shell-split the words.
fn parse_array(raw: &str) -> Vec<String> {
    let without_comment = strip_unquoted_comment(raw);
    let trimmed = without_comment.trim();
    let inner = match trimmed.strip_prefix('(').and_then(|v| v.strip_suffix(')')) {
        Some(body) => body,
        None => trimmed,
    };
    shell_split(inner)
}
/// Remove an unquoted `#` comment from `input`, honouring shell word rules:
/// `#` only starts a comment at the beginning of the string or after
/// whitespace, so values such as `1.2#3` survive intact (the previous
/// version cut them at the `#`).
fn strip_unquoted_comment(input: &str) -> String {
    let mut single = false;
    let mut double = false;
    let mut result = String::new();
    for ch in input.chars() {
        match ch {
            '\'' if !double => {
                single = !single;
                result.push(ch);
            }
            '"' if !single => {
                double = !double;
                result.push(ch);
            }
            // Comment start: unquoted `#` at the start or after whitespace.
            '#' if !single
                && !double
                && result.chars().last().map_or(true, char::is_whitespace) =>
            {
                break
            }
            _ => result.push(ch),
        }
    }
    result
}
/// Remove one level of matching surrounding quotes (`'…'` or `"…"`).
/// Returns `None` when the value is not fully wrapped in a matching pair.
fn unquote(value: &str) -> Option<String> {
    for quote in ['\'', '"'] {
        if let Some(inner) = value
            .strip_prefix(quote)
            .and_then(|rest| rest.strip_suffix(quote))
        {
            return Some(inner.to_string());
        }
    }
    None
}
/// Split a shell-style word list into items.
///
/// Handles single/double quotes and backslash escapes. An unquoted `#`
/// starts a comment, but — as in shell — only at the beginning of a word,
/// so tokens like `a#b` are kept whole (the previous version truncated them
/// to `a` and stopped).
///
/// NOTE(review): a backslash is treated as an escape even inside single
/// quotes, which diverges from POSIX shell (literal there); PKGBUILD arrays
/// in practice do not rely on that corner case — confirm if it matters.
fn shell_split(input: &str) -> Vec<String> {
    let mut items = Vec::new();
    let mut current = String::new();
    let mut quote: Option<char> = None;
    let mut escape = false;
    for ch in input.chars() {
        if escape {
            // Escaped character is taken literally.
            current.push(ch);
            escape = false;
            continue;
        }
        match ch {
            '\\' => escape = true,
            '\'' | '"' => {
                if quote == Some(ch) {
                    quote = None;
                } else if quote.is_none() {
                    quote = Some(ch);
                } else {
                    // The other quote kind inside a quoted span is literal.
                    current.push(ch);
                }
            }
            // Comment only starts on a word boundary.
            '#' if quote.is_none() && current.is_empty() => break,
            ch if ch.is_whitespace() && quote.is_none() => {
                if !current.is_empty() {
                    items.push(std::mem::take(&mut current));
                }
            }
            _ => current.push(ch),
        }
    }
    if !current.is_empty() {
        items.push(current);
    }
    items
}
#[cfg(test)]
mod tests {
    use super::*;
    // Fixture covering scalars, quoted arrays, version constraints, a
    // checksum, and a deliberate Linuxism (systemctl) inside package().
    const PKGBUILD: &str = r#"
pkgname=demo_pkg
pkgver=1.2.3
pkgrel=4
pkgdesc="Demo application"
url="https://example.com/demo"
license=('MIT')
depends=('glibc' 'openssl>=1.1' 'systemd')
makedepends=('cargo' 'pkg-config')
checkdepends=('python')
source=('https://example.com/demo-1.2.3.tar.xz')
sha256sums=('abc123deadbeef')
build() {
cargo build --release
}
package() {
install -Dm755 target/release/demo "$pkgdir/usr/bin/demo"
systemctl --version >/dev/null
}
"#;
    // End-to-end conversion: name normalization (underscore -> dash),
    // dependency mapping, and checksum propagation.
    #[test]
    fn converts_pkgbuild_to_rbpkgbuild() {
        let result = convert_pkgbuild(PKGBUILD).expect("convert PKGBUILD");
        assert_eq!(result.rbpkg.package.name, "demo-pkg");
        assert_eq!(result.rbpkg.package.version, "1.2.3");
        assert_eq!(result.rbpkg.package.release, 4);
        assert_eq!(result.rbpkg.build.template, BuildTemplate::Cargo);
        assert_eq!(
            result.rbpkg.dependencies.runtime,
            vec!["relibc", "openssl3"]
        );
        assert_eq!(result.rbpkg.dependencies.build, vec!["cargo", "pkg-config"]);
        assert_eq!(result.rbpkg.dependencies.check, vec!["python"]);
        assert_eq!(result.rbpkg.source.sources.len(), 1);
        assert_eq!(result.rbpkg.source.sources[0].sha256, "abc123deadbeef");
    }
    // The systemd dependency and systemctl call must downgrade the
    // conversion to Partial and surface warnings/actions.
    #[test]
    fn reports_linuxisms_and_unmapped_deps() {
        let result = convert_pkgbuild(PKGBUILD).expect("convert PKGBUILD");
        assert!(matches!(result.report.status, ConversionStatus::Partial));
        assert!(result
            .report
            .warnings
            .iter()
            .any(|w| w.contains("systemctl")));
        assert!(result
            .report
            .actions_required
            .iter()
            .any(|w| w.contains("systemd")));
    }
    // Arrays spread over multiple lines must be joined via paren tracking.
    #[test]
    fn parses_multiline_arrays() {
        let input = "depends=(\n 'glibc'\n 'zlib'\n)\n";
        let parsed = extract_array_assignment(input, "depends").expect("depends array");
        assert_eq!(parsed, vec!["glibc", "zlib"]);
    }
    // `meson setup` in the build body selects the meson template.
    #[test]
    fn detects_meson_template() {
        let input = "pkgname=demo\npkgver=1\nmeson setup build\n";
        assert_eq!(detect_build_template(input), BuildTemplate::Meson);
    }
}
@@ -0,0 +1,324 @@
use serde_derive::Serialize;
use crate::error::CubError;
use crate::rbpkgbuild::{BuildTemplate, RbPkgBuild, SourceType};
/// Serialized form of a Cookbook `recipe.toml` document.
///
/// Field order is significant: serde emits TOML tables in declaration order.
#[derive(Debug, Serialize)]
struct CookbookRecipe {
    #[serde(skip_serializing_if = "Option::is_none")]
    source: Option<CookbookSource>,
    build: CookbookBuild,
    #[serde(skip_serializing_if = "Option::is_none")]
    package: Option<CookbookPackage>,
}
/// Cookbook `[source]` table: exactly one of `git`/`tar` is set, plus the
/// pins/checksum matching that source kind.
#[derive(Debug, Default, Serialize)]
struct CookbookSource {
    #[serde(skip_serializing_if = "Option::is_none")]
    git: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    tar: Option<String>,
    // branch/rev only apply to git sources.
    #[serde(skip_serializing_if = "Option::is_none")]
    branch: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    rev: Option<String>,
    // Checksum for tar sources (see convert_source for the caveat about the
    // hash algorithm).
    #[serde(skip_serializing_if = "Option::is_none")]
    blake3: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    patches: Vec<String>,
}
/// Cookbook `[build]` table: template selection plus template-specific flag
/// lists; `script` is only set for the custom template.
#[derive(Debug, Serialize)]
struct CookbookBuild {
    template: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    dependencies: Vec<String>,
    #[serde(rename = "dev-dependencies", skip_serializing_if = "Vec::is_empty")]
    dev_dependencies: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    cargoflags: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    configureflags: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    cmakeflags: Vec<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    mesonflags: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    script: Option<String>,
}
/// Cookbook `[package]` table: runtime dependencies and display metadata.
#[derive(Debug, Serialize)]
struct CookbookPackage {
    #[serde(skip_serializing_if = "Vec::is_empty")]
    dependencies: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    description: Option<String>,
}
pub fn generate_recipe(rbpkg: &RbPkgBuild) -> Result<String, CubError> {
rbpkg.validate()?;
if rbpkg.source.sources.len() > 1 {
return Err(CubError::Conversion(
"Cookbook recipe generation currently supports a single primary source".to_string(),
));
}
let source = rbpkg
.source
.sources
.first()
.map(convert_source)
.transpose()?
.map(|mut source| {
source.patches = rbpkg.patches.files.clone();
source
});
let build = convert_build(rbpkg)?;
let package = build_package_section(rbpkg);
toml::to_string_pretty(&CookbookRecipe {
source,
build,
package,
})
.map_err(CubError::from)
}
/// Translate an RBPKGBUILD source entry into a Cookbook source table.
///
/// Git sources carry optional branch/rev pins; tar sources carry a checksum.
///
/// NOTE(review): the RBPKGBUILD `sha256` value is written into Cookbook's
/// `blake3` field unchanged — Cookbook expects a BLAKE3 digest there, so the
/// checksum algorithm looks mismatched. Confirm against Cookbook's
/// fetch/verify behavior before relying on checksum enforcement.
fn convert_source(source: &crate::rbpkgbuild::SourceEntry) -> Result<CookbookSource, CubError> {
    let mut cookbook = CookbookSource::default();
    match source.source_type {
        SourceType::Git => {
            cookbook.git = Some(source.url.clone());
            cookbook.branch = non_empty(&source.branch);
            cookbook.rev = non_empty(&source.rev);
        }
        SourceType::Tar => {
            cookbook.tar = Some(source.url.clone());
            cookbook.blake3 = non_empty(&source.sha256);
        }
    }
    Ok(cookbook)
}
/// Build the Cookbook `[build]` table from an RBPKGBUILD build section.
///
/// Template-specific flag lists are filled from `build.args`; the custom
/// template is rendered as a newline-joined shell script instead.
fn convert_build(rbpkg: &RbPkgBuild) -> Result<CookbookBuild, CubError> {
    let section = &rbpkg.build;
    let mut cargoflags = Vec::new();
    let mut configureflags = Vec::new();
    let mut cmakeflags = Vec::new();
    let mut mesonflags = Vec::new();
    let mut script = None;
    match section.template {
        BuildTemplate::Cargo => {
            if section.release {
                cargoflags.push("--release".to_string());
            }
            if !section.features.is_empty() {
                // Cargo takes the feature list as one comma-joined argument.
                cargoflags.push("--features".to_string());
                cargoflags.push(section.features.join(","));
            }
            cargoflags.extend(section.args.iter().cloned());
        }
        BuildTemplate::Configure => configureflags = section.args.clone(),
        BuildTemplate::Cmake => cmakeflags = section.args.clone(),
        BuildTemplate::Meson => mesonflags = section.args.clone(),
        BuildTemplate::Custom => script = Some(custom_script(rbpkg)?),
    }
    Ok(CookbookBuild {
        template: template_name(&section.template).to_string(),
        dependencies: rbpkg.dependencies.build.clone(),
        dev_dependencies: rbpkg.dependencies.check.clone(),
        cargoflags,
        configureflags,
        cmakeflags,
        mesonflags,
        script,
    })
}
/// Assemble the Cookbook `[package]` table.
///
/// The version string is always present (`"<version>-<release>"` when
/// release is non-zero), so the section is emitted unconditionally; the
/// previous `version.is_none()` short-circuit could never fire — `version`
/// was always `Some` — and has been removed. Behavior is unchanged.
fn build_package_section(rbpkg: &RbPkgBuild) -> Option<CookbookPackage> {
    let version = if rbpkg.package.release > 0 {
        format!("{}-{}", rbpkg.package.version, rbpkg.package.release)
    } else {
        rbpkg.package.version.clone()
    };
    Some(CookbookPackage {
        dependencies: rbpkg.dependencies.runtime.clone(),
        version: Some(version),
        description: non_empty(&rbpkg.package.description),
    })
}
/// Flatten the custom-template command lists into one newline-joined shell
/// script: prepare, build, (optionally) check, then install. Check commands
/// are included only when policy allows running tests.
///
/// # Errors
/// `CubError::InvalidPkgbuild` when no commands are present at all.
fn custom_script(rbpkg: &RbPkgBuild) -> Result<String, CubError> {
    let build = &rbpkg.build;
    let mut commands: Vec<String> = Vec::new();
    commands.extend_from_slice(&build.prepare);
    commands.extend_from_slice(&build.build_script);
    if rbpkg.policy.allow_tests {
        commands.extend_from_slice(&build.check);
    }
    commands.extend_from_slice(&build.install_script);
    if commands.is_empty() {
        return Err(CubError::InvalidPkgbuild(
            "custom template requires at least one prepare/build/check/install command".to_string(),
        ));
    }
    Ok(commands.join("\n"))
}
/// Cookbook template identifier corresponding to an RBPKGBUILD template.
fn template_name(template: &BuildTemplate) -> &'static str {
    match template {
        BuildTemplate::Cargo => "cargo",
        BuildTemplate::Cmake => "cmake",
        BuildTemplate::Configure => "configure",
        BuildTemplate::Custom => "custom",
        BuildTemplate::Meson => "meson",
    }
}
/// Trim `value` and return it as `Some` only when something remains.
fn non_empty(value: &str) -> Option<String> {
    let trimmed = value.trim();
    (!trimmed.is_empty()).then(|| trimmed.to_string())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::rbpkgbuild::{
        BuildSection, BuildTemplate, CompatSection, ConversionStatus, DependenciesSection,
        InstallSection, PackageSection, PatchesSection, PolicySection, RbPkgBuild, SourceEntry,
        SourceSection, SourceType,
    };
    // Shared builder: a fully-populated RBPKGBUILD with a git source, so each
    // test only varies the build template (or patches fields afterwards).
    fn base_pkg(template: BuildTemplate) -> RbPkgBuild {
        RbPkgBuild {
            format: 1,
            package: PackageSection {
                name: "demo".to_string(),
                version: "1.0.0".to_string(),
                release: 1,
                description: "demo package".to_string(),
                homepage: String::new(),
                license: vec!["MIT".to_string()],
                architectures: vec!["x86_64-unknown-redox".to_string()],
                maintainers: Vec::new(),
            },
            source: SourceSection {
                sources: vec![SourceEntry {
                    source_type: SourceType::Git,
                    url: "https://example.com/repo.git".to_string(),
                    sha256: String::new(),
                    rev: "abc123".to_string(),
                    branch: "main".to_string(),
                }],
            },
            dependencies: DependenciesSection {
                build: vec!["cargo".to_string()],
                runtime: vec!["openssl3".to_string()],
                check: vec!["python".to_string()],
                optional: Vec::new(),
                provides: Vec::new(),
                conflicts: Vec::new(),
            },
            build: BuildSection {
                template,
                release: true,
                features: vec!["cli".to_string(), "full".to_string()],
                args: vec!["--locked".to_string()],
                build_dir: String::new(),
                prepare: vec!["./autogen.sh".to_string()],
                build_script: vec!["make".to_string()],
                check: vec!["make test".to_string()],
                install_script: vec!["make install DESTDIR=\"${COOKBOOK_STAGE}\"".to_string()],
            },
            install: InstallSection::default(),
            patches: PatchesSection {
                files: vec!["redox.patch".to_string()],
            },
            compat: CompatSection {
                imported_from: String::new(),
                original_pkgbuild: String::new(),
                conversion_status: ConversionStatus::Full,
                target: String::new(),
            },
            policy: PolicySection::default(),
        }
    }
    // Git source + cargo template: URL, template, build deps, patches and
    // runtime deps all land in the expected recipe tables.
    #[test]
    fn generates_cargo_recipe() {
        let recipe = generate_recipe(&base_pkg(BuildTemplate::Cargo)).expect("generate recipe");
        let value: toml::Value = toml::from_str(&recipe).expect("parse generated recipe");
        assert_eq!(
            value["source"]["git"].as_str(),
            Some("https://example.com/repo.git")
        );
        assert_eq!(value["build"]["template"].as_str(), Some("cargo"));
        assert_eq!(value["build"]["dependencies"][0].as_str(), Some("cargo"));
        assert_eq!(value["source"]["patches"][0].as_str(), Some("redox.patch"));
        assert_eq!(
            value["package"]["dependencies"][0].as_str(),
            Some("openssl3")
        );
    }
    // Tar source: URL goes to `tar`, the checksum to the blake3 field.
    #[test]
    fn generates_tar_recipe_with_checksum() {
        let mut pkg = base_pkg(BuildTemplate::Cargo);
        pkg.source.sources[0] = SourceEntry {
            source_type: SourceType::Tar,
            url: "https://example.com/demo.tar.gz".to_string(),
            sha256: "abc123deadbeef".to_string(),
            rev: String::new(),
            branch: String::new(),
        };
        let recipe = generate_recipe(&pkg).expect("generate recipe");
        let value: toml::Value = toml::from_str(&recipe).expect("parse generated recipe");
        assert_eq!(
            value["source"]["tar"].as_str(),
            Some("https://example.com/demo.tar.gz")
        );
        assert_eq!(value["source"]["blake3"].as_str(), Some("abc123deadbeef"));
    }
    // Custom template: prepare/build/install commands concatenated into the
    // recipe's script field.
    #[test]
    fn generates_custom_script() {
        let recipe = generate_recipe(&base_pkg(BuildTemplate::Custom)).expect("generate recipe");
        let value: toml::Value = toml::from_str(&recipe).expect("parse generated recipe");
        let script = value["build"]["script"].as_str().expect("custom script");
        assert!(script.contains("./autogen.sh"));
        assert!(
            script.contains("make\n") || script.ends_with("make") || script.contains("make test")
        );
        assert!(script.contains("make install"));
    }
    // allow_tests = false must drop the check commands from the script.
    #[test]
    fn omits_test_commands_when_policy_disallows_them() {
        let mut pkg = base_pkg(BuildTemplate::Custom);
        pkg.policy.allow_tests = false;
        let recipe = generate_recipe(&pkg).expect("generate recipe");
        assert!(!recipe.contains("make test"));
    }
}
@@ -0,0 +1,105 @@
/// Result of translating an Arch Linux dependency name to its Redox
/// counterpart.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct MappedDep {
    // The dependency exactly as written in the PKGBUILD (version
    // constraints kept).
    pub original: String,
    // Redox-side package name; empty when the package has no equivalent.
    pub mapped: String,
    // False for best-effort substitutes (or unavailable packages), true for
    // exact (or assumed pass-through) names.
    pub is_exact: bool,
}
/// Translate one Arch dependency name into its Redox counterpart.
///
/// Version constraints (e.g. `>=1.1`) are kept in `original` but stripped
/// before the lookup. Unknown names pass through unchanged with
/// `is_exact = true` — presumably assumed to exist under the same name in
/// the Redox cookbook; TODO confirm for newly imported packages.
pub fn map_dependency(arch_name: &str) -> MappedDep {
    let cleaned = arch_name.trim();
    let base = dependency_base_name(cleaned);
    let (mapped, is_exact) = match base.as_str() {
        // Toolchain/libc substitutions — marked inexact.
        "glibc" => ("relibc".to_string(), false),
        "gcc" | "make" => ("build-base".to_string(), false),
        "pkg-config" => ("pkg-config".to_string(), true),
        "openssl" => ("openssl3".to_string(), false),
        // Libraries/tools available under the same name on Redox.
        "zlib" => ("zlib".to_string(), true),
        "libffi" => ("libffi".to_string(), true),
        "pcre2" => ("pcre2".to_string(), true),
        "ncurses" => ("ncurses".to_string(), true),
        "readline" => ("readline".to_string(), true),
        "curl" => ("curl".to_string(), true),
        "git" => ("git".to_string(), true),
        "python" => ("python".to_string(), true),
        "rust" => ("rust".to_string(), true),
        "cargo" => ("cargo".to_string(), true),
        "cmake" => ("cmake".to_string(), true),
        "meson" => ("meson".to_string(), true),
        "autoconf" => ("autoconf".to_string(), true),
        "automake" => ("automake".to_string(), true),
        "libtool" => ("libtool".to_string(), true),
        // No Redox equivalent: an empty mapping signals "drop/replace this".
        "systemd" => (String::new(), false),
        "dbus" => ("dbus".to_string(), true),
        // Unknown packages pass through under the same name.
        _ => (base.clone(), true),
    };
    MappedDep {
        original: cleaned.to_string(),
        mapped,
        is_exact,
    }
}
/// Map a whole dependency list via [`map_dependency`], preserving order.
pub fn map_dependencies(arch_deps: &[String]) -> Vec<MappedDep> {
    let mut mapped = Vec::with_capacity(arch_deps.len());
    for dep in arch_deps {
        mapped.push(map_dependency(dep));
    }
    mapped
}
/// Lower-cased base name of an Arch dependency spec: trims whitespace,
/// drops an optional `host:` prefix, and cuts the name at the first version
/// constraint or separator character.
fn dependency_base_name(name: &str) -> String {
    let trimmed = name.trim();
    let without_host = trimmed.strip_prefix("host:").unwrap_or(trimmed);
    let end = without_host
        .find(|ch: char| matches!(ch, '<' | '>' | '=' | ':' | ' ' | '\t'))
        .unwrap_or(without_host.len());
    without_host[..end].to_ascii_lowercase()
}
#[cfg(test)]
mod tests {
    use super::*;
    // glibc is a substitute mapping (relibc), so is_exact must be false.
    #[test]
    fn maps_known_dependency() {
        let mapped = map_dependency("glibc");
        assert_eq!(mapped.original, "glibc");
        assert_eq!(mapped.mapped, "relibc");
        assert!(!mapped.is_exact);
    }
    // Unknown names pass through unchanged and are treated as exact.
    #[test]
    fn keeps_unknown_dependency_name() {
        let mapped = map_dependency("expat");
        assert_eq!(mapped.mapped, "expat");
        assert!(mapped.is_exact);
    }
    // Version constraints stay in `original` but are ignored for the lookup.
    #[test]
    fn strips_version_constraints() {
        let mapped = map_dependency("openssl>=1.1");
        assert_eq!(mapped.original, "openssl>=1.1");
        assert_eq!(mapped.mapped, "openssl3");
    }
    // systemd has no Redox equivalent: the mapping is empty.
    #[test]
    fn marks_unavailable_dependency() {
        let mapped = map_dependency("systemd");
        assert!(mapped.mapped.is_empty());
        assert!(!mapped.is_exact);
    }
    // Batch mapping preserves input order.
    #[test]
    fn maps_collections() {
        let deps = vec!["glibc".to_string(), "cmake".to_string()];
        let mapped = map_dependencies(&deps);
        assert_eq!(mapped.len(), 2);
        assert_eq!(mapped[0].mapped, "relibc");
        assert_eq!(mapped[1].mapped, "cmake");
    }
}
@@ -0,0 +1,23 @@
use thiserror::Error;
/// Unified error type for all cub-lib operations.
#[derive(Error, Debug)]
pub enum CubError {
    /// Filesystem or stream failure (wraps `std::io::Error`).
    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),
    /// RBPKGBUILD/recipe TOML could not be parsed.
    #[error("TOML parse error: {0}")]
    TomlParse(#[from] toml::de::Error),
    /// A document could not be rendered back to TOML.
    #[error("TOML serialize error: {0}")]
    TomlSerialize(#[from] toml::ser::Error),
    /// Manifest failed structural validation (see `RbPkgBuild::validate`).
    #[error("Invalid RBPKGBUILD: {0}")]
    InvalidPkgbuild(String),
    /// A build step or packaging step failed.
    #[error("Build failed: {0}")]
    BuildFailed(String),
    /// Requested package (or stage directory) does not exist.
    #[error("Package not found: {0}")]
    PackageNotFound(String),
    /// PKGBUILD -> RBPKGBUILD / recipe conversion could not proceed.
    #[error("Conversion error: {0}")]
    Conversion(String),
    /// Dependency graph could not be resolved.
    #[error("Dependency resolution failed: {0}")]
    Dependency(String),
    /// Build sandbox setup or enforcement failed.
    #[error("Sandbox error: {0}")]
    Sandbox(String),
}
@@ -0,0 +1,11 @@
//! cub-lib: Red Bear OS package-building primitives.
//!
//! RBPKGBUILD parsing, .RBSRCINFO handling, Cookbook recipe generation,
//! PKGBUILD conversion, dependency mapping, and build sandboxing.
pub mod rbpkgbuild;
pub mod rbsrcinfo;
pub mod cookbook;
pub mod converter;
pub mod deps;
pub mod sandbox;
// pkgar-based package creation pulls in extra dependencies, so it is gated
// behind the "full" feature.
#[cfg(feature = "full")]
pub mod package;
pub mod error;
pub use error::CubError;
@@ -0,0 +1,167 @@
use std::path::Path;
use serde_derive::Serialize;
use crate::error::CubError;
use crate::rbpkgbuild::RbPkgBuild;
/// pkgar package-creation helpers.
///
/// NOTE(review): the fields below are not read by either associated function
/// in this file (both are associated fns, not methods) — confirm they are
/// used elsewhere or consider dropping them.
pub struct PackageCreator {
    pub name: String,
    pub version: String,
    pub target: String,
}
impl PackageCreator {
    /// Package a Cookbook stage directory into a signed pkgar archive.
    ///
    /// # Errors
    /// - `CubError::PackageNotFound` when `stage_dir` is not a directory.
    /// - `CubError::BuildFailed` for key-loading, stage-scanning, or archive
    ///   creation failures.
    pub fn create_from_stage(
        stage_dir: &Path,
        output_path: &Path,
        secret_key_path: &Path,
    ) -> Result<(), CubError> {
        if !stage_dir.is_dir() {
            return Err(CubError::PackageNotFound(format!(
                "stage directory does not exist: {}",
                stage_dir.display()
            )));
        }
        // Validate the signing key up front so a bad key fails before any
        // archive work starts.
        pkgar_keys::get_skey(secret_key_path).map_err(|err| {
            CubError::BuildFailed(format!(
                "failed to load pkgar secret key {}: {err}",
                secret_key_path.display()
            ))
        })?;
        // Pre-flight scan of the stage tree; the entry list itself is
        // discarded — this only surfaces unreadable entries early before
        // create_with_flags walks the tree again.
        pkgar::folder_entries(stage_dir).map_err(|err| {
            CubError::BuildFailed(format!(
                "failed to scan stage directory {}: {err}",
                stage_dir.display()
            ))
        })?;
        // Architecture-independent, uncompressed payload.
        let flags = pkgar_core::HeaderFlags::latest(
            pkgar_core::Architecture::Independent,
            pkgar_core::Packaging::Uncompressed,
        );
        pkgar::create_with_flags(secret_key_path, output_path, stage_dir, flags).map_err(|err| {
            CubError::BuildFailed(format!(
                "failed to create pkgar archive {}: {err}",
                output_path.display()
            ))
        })
    }
    /// Render the pkgar-companion `package.toml` metadata for `rbpkg`.
    ///
    /// Falls back to a minimal hand-formatted document if TOML serialization
    /// fails (it should not for these plain string/list fields).
    pub fn generate_package_toml(rbpkg: &RbPkgBuild) -> String {
        #[derive(Serialize)]
        struct PackageMetadata {
            name: String,
            version: String,
            target: String,
            #[serde(skip_serializing_if = "Vec::is_empty")]
            depends: Vec<String>,
            #[serde(skip_serializing_if = "Vec::is_empty")]
            optdepends: Vec<String>,
        }
        let metadata = PackageMetadata {
            name: rbpkg.package.name.clone(),
            // "<version>-<release>" mirrors the Cookbook adapter's naming;
            // release 0 means "unset".
            version: if rbpkg.package.release > 0 {
                format!("{}-{}", rbpkg.package.version, rbpkg.package.release)
            } else {
                rbpkg.package.version.clone()
            },
            // First declared architecture wins; default to the primary
            // Redox target when none is declared.
            target: rbpkg
                .package
                .architectures
                .first()
                .cloned()
                .unwrap_or_else(|| "x86_64-unknown-redox".to_string()),
            depends: rbpkg.dependencies.runtime.clone(),
            optdepends: rbpkg.dependencies.optional.clone(),
        };
        match toml::to_string_pretty(&metadata) {
            Ok(rendered) => rendered,
            Err(_) => format!(
                "name = \"{}\"\nversion = \"{}\"\ntarget = \"{}\"\n",
                metadata.name, metadata.version, metadata.target
            ),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::rbpkgbuild::{
        BuildSection, CompatSection, ConversionStatus, DependenciesSection, InstallSection,
        PackageSection, PatchesSection, PolicySection, RbPkgBuild, SourceSection,
    };
    use tempfile::tempdir;
    // Minimal custom-template package with one runtime and one virtual dep.
    fn sample_rbpkgbuild() -> RbPkgBuild {
        RbPkgBuild {
            format: 1,
            package: PackageSection {
                name: "demo".to_string(),
                version: "1.0.0".to_string(),
                release: 1,
                description: "demo package".to_string(),
                homepage: String::new(),
                license: Vec::new(),
                architectures: vec!["x86_64-unknown-redox".to_string()],
                maintainers: Vec::new(),
            },
            source: SourceSection::default(),
            dependencies: DependenciesSection {
                build: Vec::new(),
                runtime: vec!["openssl3".to_string()],
                check: Vec::new(),
                optional: Vec::new(),
                provides: vec!["demo-virtual".to_string()],
                conflicts: vec!["demo-old".to_string()],
            },
            build: BuildSection {
                build_script: vec!["make".to_string()],
                install_script: vec!["make install".to_string()],
                ..BuildSection::default()
            },
            install: InstallSection::default(),
            patches: PatchesSection::default(),
            compat: CompatSection {
                imported_from: String::new(),
                original_pkgbuild: String::new(),
                conversion_status: ConversionStatus::Full,
                target: String::new(),
            },
            policy: PolicySection::default(),
        }
    }
    // package.toml rendering: release suffix, depends/optdepends keys, and
    // no stray "dependencies" key from another serializer.
    #[test]
    fn generates_package_toml() {
        let mut rbpkg = sample_rbpkgbuild();
        rbpkg.dependencies.optional = vec!["git".to_string()];
        let rendered = PackageCreator::generate_package_toml(&rbpkg);
        assert!(rendered.contains("name = \"demo\""));
        assert!(rendered.contains("version = \"1.0.0-1\""));
        assert!(rendered.contains("target = \"x86_64-unknown-redox\""));
        assert!(rendered.contains("depends = [\"openssl3\"]"));
        assert!(rendered.contains("optdepends = [\"git\"]"));
        assert!(!rendered.contains("dependencies ="));
    }
    // A missing stage directory must fail fast with PackageNotFound before
    // any key loading is attempted.
    #[test]
    fn errors_when_stage_dir_is_missing() {
        let temp = tempdir().expect("tempdir");
        let err = PackageCreator::create_from_stage(
            &temp.path().join("missing-stage"),
            &temp.path().join("out.pkgar"),
            &temp.path().join("secret.toml"),
        )
        .expect_err("missing stage should fail");
        assert!(matches!(err, CubError::PackageNotFound(_)));
    }
}
@@ -0,0 +1,406 @@
use std::fs;
use std::path::Path;
use serde_derive::{Deserialize, Serialize};
use crate::error::CubError;
use crate::rbsrcinfo::RbSrcInfo;
/// Parsed RBPKGBUILD (TOML) manifest; the root document type.
///
/// All sections except `format` and `package` default to empty, so minimal
/// manifests parse. Field order matters for TOML serialization.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct RbPkgBuild {
    // Manifest format version; only 1 is accepted by `validate`.
    pub format: u32,
    pub package: PackageSection,
    #[serde(default)]
    pub source: SourceSection,
    #[serde(default)]
    pub dependencies: DependenciesSection,
    #[serde(default)]
    pub build: BuildSection,
    #[serde(default)]
    pub install: InstallSection,
    #[serde(default)]
    pub patches: PatchesSection,
    #[serde(default)]
    pub compat: CompatSection,
    #[serde(default)]
    pub policy: PolicySection,
}
/// `[package]` table: identity and display metadata.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct PackageSection {
    // Must match [a-z0-9-_]+ (enforced by `validate`).
    pub name: String,
    pub version: String,
    // Release counter; 0 means "unset" and suppresses the -N version suffix.
    #[serde(default)]
    pub release: u32,
    #[serde(default)]
    pub description: String,
    #[serde(default)]
    pub homepage: String,
    #[serde(default)]
    pub license: Vec<String>,
    // Must include "x86_64-unknown-redox" (enforced by `validate`).
    #[serde(default)]
    pub architectures: Vec<String>,
    #[serde(default)]
    pub maintainers: Vec<String>,
}
/// `[source]` table: list of upstream source entries.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct SourceSection {
    #[serde(default)]
    pub sources: Vec<SourceEntry>,
}
/// One upstream source: either a git checkout or a tarball download.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct SourceEntry {
    // Serialized as `type = "git" | "tar"`.
    #[serde(rename = "type")]
    pub source_type: SourceType,
    #[serde(default)]
    pub url: String,
    // Required for tar sources (enforced by `validate`); unused for git.
    #[serde(default)]
    pub sha256: String,
    // Git pins; both may be empty (default branch is then tracked).
    #[serde(default)]
    pub rev: String,
    #[serde(default)]
    pub branch: String,
}
/// Kind of upstream source; serialized lowercase ("tar" / "git").
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum SourceType {
    Tar,
    Git,
}
/// `[dependencies]` table: build-time, runtime, check-time, and optional
/// dependencies, plus provides/conflicts metadata.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct DependenciesSection {
    #[serde(default)]
    pub build: Vec<String>,
    #[serde(default)]
    pub runtime: Vec<String>,
    #[serde(default)]
    pub check: Vec<String>,
    #[serde(default)]
    pub optional: Vec<String>,
    #[serde(default)]
    pub provides: Vec<String>,
    #[serde(default)]
    pub conflicts: Vec<String>,
}
/// `[build]` table: template choice plus template-specific options and the
/// explicit command lists used by the custom template.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct BuildSection {
    #[serde(default)]
    pub template: BuildTemplate,
    // Cargo-only: build with --release.
    #[serde(default)]
    pub release: bool,
    // Cargo-only: feature list passed as a single --features argument.
    #[serde(default)]
    pub features: Vec<String>,
    // Extra flags for the selected template.
    #[serde(default)]
    pub args: Vec<String>,
    #[serde(default)]
    pub build_dir: String,
    // Custom-template command lists, executed in this order:
    // prepare -> build_script -> check (policy permitting) -> install_script.
    #[serde(default)]
    pub prepare: Vec<String>,
    #[serde(default)]
    pub build_script: Vec<String>,
    #[serde(default)]
    pub check: Vec<String>,
    #[serde(default)]
    pub install_script: Vec<String>,
}
/// Build system template; serialized lowercase. Defaults to `custom`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum BuildTemplate {
    #[default]
    Custom,
    Cargo,
    Configure,
    Cmake,
    Meson,
}
/// `[install]` table: declarative from->to copy lists grouped by artifact
/// kind (alternative to a custom install script).
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct InstallSection {
    #[serde(default)]
    pub bins: Vec<InstallEntry>,
    #[serde(default)]
    pub libs: Vec<InstallEntry>,
    #[serde(default)]
    pub headers: Vec<InstallEntry>,
    #[serde(default)]
    pub docs: Vec<InstallEntry>,
    #[serde(default)]
    pub man: Vec<InstallEntry>,
}
/// One declarative install mapping: copy `from` (build output path) to `to`
/// (destination path).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
pub struct InstallEntry {
    pub from: String,
    pub to: String,
}
/// `[patches]` table: patch files applied to the source before building.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct PatchesSection {
    #[serde(default)]
    pub files: Vec<String>,
}
/// `[compat]` table: provenance of manifests imported from other package
/// formats (e.g. Arch PKGBUILDs) and how complete the conversion was.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct CompatSection {
    #[serde(default)]
    pub imported_from: String,
    #[serde(default)]
    pub original_pkgbuild: String,
    #[serde(default)]
    pub conversion_status: ConversionStatus,
    #[serde(default)]
    pub target: String,
}
/// Completeness of an automated conversion; serialized lowercase.
/// `Partial` means warnings/actions remain; `Manual` means hand-editing is
/// required.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "lowercase")]
pub enum ConversionStatus {
    #[default]
    Full,
    Partial,
    Manual,
}
/// `[policy]` table: build-time policy switches.
///
/// NOTE: `#[derive(Default)]` zeroes ALL fields (allow_tests/review_required
/// become false); the `default_true` serde defaults apply only when
/// deserializing TOML with the field absent.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
pub struct PolicySection {
    #[serde(default)]
    pub allow_network: bool,
    #[serde(default = "default_true")]
    pub allow_tests: bool,
    #[serde(default = "default_true")]
    pub review_required: bool,
}
// Serde default helper: fields tagged `default = "default_true"` start on.
fn default_true() -> bool {
    true
}
impl RbPkgBuild {
    /// Load and validate an RBPKGBUILD from a TOML file on disk.
    pub fn from_file(path: impl AsRef<Path>) -> Result<RbPkgBuild, CubError> {
        let contents = fs::read_to_string(path)?;
        Self::from_str(&contents)
    }
    /// Parse and validate an RBPKGBUILD from TOML text.
    ///
    /// NOTE(review): an inherent `from_str` shadows the `FromStr` convention
    /// (clippy::should_implement_trait); kept as-is since callers use
    /// `RbPkgBuild::from_str` directly.
    pub fn from_str(s: &str) -> Result<RbPkgBuild, CubError> {
        let parsed: RbPkgBuild = toml::from_str(s)?;
        parsed.validate()?;
        Ok(parsed)
    }
    /// Serialize back to pretty TOML, re-validating first.
    pub fn to_toml(&self) -> Result<String, CubError> {
        self.validate()?;
        toml::to_string_pretty(self).map_err(CubError::from)
    }
    /// Check structural invariants.
    ///
    /// Checks run in a fixed order and the FIRST failure is reported:
    /// format version, name presence/charset, version presence, Redox
    /// architecture, per-source URL sanity, per-source checksum rules, then
    /// custom-template completeness.
    pub fn validate(&self) -> Result<(), CubError> {
        // Only format 1 exists today.
        if self.format != 1 {
            return Err(CubError::InvalidPkgbuild(format!(
                "unsupported format {}, expected 1",
                self.format
            )));
        }
        if self.package.name.is_empty() {
            return Err(CubError::InvalidPkgbuild(
                "package.name must not be empty".to_string(),
            ));
        }
        if !valid_package_name(&self.package.name) {
            return Err(CubError::InvalidPkgbuild(format!(
                "package.name must match [a-z0-9-_]+: {}",
                self.package.name
            )));
        }
        if self.package.version.trim().is_empty() {
            return Err(CubError::InvalidPkgbuild(
                "package.version must not be empty".to_string(),
            ));
        }
        // The primary Redox target must always be listed.
        if !self
            .package
            .architectures
            .iter()
            .any(|arch| arch == "x86_64-unknown-redox")
        {
            return Err(CubError::InvalidPkgbuild(
                "package.architectures must include x86_64-unknown-redox".to_string(),
            ));
        }
        // URL sanity for ALL sources first, then checksum rules — keeping
        // the two passes separate preserves the reported-error order.
        for source in &self.source.sources {
            if source.url.trim().is_empty() {
                return Err(CubError::InvalidPkgbuild(
                    "source entry url must not be empty".to_string(),
                ));
            }
            if matches!(source.source_type, SourceType::Git) && source.url.contains(' ') {
                return Err(CubError::InvalidPkgbuild(format!(
                    "git source url must not contain spaces: {}",
                    source.url
                )));
            }
        }
        for (i, source) in self.source.sources.iter().enumerate() {
            match source.source_type {
                SourceType::Tar => {
                    // Tarballs must be checksum-pinned.
                    if source.sha256.is_empty() {
                        return Err(CubError::InvalidPkgbuild(format!(
                            "source[{}]: tar source requires sha256 checksum",
                            i
                        )));
                    }
                }
                SourceType::Git => {
                    if source.rev.is_empty() && source.branch.is_empty() {
                        // Warning only for MVP: some git sources intentionally track default branch.
                    }
                }
            }
        }
        // Custom builds need SOME way to build/install: either commands or
        // declarative install entries.
        if matches!(self.build.template, BuildTemplate::Custom)
            && self.build.prepare.is_empty()
            && self.build.build_script.is_empty()
            && self.build.install_script.is_empty()
            && self.install.bins.is_empty()
            && self.install.libs.is_empty()
            && self.install.headers.is_empty()
            && self.install.docs.is_empty()
            && self.install.man.is_empty()
        {
            return Err(CubError::InvalidPkgbuild(
                "custom builds require prepare/build/install instructions".to_string(),
            ));
        }
        Ok(())
    }
    /// Project this manifest down to its flat `.RBSRCINFO` summary.
    pub fn to_srcinfo(&self) -> RbSrcInfo {
        RbSrcInfo::from_rbpkgbuild(self)
    }
}
/// True when `name` is a valid package name: ONE OR MORE of `[a-z0-9-_]`.
///
/// The empty string is rejected explicitly — `all` on an empty iterator is
/// vacuously true, which previously let `""` satisfy this check (callers
/// happened to test emptiness first, but the function now matches its own
/// contract).
fn valid_package_name(name: &str) -> bool {
    !name.is_empty()
        && name
            .chars()
            .all(|ch| ch.is_ascii_lowercase() || ch.is_ascii_digit() || ch == '-' || ch == '_')
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::NamedTempFile;
    // Valid baseline manifest; individual tests mutate it via string
    // replacement to produce targeted invalid variants.
    const SAMPLE_TOML: &str = r#"
format = 1
[package]
name = "demo-pkg"
version = "1.0.0"
release = 1
description = "demo package"
homepage = "https://example.com"
license = ["MIT"]
architectures = ["x86_64-unknown-redox", "aarch64-unknown-redox"]
maintainers = ["Red Bear OS"]
[source]
sources = [
{ type = "git", url = "https://example.com/repo.git", rev = "abc123", branch = "main" }
]
[dependencies]
build = ["cargo"]
runtime = ["openssl3"]
[build]
template = "cargo"
release = true
features = ["std"]
[policy]
allow_network = false
"#;
    #[test]
    fn parses_valid_rbpkgbuild() {
        let pkg = RbPkgBuild::from_str(SAMPLE_TOML).expect("parse RBPKGBUILD");
        assert_eq!(pkg.format, 1);
        assert_eq!(pkg.package.name, "demo-pkg");
        assert_eq!(pkg.build.template, BuildTemplate::Cargo);
        assert!(pkg.build.release);
    }
    // Uppercase characters violate the [a-z0-9-_]+ name rule.
    #[test]
    fn rejects_invalid_name() {
        let invalid = SAMPLE_TOML.replace("demo-pkg", "DemoPkg");
        let err = RbPkgBuild::from_str(&invalid).expect_err("invalid name should fail");
        assert!(matches!(err, CubError::InvalidPkgbuild(_)));
    }
    // The x86_64-unknown-redox target is mandatory.
    #[test]
    fn rejects_missing_redox_architecture() {
        let invalid = SAMPLE_TOML.replace(
            "[\"x86_64-unknown-redox\", \"aarch64-unknown-redox\"]",
            "[\"x86_64-unknown-linux-gnu\"]",
        );
        let err = RbPkgBuild::from_str(&invalid).expect_err("missing redox arch should fail");
        assert!(matches!(err, CubError::InvalidPkgbuild(_)));
    }
    // Tar sources must be checksum-pinned.
    #[test]
    fn rejects_tar_source_without_sha256() {
        let invalid = SAMPLE_TOML.replace(
            r#"{ type = "git", url = "https://example.com/repo.git", rev = "abc123", branch = "main" }"#,
            r#"{ type = "tar", url = "https://example.com/demo.tar.gz" }"#,
        );
        let err =
            RbPkgBuild::from_str(&invalid).expect_err("tar source without sha256 should fail");
        assert!(matches!(err, CubError::InvalidPkgbuild(_)));
    }
    // parse -> serialize -> parse must preserve content.
    #[test]
    fn round_trips_to_toml() {
        let pkg = RbPkgBuild::from_str(SAMPLE_TOML).expect("parse RBPKGBUILD");
        let toml = pkg.to_toml().expect("serialize RBPKGBUILD");
        let reparsed = RbPkgBuild::from_str(&toml).expect("reparse RBPKGBUILD");
        assert_eq!(reparsed.package.name, "demo-pkg");
        assert_eq!(reparsed.build.features, vec!["std"]);
    }
    #[test]
    fn parses_from_file() {
        let file = NamedTempFile::new().expect("temp file");
        fs::write(file.path(), SAMPLE_TOML).expect("write RBPKGBUILD");
        let pkg = RbPkgBuild::from_file(file.path()).expect("read RBPKGBUILD");
        assert_eq!(pkg.package.version, "1.0.0");
    }
    // Flat SRCINFO projection carries name/version and both dep lists.
    #[test]
    fn converts_to_srcinfo() {
        let pkg = RbPkgBuild::from_str(SAMPLE_TOML).expect("parse RBPKGBUILD");
        let srcinfo = pkg.to_srcinfo();
        assert_eq!(srcinfo.pkgname, "demo-pkg");
        assert_eq!(srcinfo.pkgver, "1.0.0");
        assert_eq!(srcinfo.makedepends, vec!["cargo"]);
        assert_eq!(srcinfo.depends, vec!["openssl3"]);
    }
}
@@ -0,0 +1,226 @@
use std::fs;
use std::path::Path;
use crate::error::CubError;
use crate::rbpkgbuild::{RbPkgBuild, SourceType};
/// Flat `.RBSRCINFO` summary of a package (modelled after Arch's .SRCINFO):
/// line-oriented `key = value` pairs, with list keys repeated per entry.
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct RbSrcInfo {
    pub pkgname: String,
    pub pkgver: String,
    pub pkgrel: u32,
    pub pkgdesc: String,
    // Single primary architecture (first of the manifest's list).
    pub arch: String,
    pub depends: Vec<String>,
    pub makedepends: Vec<String>,
    pub source: Vec<String>,
    // Only tar sources contribute entries, so this list can be shorter than
    // `source` when git sources are present.
    pub sha256sums: Vec<String>,
    pub provides: Vec<String>,
    pub conflicts: Vec<String>,
}
impl RbSrcInfo {
    /// Read and parse a `.RBSRCINFO` file from disk.
    pub fn from_file(path: impl AsRef<Path>) -> Result<RbSrcInfo, CubError> {
        let contents = fs::read_to_string(path)?;
        Self::from_str(&contents)
    }
    /// Render the line-oriented `key = value` format; empty scalars and
    /// empty list entries are skipped, output ends with a newline.
    ///
    /// NOTE(review): an inherent `to_string` shadows `ToString::to_string`
    /// (clippy::inherent_to_string); implementing `Display` would be the
    /// idiomatic form — confirm no caller relies on the inherent method.
    pub fn to_string(&self) -> String {
        let mut lines = Vec::new();
        push_scalar(&mut lines, "pkgname", &self.pkgname);
        push_scalar(&mut lines, "pkgver", &self.pkgver);
        // pkgrel is numeric and always emitted, even when 0.
        lines.push(format!("pkgrel = {}", self.pkgrel));
        push_scalar(&mut lines, "pkgdesc", &self.pkgdesc);
        push_scalar(&mut lines, "arch", &self.arch);
        push_list(&mut lines, "depends", &self.depends);
        push_list(&mut lines, "makedepends", &self.makedepends);
        push_list(&mut lines, "source", &self.source);
        push_list(&mut lines, "sha256sums", &self.sha256sums);
        push_list(&mut lines, "provides", &self.provides);
        push_list(&mut lines, "conflicts", &self.conflicts);
        let mut output = lines.join("\n");
        output.push('\n');
        output
    }
    /// Project an RBPKGBUILD manifest down to this flat summary.
    ///
    /// Only tar sources contribute `sha256sums` entries, so `source` and
    /// `sha256sums` can legitimately differ in length.
    pub fn from_rbpkgbuild(rb: &RbPkgBuild) -> Self {
        let mut sha256sums = Vec::new();
        let source = rb
            .source
            .sources
            .iter()
            .map(|entry| {
                if matches!(entry.source_type, SourceType::Tar) && !entry.sha256.is_empty() {
                    sha256sums.push(entry.sha256.clone());
                }
                entry.url.clone()
            })
            .collect();
        Self {
            pkgname: rb.package.name.clone(),
            pkgver: rb.package.version.clone(),
            pkgrel: rb.package.release,
            pkgdesc: rb.package.description.clone(),
            // First declared architecture wins; default to the primary
            // Redox target when none is declared.
            arch: rb
                .package
                .architectures
                .first()
                .cloned()
                .unwrap_or_else(|| "x86_64-unknown-redox".to_string()),
            depends: rb.dependencies.runtime.clone(),
            makedepends: rb.dependencies.build.clone(),
            source,
            sha256sums,
            provides: rb.dependencies.provides.clone(),
            conflicts: rb.dependencies.conflicts.clone(),
        }
    }
    /// Parse the line-oriented format. Blank lines, `#` comments, and
    /// unknown keys are ignored; repeated list keys accumulate; values are
    /// stripped of surrounding double quotes.
    fn from_str(contents: &str) -> Result<RbSrcInfo, CubError> {
        let mut info = RbSrcInfo::default();
        for raw_line in contents.lines() {
            let line = raw_line.trim();
            if line.is_empty() || line.starts_with('#') {
                continue;
            }
            let Some((key, value)) = line.split_once('=') else {
                continue;
            };
            let key = key.trim();
            let value = value.trim().trim_matches('"');
            match key {
                "pkgname" => info.pkgname = value.to_string(),
                "pkgver" => info.pkgver = value.to_string(),
                "pkgrel" => {
                    // The only field that can fail parsing.
                    info.pkgrel = value.parse().map_err(|_| {
                        CubError::InvalidPkgbuild(format!("invalid pkgrel in .RBSRCINFO: {value}"))
                    })?
                }
                "pkgdesc" => info.pkgdesc = value.to_string(),
                "arch" => info.arch = value.to_string(),
                "depends" => info.depends.push(value.to_string()),
                "makedepends" => info.makedepends.push(value.to_string()),
                "source" => info.source.push(value.to_string()),
                "sha256sums" => info.sha256sums.push(value.to_string()),
                "provides" => info.provides.push(value.to_string()),
                "conflicts" => info.conflicts.push(value.to_string()),
                _ => {}
            }
        }
        Ok(info)
    }
}
/// Append a `key = value` line to `lines`, skipping empty values entirely.
fn push_scalar(lines: &mut Vec<String>, key: &str, value: &str) {
    if value.is_empty() {
        return;
    }
    lines.push(format!("{key} = {value}"));
}
/// Append one `key = value` line per non-empty entry in `values`,
/// preserving the entries' original order.
fn push_list(lines: &mut Vec<String>, key: &str, values: &[String]) {
    for value in values.iter().filter(|entry| !entry.is_empty()) {
        lines.push(format!("{key} = {value}"));
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::rbpkgbuild::{
        BuildSection, CompatSection, ConversionStatus, DependenciesSection, InstallSection,
        PackageSection, PatchesSection, PolicySection, RbPkgBuild, SourceEntry, SourceSection,
        SourceType,
    };
    use tempfile::NamedTempFile;

    /// Builds the RBPKGBUILD fixture shared by the conversion tests: a
    /// single tarball source with a checksum plus one entry in each
    /// dependency-related list.
    fn sample_rbpkgbuild() -> RbPkgBuild {
        let package = PackageSection {
            name: "demo".to_string(),
            version: "1.2.3".to_string(),
            release: 4,
            description: "Demo package".to_string(),
            homepage: String::new(),
            license: vec!["MIT".to_string()],
            architectures: vec!["x86_64-unknown-redox".to_string()],
            maintainers: Vec::new(),
        };
        let source = SourceSection {
            sources: vec![SourceEntry {
                source_type: SourceType::Tar,
                url: "https://example.com/demo.tar.xz".to_string(),
                sha256: "abc123".to_string(),
                rev: String::new(),
                branch: String::new(),
            }],
        };
        let dependencies = DependenciesSection {
            build: vec!["cmake".to_string()],
            runtime: vec!["zlib".to_string()],
            check: Vec::new(),
            optional: Vec::new(),
            provides: vec!["demo-virtual".to_string()],
            conflicts: vec!["demo-old".to_string()],
        };
        let compat = CompatSection {
            imported_from: String::new(),
            original_pkgbuild: String::new(),
            conversion_status: ConversionStatus::Full,
            target: String::new(),
        };
        RbPkgBuild {
            format: 1,
            package,
            source,
            dependencies,
            build: BuildSection::default(),
            install: InstallSection::default(),
            patches: PatchesSection::default(),
            compat,
            policy: PolicySection::default(),
        }
    }

    #[test]
    fn converts_from_rbpkgbuild() {
        let info = RbSrcInfo::from_rbpkgbuild(&sample_rbpkgbuild());
        assert_eq!(info.pkgname, "demo");
        assert_eq!(info.pkgver, "1.2.3");
        assert_eq!(info.pkgrel, 4);
        assert_eq!(info.depends, vec!["zlib"]);
        assert_eq!(info.makedepends, vec!["cmake"]);
        assert_eq!(info.sha256sums, vec!["abc123"]);
    }

    #[test]
    fn serializes_and_parses_round_trip() {
        let original = RbSrcInfo::from_rbpkgbuild(&sample_rbpkgbuild());
        let rendered = original.to_string();
        let reparsed = RbSrcInfo::from_str(&rendered).expect("parse .RBSRCINFO");
        assert_eq!(reparsed, original);
    }

    #[test]
    fn parses_from_file() {
        let contents = "pkgname = demo\npkgver = 1.0.0\npkgrel = 1\narch = x86_64-unknown-redox\n";
        let file = NamedTempFile::new().expect("temp file");
        fs::write(file.path(), contents).expect("write .RBSRCINFO");
        let info = RbSrcInfo::from_file(file.path()).expect("read .RBSRCINFO");
        assert_eq!(info.pkgname, "demo");
        assert_eq!(info.pkgver, "1.0.0");
        assert_eq!(info.pkgrel, 1);
        assert_eq!(info.arch, "x86_64-unknown-redox");
    }
}
@@ -0,0 +1,164 @@
use std::collections::{BTreeSet, HashMap};
use std::fs;
use std::path::{Path, PathBuf};
use crate::error::CubError;
/// Filesystem layout and build parameters for an isolated cub build.
///
/// `new` roots every directory under `<source_dir>/.cub-sandbox`;
/// `env_vars` exposes the layout through Cookbook-compatible environment
/// variables, and `setup`/`cleanup` create and tear down the directories.
#[derive(Debug, Clone)]
pub struct SandboxConfig {
    /// Rust target triple packages are built for (e.g. `x86_64-unknown-redox`).
    pub target: String,
    /// GNU-style target triple, exported as `GNU_TARGET`.
    pub gnu_target: String,
    /// Install destination (`DESTDIR`); `new` points this at `stage_dir`.
    pub destdir: PathBuf,
    /// Install prefix (e.g. `/usr`). NOTE(review): not consumed by
    /// `env_vars` in this file — confirm where it is read.
    pub prefix: String,
    /// Parallel job count, exported as `COOKBOOK_MAKE_JOBS`.
    pub cores: u32,
    /// Whether the build may reach the network. NOTE(review): not consumed
    /// by `env_vars`/`setup` in this file — confirm enforcement point.
    pub allow_network: bool,
    /// Package source checkout, exported as `COOKBOOK_SOURCE`.
    pub source_dir: PathBuf,
    /// Scratch build directory under the sandbox root.
    pub build_dir: PathBuf,
    /// Staging (install image) directory, exported as `COOKBOOK_STAGE`.
    pub stage_dir: PathBuf,
    /// Sysroot directory, exported as `COOKBOOK_SYSROOT`; its `bin/` is
    /// prepended to `PATH`.
    pub sysroot_dir: PathBuf,
}
impl SandboxConfig {
    /// Build a sandbox rooted at `<source_dir>/.cub-sandbox` with Redox
    /// defaults: x86_64 target triples, `/usr` prefix, one job per available
    /// core (falling back to 1), and networking disabled.
    pub fn new(source_dir: &Path) -> Self {
        let root = source_dir.join(".cub-sandbox");
        let build_dir = root.join("build");
        let stage_dir = root.join("stage");
        let sysroot_dir = root.join("sysroot");
        Self {
            target: "x86_64-unknown-redox".to_string(),
            gnu_target: "x86_64-redox".to_string(),
            // DESTDIR defaults to the stage directory, matching Cookbook.
            destdir: stage_dir.clone(),
            prefix: "/usr".to_string(),
            cores: std::thread::available_parallelism()
                .map(|count| count.get() as u32)
                .unwrap_or(1),
            allow_network: false,
            source_dir: source_dir.to_path_buf(),
            build_dir,
            stage_dir,
            sysroot_dir,
        }
    }

    /// Environment variables for a Cookbook-style build: the `COOKBOOK_*`
    /// layout variables, `DESTDIR`/`TARGET`/`GNU_TARGET`, and a `PATH`
    /// with the sandbox sysroot's `bin/` prepended.
    ///
    /// `COOKBOOK_HOST_TARGET` is currently hard-coded to an x86_64 Linux
    /// host triple.
    pub fn env_vars(&self) -> HashMap<String, String> {
        let mut env = HashMap::new();
        let current_path = std::env::var("PATH").unwrap_or_default();
        let tool_path = self.sysroot_dir.join("bin");
        env.insert(
            "COOKBOOK_SOURCE".to_string(),
            self.source_dir.display().to_string(),
        );
        env.insert(
            "COOKBOOK_STAGE".to_string(),
            self.stage_dir.display().to_string(),
        );
        env.insert(
            "COOKBOOK_SYSROOT".to_string(),
            self.sysroot_dir.display().to_string(),
        );
        env.insert("COOKBOOK_TARGET".to_string(), self.target.clone());
        env.insert(
            "COOKBOOK_HOST_TARGET".to_string(),
            "x86_64-unknown-linux-gnu".to_string(),
        );
        env.insert("COOKBOOK_MAKE_JOBS".to_string(), self.cores.to_string());
        // Honor the configured destdir field rather than assuming it still
        // aliases stage_dir (callers may retarget it after `new`).
        env.insert("DESTDIR".to_string(), self.destdir.display().to_string());
        env.insert("TARGET".to_string(), self.target.clone());
        env.insert("GNU_TARGET".to_string(), self.gnu_target.clone());
        env.insert(
            "PATH".to_string(),
            if current_path.is_empty() {
                tool_path.display().to_string()
            } else {
                format!("{}:{}", tool_path.display(), current_path)
            },
        );
        env
    }

    /// Create every sandbox directory (build, stage, sysroot, destdir),
    /// including missing parents. Idempotent.
    pub fn setup(&self) -> Result<(), CubError> {
        for dir in [
            &self.build_dir,
            &self.stage_dir,
            &self.sysroot_dir,
            &self.destdir,
        ] {
            fs::create_dir_all(dir).map_err(|err| {
                CubError::Sandbox(format!("failed to create {}: {err}", dir.display()))
            })?;
        }
        Ok(())
    }

    /// Remove every sandbox directory. The BTreeSet deduplicates paths
    /// (`destdir` usually aliases `stage_dir`); already-missing directories
    /// are not an error. Finally the `.cub-sandbox` root itself is removed
    /// on a best-effort basis if it is empty.
    pub fn cleanup(&self) -> Result<(), CubError> {
        let mut dirs = BTreeSet::new();
        dirs.insert(self.destdir.clone());
        dirs.insert(self.stage_dir.clone());
        dirs.insert(self.build_dir.clone());
        dirs.insert(self.sysroot_dir.clone());
        for dir in dirs {
            // Tolerate a directory that is already gone; this avoids the
            // check-then-remove race of an exists() guard. Any other error
            // (permissions, I/O) is a real failure.
            if let Err(err) = fs::remove_dir_all(&dir) {
                if err.kind() != std::io::ErrorKind::NotFound {
                    return Err(CubError::Sandbox(format!(
                        "failed to remove {}: {err}",
                        dir.display()
                    )));
                }
            }
        }
        // Best effort: drop the now-empty sandbox root so cleanup leaves no
        // residue. remove_dir only deletes empty directories, so this is
        // safe even if the caller placed other files there.
        let _ = fs::remove_dir(self.source_dir.join(".cub-sandbox"));
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use tempfile::tempdir;

    #[test]
    fn builds_expected_defaults() {
        let workspace = tempdir().expect("tempdir");
        let config = SandboxConfig::new(workspace.path());
        assert_eq!(config.target, "x86_64-unknown-redox");
        assert_eq!(config.gnu_target, "x86_64-redox");
        assert_eq!(config.prefix, "/usr");
        assert!(config.cores >= 1);
    }

    #[test]
    fn exposes_cookbook_environment() {
        let workspace = tempdir().expect("tempdir");
        let config = SandboxConfig::new(workspace.path());
        let env = config.env_vars();
        let expected_target = "x86_64-unknown-redox".to_string();
        assert_eq!(env.get("COOKBOOK_TARGET"), Some(&expected_target));
        let expected_gnu = "x86_64-redox".to_string();
        assert_eq!(env.get("GNU_TARGET"), Some(&expected_gnu));
        // PATH must lead with the sandbox sysroot's bin directory.
        let tool_bin = config.sysroot_dir.join("bin").display().to_string();
        let path = env.get("PATH").expect("PATH set");
        assert!(path.starts_with(&tool_bin));
    }

    #[test]
    fn sets_up_and_cleans_directories() {
        let workspace = tempdir().expect("tempdir");
        let config = SandboxConfig::new(workspace.path());
        config.setup().expect("setup sandbox");
        for dir in [&config.build_dir, &config.stage_dir, &config.sysroot_dir] {
            assert!(dir.exists());
        }
        config.cleanup().expect("cleanup sandbox");
        for dir in [&config.build_dir, &config.stage_dir, &config.sysroot_dir] {
            assert!(!dir.exists());
        }
    }
}
+1
View File
@@ -154,6 +154,7 @@ symlink "../../local/recipes/system/redbear-meta" "recipes/system/redbear-meta"
symlink "../../local/recipes/system/udev-shim" "recipes/system/udev-shim"
symlink "../../local/recipes/core/ext4d" "recipes/core/ext4d"
symlink "../../local/recipes/tui/mc" "recipes/tui/mc"
symlink "../../local/recipes/system/cub" "recipes/system/cub"
status "Custom recipe symlinks ready"
echo ""
+1
View File
@@ -0,0 +1 @@
../../local/recipes/system/cub