fix: noconfirm auto-selects first AUR match

This commit is contained in:
2026-05-08 11:01:02 +01:00
parent d39cdc3fd9
commit 153cca6132
8056 changed files with 1983098 additions and 779 deletions
+12 -10
View File
@@ -424,7 +424,7 @@ fn repo_inner(
handle_validate_patches(recipe, logger)?;
Ok(false)
};
let Some(log_path) = &config.logs_dir else {
let Some(_log_path) = &config.logs_dir else {
return validate_fn(&None);
};
let (status_tx, status_rx) = mpsc::channel::<StatusUpdate>();
@@ -492,7 +492,9 @@ fn parse_args(args: Vec<String>) -> anyhow::Result<(CliConfig, CliCommand, Vec<C
"--all" => config.all = true,
"--allow-protected" => {
// SAFETY: set once at startup, before any threading
unsafe { env::set_var("REDBEAR_ALLOW_PROTECTED_FETCH", "1"); }
unsafe {
env::set_var("REDBEAR_ALLOW_PROTECTED_FETCH", "1");
}
}
_ => {
eprintln!("Error: Unknown flag: {}", arg);
@@ -749,11 +751,14 @@ fn handle_fetch(
if !allow_offline {
if let Ok(release) = env::var("REDBEAR_RELEASE") {
if !release.is_empty() {
bail!("{}", Error::Other(format!(
"Fetch is disabled in release mode (REDBEAR_RELEASE={}). \
bail!(
"{}",
Error::Other(format!(
"Fetch is disabled in release mode (REDBEAR_RELEASE={}). \
Sources are immutable. To refresh, run: provision-release.sh",
release
)));
release
))
);
}
}
}
@@ -807,10 +812,7 @@ fn handle_cook(
Ok(build_result.cached)
}
fn handle_validate_patches(
recipe: &CookRecipe,
logger: &PtyOut,
) -> anyhow::Result<()> {
fn handle_validate_patches(recipe: &CookRecipe, logger: &PtyOut) -> anyhow::Result<()> {
validate_patches(recipe, logger).map_err(|e| anyhow!("{}", e))
}
+15 -4
View File
@@ -271,11 +271,13 @@ pub fn build(
let deps_modified = modified_all_btree(
dep_pkgars.iter().map(|(_dep, pkgar)| pkgar.as_path()),
modified,
).unwrap_or(SystemTime::UNIX_EPOCH);
)
.unwrap_or(SystemTime::UNIX_EPOCH);
let deps_host_modified = modified_all_btree(
dep_host_pkgars.iter().map(|(_dep, pkgar)| pkgar.as_path()),
modified,
).unwrap_or(SystemTime::UNIX_EPOCH);
)
.unwrap_or(SystemTime::UNIX_EPOCH);
// check stage dir modified against pkgar files, any files missing will result in UNIX_EPOCH
let stage_modified = modified_all(&stage_pkgars, modified).unwrap_or(SystemTime::UNIX_EPOCH);
@@ -608,10 +610,19 @@ fn build_deps_dir(
archive_path.clone()
} else {
let repo_path = std::path::PathBuf::from("repo")
.join(crate::cross_target().as_deref().unwrap_or("x86_64-unknown-redox"))
.join(
crate::cross_target()
.as_deref()
.unwrap_or("x86_64-unknown-redox"),
)
.join(format!("{}.pkgar", name.without_prefix()));
if repo_path.is_file() {
log_to_pty!(logger, "DEBUG: using repo pkgar for {}: {}", name, repo_path.display());
log_to_pty!(
logger,
"DEBUG: using repo pkgar for {}: {}",
name,
repo_path.display()
);
repo_path
} else {
archive_path.clone()
+141 -83
View File
@@ -41,9 +41,17 @@ pub(crate) fn cleanup_workspace_pollution(recipe_dir: &Path, logger: &PtyOut) {
let path = recipes_root.join(file);
if path.is_file() && !path.is_symlink() {
if let Err(e) = fs::remove_file(&path) {
log_to_pty!(logger, "[WARN] failed to remove workspace pollution {}: {e}", path.display());
log_to_pty!(
logger,
"[WARN] failed to remove workspace pollution {}: {e}",
path.display()
);
} else {
log_to_pty!(logger, "[CLEAN] removed workspace pollution {}", path.display());
log_to_pty!(
logger,
"[CLEAN] removed workspace pollution {}",
path.display()
);
}
}
}
@@ -262,7 +270,11 @@ fn redbear_source_dir_is_effectively_empty(source_dir: &Path) -> bool {
visible_entries == 0
}
fn redbear_ensure_offline_source(recipe_dir: &Path, source_dir: &PathBuf, logger: &PtyOut) -> Result<()> {
fn redbear_ensure_offline_source(
recipe_dir: &Path,
source_dir: &PathBuf,
logger: &PtyOut,
) -> Result<()> {
if !source_dir.exists() || redbear_source_dir_is_effectively_empty(source_dir) {
redbear_try_restore_source(recipe_dir, logger, true)?;
}
@@ -366,53 +378,60 @@ pub fn fetch_offline(recipe: &CookRecipe, logger: &PtyOut) -> Result<FetchResult
redbear_ensure_offline_git_source(recipe_dir, &source_dir, logger)?;
let git_head = source_dir.join(".git/HEAD");
if !git_head.is_file() {
let source_ident = rev.clone().unwrap_or_else(|| {
format!("release-archive:{}", recipe.name.name())
});
let source_ident = rev
.clone()
.unwrap_or_else(|| format!("release-archive:{}", recipe.name.name()));
FetchResult::cached(source_dir, source_ident)
} else {
let (head_rev, _) = get_git_head_rev(&source_dir)?;
if let Some(expected_rev) = rev {
let head_short = &head_rev[..head_rev.len().min(7)];
let expected_short = &expected_rev[..expected_rev.len().min(7)];
if !head_rev.starts_with(expected_rev.as_str())
&& head_short != expected_short
{
bail_other_err!(
"source at {} has revision {} but recipe expects {}. \
Source archives may be corrupted. Restore from release archives.",
source_dir.display(), head_short, expected_rev
);
let (head_rev, _) = get_git_head_rev(&source_dir)?;
if let Some(expected_rev) = rev {
let expected_commit = get_git_tag_rev(&source_dir, expected_rev)
.unwrap_or_else(|_| expected_rev.clone());
let head_short = &head_rev[..head_rev.len().min(7)];
let expected_short = &expected_commit[..expected_commit.len().min(7)];
if !head_rev.starts_with(expected_commit.as_str())
&& head_short != expected_short
{
bail_other_err!(
"source at {} has revision {} but recipe expects {}. \
Source archives may be corrupted. Restore from release archives.",
source_dir.display(),
head_short,
expected_rev
);
}
}
}
// Validate all patch symlinks resolve before touching source.
fetch_validate_patch_symlinks(recipe_dir, patches)?;
// Validate all patch symlinks resolve before touching source.
fetch_validate_patch_symlinks(recipe_dir, patches)?;
if (!patches.is_empty() || script.is_some())
&& fetch_patches_state_stale(recipe_dir, patches, script, &source_dir)
{
log_to_pty!(logger, "[INFO] patches state stale or missing — re-applying");
// Reset source to clean state, including submodules.
let mut clean_cmd = Command::new("git");
clean_cmd.arg("-C").arg(&source_dir);
clean_cmd.arg("clean").arg("-ffdx");
let _ = run_command(clean_cmd, logger);
let mut reset_cmd = Command::new("git");
reset_cmd.arg("-C").arg(&source_dir);
reset_cmd.arg("reset").arg("--hard");
run_command(reset_cmd, logger)?;
// Recursively reset submodules if any exist.
if source_dir.join(".gitmodules").exists() {
let mut sub_cmd = Command::new("git");
sub_cmd.arg("-C").arg(&source_dir);
sub_cmd.arg("submodule").arg("foreach");
sub_cmd.arg("--recursive");
sub_cmd.arg("git reset --hard && git clean -ffdx");
run_command(sub_cmd, logger)?;
if (!patches.is_empty() || script.is_some())
&& fetch_patches_state_stale(recipe_dir, patches, script, &source_dir)
{
log_to_pty!(
logger,
"[INFO] patches state stale or missing — re-applying"
);
// Reset source to clean state, including submodules.
let mut clean_cmd = Command::new("git");
clean_cmd.arg("-C").arg(&source_dir);
clean_cmd.arg("clean").arg("-ffdx");
let _ = run_command(clean_cmd, logger);
let mut reset_cmd = Command::new("git");
reset_cmd.arg("-C").arg(&source_dir);
reset_cmd.arg("reset").arg("--hard");
run_command(reset_cmd, logger)?;
// Recursively reset submodules if any exist.
if source_dir.join(".gitmodules").exists() {
let mut sub_cmd = Command::new("git");
sub_cmd.arg("-C").arg(&source_dir);
sub_cmd.arg("submodule").arg("foreach");
sub_cmd.arg("--recursive");
sub_cmd.arg("git reset --hard && git clean -ffdx");
run_command(sub_cmd, logger)?;
}
fetch_apply_patches(recipe_dir, patches, script, &source_dir, logger)?;
}
fetch_apply_patches(recipe_dir, patches, script, &source_dir, logger)?;
}
FetchResult::cached(source_dir, head_rev)
FetchResult::cached(source_dir, head_rev)
}
}
Some(SourceRecipe::Tar {
@@ -941,6 +960,13 @@ pub(crate) fn fetch_resolve_canon(
if !canon_dir.exists() {
bail_other_err!("{dir:?} does not exist", dir = canon_dir.display());
}
let canon_dir = canon_dir.canonicalize().map_err(|err| {
format!(
"failed to canonicalize same_as target '{}': {}",
canon_dir.display(),
err
)
})?;
CookRecipe::from_path(canon_dir.as_path(), true, is_host).map_err(Error::from)
}
@@ -1229,21 +1255,25 @@ pub(crate) fn fetch_apply_patches(
logger,
)?;
}
log_to_pty!(logger, "[ATOMIC] {n}/{n} patches applied", n = applied.len());
log_to_pty!(
logger,
"[ATOMIC] {n}/{n} patches applied",
n = applied.len()
);
Ok(())
}
Err(e) => {
let _ = fs::remove_dir_all(&staging_dir);
log_to_pty!(logger, "[ATOMIC] patch application rolled back — source tree unchanged");
log_to_pty!(
logger,
"[ATOMIC] patch application rolled back — source tree unchanged"
);
Err(e)
}
}
}
pub fn validate_patches(
recipe: &CookRecipe,
logger: &PtyOut,
) -> Result<()> {
pub fn validate_patches(recipe: &CookRecipe, logger: &PtyOut) -> Result<()> {
let recipe_dir = &recipe.dir;
let source_dir = recipe_dir.join("source");
@@ -1255,8 +1285,12 @@ pub fn validate_patches(
}
let (patches, script) = match &recipe.recipe.source {
Some(SourceRecipe::Git { patches, script, .. })
| Some(SourceRecipe::Tar { patches, script, .. }) => (patches.clone(), script.clone()),
Some(SourceRecipe::Git {
patches, script, ..
})
| Some(SourceRecipe::Tar {
patches, script, ..
}) => (patches.clone(), script.clone()),
_ => {
log_to_pty!(logger, "[INFO] Recipe has no patches to validate");
return Ok(());
@@ -1302,12 +1336,16 @@ pub fn validate_patches(
// Clean the staging copy to pristine upstream state
let _ = Command::new("git")
.arg("-C").arg(&staging_dir)
.arg("clean").arg("-ffdx")
.arg("-C")
.arg(&staging_dir)
.arg("clean")
.arg("-ffdx")
.status();
Command::new("git")
.arg("-C").arg(&staging_dir)
.arg("reset").arg("--hard")
.arg("-C")
.arg(&staging_dir)
.arg("reset")
.arg("--hard")
.status()
.map_err(|e| format!("failed to reset staging to clean state: {e}"))?;
@@ -1336,7 +1374,12 @@ pub fn validate_patches(
if let Ok(out) = rej_check {
if !out.stdout.is_empty() {
let path = String::from_utf8_lossy(&out.stdout).trim().to_string();
log_to_pty!(logger, " [FAIL] {} → {} has rejected hunks", patch_name, path);
log_to_pty!(
logger,
" [FAIL] {} → {} has rejected hunks",
patch_name,
path
);
failed += 1;
continue;
}
@@ -1385,7 +1428,11 @@ pub fn validate_patches(
);
}
log_to_pty!(logger, "[SUMMARY] All {} patches validated successfully", passed);
log_to_pty!(
logger,
"[SUMMARY] All {} patches validated successfully",
passed
);
Ok(())
}
@@ -1423,16 +1470,16 @@ fn normalize_patch(raw: &[u8]) -> Vec<u8> {
}
/// Computes a BLAKE3 hash over all patch file contents (in order).
fn fetch_compute_patches_hash(
recipe_dir: &Path,
patches: &[String],
) -> Result<String> {
fn fetch_compute_patches_hash(recipe_dir: &Path, patches: &[String]) -> Result<String> {
// BLAKE3 is already a project dependency (used for source verification).
let mut hasher = blake3::Hasher::new();
for patch_name in patches {
let patch_file = recipe_dir.join(patch_name);
let content = fs::read(&patch_file).map_err(|err| {
format!("failed to read patch for hashing '{}': {err}", patch_file.display())
format!(
"failed to read patch for hashing '{}': {err}",
patch_file.display()
)
})?;
hasher.update(&content);
}
@@ -1448,14 +1495,16 @@ fn fetch_compute_patches_hash(
/// and trigger re-patching on the next build.
fn fetch_compute_source_hash(source_dir: &Path) -> String {
let output = Command::new("git")
.arg("-C").arg(source_dir)
.arg("-C")
.arg(source_dir)
.args(["ls-files", "-z"])
.output();
match output {
Ok(out) if !out.stdout.is_empty() => {
let mut hasher = blake3::Hasher::new();
// Hash file paths in sorted order for stability.
let mut files: Vec<&str> = out.stdout
let mut files: Vec<&str> = out
.stdout
.split(|&b| b == 0)
.filter_map(|s| std::str::from_utf8(s).ok())
.collect();
@@ -1487,9 +1536,10 @@ fn fetch_write_patches_state(
.unwrap_or_else(|_| "unknown".to_string());
let hash = fetch_compute_patches_hash(recipe_dir, applied)
.unwrap_or_else(|_| "hash-error".to_string());
let script_hash = script.as_ref().map(|s| {
blake3::hash(s.as_bytes()).to_hex().to_string()
}).unwrap_or_else(|| "none".to_string());
let script_hash = script
.as_ref()
.map(|s| blake3::hash(s.as_bytes()).to_hex().to_string())
.unwrap_or_else(|| "none".to_string());
// State goes in target/ so git clean/reset won't delete it.
let state_dir = recipe_dir.join("target");
@@ -1507,20 +1557,21 @@ fn fetch_write_patches_state(
for (i, name) in applied.iter().enumerate() {
content.push_str(&format!("patch[{}]: {name}\n", i + 1));
}
fs::write(&state_file, &content).map_err(|err| {
format!("failed to write .patches-state: {err}")
})?;
log_to_pty!(logger, "[OK] wrote .patches-state ({}/{} patches)", applied.len(), applied.len());
fs::write(&state_file, &content)
.map_err(|err| format!("failed to write .patches-state: {err}"))?;
log_to_pty!(
logger,
"[OK] wrote .patches-state ({}/{} patches)",
applied.len(),
applied.len()
);
Ok(())
}
/// Validates that every patch file path resolves to a real file before we
/// touch the source tree. Fails early with a clear message if any symlink
/// is broken or file is missing.
fn fetch_validate_patch_symlinks(
recipe_dir: &Path,
patches: &[String],
) -> Result<()> {
fn fetch_validate_patch_symlinks(recipe_dir: &Path, patches: &[String]) -> Result<()> {
let mut seen = std::collections::HashSet::new();
for patch_name in patches {
let patch_file = recipe_dir.join(patch_name);
@@ -1567,9 +1618,10 @@ fn fetch_patches_state_stale(
Ok(h) => h,
Err(_) => return true,
};
let expected_script_hash = script.as_ref().map(|s| {
blake3::hash(s.as_bytes()).to_hex().to_string()
}).unwrap_or_else(|| "none".to_string());
let expected_script_hash = script
.as_ref()
.map(|s| blake3::hash(s.as_bytes()).to_hex().to_string())
.unwrap_or_else(|| "none".to_string());
let current_source_hash = fetch_compute_source_hash(source_dir);
let mut found_hash = false;
@@ -1577,15 +1629,21 @@ fn fetch_patches_state_stale(
let mut found_source = false;
for line in state_content.lines() {
if let Some(stored) = line.strip_prefix("patches-hash: ") {
if stored.trim() != expected_hash { return true; }
if stored.trim() != expected_hash {
return true;
}
found_hash = true;
}
if let Some(stored) = line.strip_prefix("script-hash: ") {
if stored.trim() != expected_script_hash { return true; }
if stored.trim() != expected_script_hash {
return true;
}
found_script = true;
}
if let Some(stored) = line.strip_prefix("source-hash: ") {
if stored.trim() != current_source_hash { return true; }
if stored.trim() != current_source_hash {
return true;
}
found_source = true;
}
}
+1 -1
View File
@@ -38,7 +38,7 @@ function DYNAMIC_INIT {
export RUSTFLAGS="-C target-feature=-crt-static -L native=${COOKBOOK_SYSROOT}/lib -C link-arg=-Wl,-rpath-link,${COOKBOOK_SYSROOT}/lib"
export COOKBOOK_DYNAMIC=1
if [ function = $(type -t reexport_flags) ]; then
if [ "$(type -t reexport_flags)" = function ]; then
reexport_flags
fi
}
+1 -1
View File
@@ -1,5 +1,5 @@
use std::{
collections::{BTreeSet, BTreeMap, VecDeque},
collections::{BTreeMap, BTreeSet, VecDeque},
convert::TryInto,
fs,
path::{Path, PathBuf},
+14 -3
View File
@@ -182,7 +182,10 @@ mod tests {
assert!(path.is_some(), "evdevd recipe should be found");
let path = path.unwrap();
assert!(path.ends_with("evdevd"), "path should end with evdevd");
assert!(path.join("recipe.toml").exists(), "recipe.toml should exist");
assert!(
path.join("recipe.toml").exists(),
"recipe.toml should exist"
);
}
#[test]
@@ -218,8 +221,16 @@ mod tests {
#[test]
fn test_recipe_count_reasonable() {
let count = RECIPE_PATHS.len();
assert!(count > 100, "should have more than 100 recipes (got {})", count);
assert!(count < 10000, "should have fewer than 10000 recipes (got {})", count);
assert!(
count > 100,
"should have more than 100 recipes (got {})",
count
);
assert!(
count < 10000,
"should have fewer than 10000 recipes (got {})",
count
);
}
#[test]