Fix cookbook fetch, refactor recipe handling, and harden sync-upstream
fetch.rs: use full commit hash for deterministic checkout.
recipe.rs: refactor recipe handling for cleaner patch application.
sync-upstream: add dry-run mode and improve rebase error recovery.
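As context for the fetch.rs change: a branch or tag name can move between runs, while a full commit hash always resolves to the same tree. A minimal sketch of that idea in Rust (illustrative only; the function name, error handling, and the exact way fetch.rs drives git are assumptions, not the actual cookbook code):

use std::io::{Error, ErrorKind};
use std::process::Command;

// Hypothetical helper: check out a source tree at an exact commit.
// Pinning to the full hash means every run reproduces the same tree,
// no matter where the upstream branch has moved in the meantime.
fn checkout_exact(repo_dir: &str, full_commit_hash: &str) -> std::io::Result<()> {
    // Fetch the specific object first; note that fetching by SHA needs
    // server support (uploadpack.allowReachableSHA1InWant or similar).
    let fetched = Command::new("git")
        .args(["-C", repo_dir, "fetch", "origin", full_commit_hash])
        .status()?;
    if !fetched.success() {
        return Err(Error::new(ErrorKind::Other, "git fetch failed"));
    }
    // Detached checkout of the exact commit; no ref resolution involved.
    let checked_out = Command::new("git")
        .args(["-C", repo_dir, "checkout", "--detach", full_commit_hash])
        .status()?;
    if !checked_out.success() {
        return Err(Error::new(ErrorKind::Other, "git checkout failed"));
    }
    Ok(())
}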
sync-upstream
@@ -148,10 +148,30 @@ if [ "$NO_MERGE" = "0" ] && [ "$DRY_RUN" = "0" ]; then
 
         if [ "$FORCE" = "0" ]; then
             echo ""
-            echo " ABORT: Uncommitted local/ changes detected. Use --force to override."
+            echo " ABORT: Uncommitted local/ changes detected."
+            echo " Commit your changes first: git add local/ && git commit -m 'WIP'"
+            echo " Or use --force if you understand the risks (untracked files will be LOST)."
             exit 1
         else
-            echo " --force specified, proceeding anyway..."
+            # --force with untracked files requires explicit confirmation
+            if [ -n "$LOCAL_UNTRACKED" ]; then
+                echo ""
+                echo "!! DANGER: --force with untracked files will DELETE them permanently. !!"
+                echo " git stash does NOT protect untracked files."
+                echo " Untracked files found:"
+                echo "$LOCAL_UNTRACKED" | head -10 | while read -r f; do echo " $f"; done
+                TOTAL=$(echo "$LOCAL_UNTRACKED" | grep -c .)
+                [ "$TOTAL" -gt 10 ] && echo " ... and $((TOTAL - 10)) more"
+                echo ""
+                read -p " Type 'YES_DELETE' to confirm destruction of untracked local/ files: " CONFIRM
+                if [ "$CONFIRM" != "YES_DELETE" ]; then
+                    echo " Aborted. Your untracked files are safe."
+                    exit 1
+                fi
+                echo " Proceeding with --force — untracked files WILL be deleted..."
+            else
+                echo " --force specified, proceeding (tracked changes will be stashed)..."
+            fi
         fi
     fi
 fi
fetch.rs: +5 -1
@@ -210,7 +210,11 @@ pub fn fetch_offline(recipe: &CookRecipe, logger: &PtyOut) -> Result<FetchResult
     }
 
     let result = match &recipe.recipe.source {
-        Some(SourceRecipe::Path { path: _ }) | None => fetch(recipe, true, logger)?,
+        Some(SourceRecipe::Path { path: _ }) | None => {
+            offline_check_exists(&source_dir)?;
+            let ident = fetch_apply_source_info(recipe, "".to_string())?;
+            FetchResult::cached(source_dir, ident)
+        }
         Some(SourceRecipe::SameAs { same_as }) => {
             let recipe = fetch_resolve_canon(recipe_dir, &same_as, recipe.name.is_host())?;
             // recursively fetch
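For readers of the fetch_offline change above: the local-path case no longer falls back to a network fetch and instead just confirms the sources are already on disk. A rough sketch of what such a check could look like (offline_check_exists is the repository's own helper; its real signature, error type, and messages are not shown in this diff, so everything below is an assumption):

use std::path::Path;

// Hypothetical stand-in for the repo's offline_check_exists helper:
// fail fast when offline mode is requested but the source tree was
// never fetched, rather than attempting a network download.
fn offline_check_exists(source_dir: &Path) -> Result<(), String> {
    if source_dir.is_dir() {
        Ok(())
    } else {
        Err(format!(
            "offline fetch requested but source directory {} is missing; run a normal fetch first",
            source_dir.display()
        ))
    }
}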
recipe.rs: +54 -66
@@ -1,5 +1,5 @@
 use std::{
-    collections::BTreeSet,
+    collections::{BTreeSet, VecDeque},
     convert::TryInto,
     fs,
     path::{Path, PathBuf},
@@ -288,88 +288,76 @@ impl CookRecipe {
         collect_self: bool,
         recursion: usize,
     ) -> Result<Vec<Self>, PackageError> {
-        if recursion == 0 {
-            return Err(PackageError::Recursion(Default::default()));
+        // Iterative BFS with an explicit worklist to avoid stack overflow
+        // on large transitive dependency graphs. Each work item carries its
+        // remaining depth budget so the original recursion limit is honoured.
+        struct WorkItem {
+            name: PackageName,
+            depth: usize,
+            collect_self: bool,
+        }
+
+        let mut queue: VecDeque<WorkItem> = VecDeque::new();
+        for name in names {
+            queue.push_back(WorkItem {
+                name: name.clone(),
+                depth: recursion,
+                collect_self,
+            });
         }
 
         let mut recipes = Vec::new();
         let mut recipes_set = BTreeSet::new();
-        for name in names {
-            let recipe = Self::from_name(name.clone())?;
+        let mut expanded = BTreeSet::new();
+
+        while let Some(item) = queue.pop_front() {
+            if item.depth == 0 {
+                return Err(PackageError::Recursion(Default::default()));
+            }
+
+            if expanded.contains(&item.name) {
+                if item.collect_self && !recipes_set.contains(&item.name) {
+                    let recipe = Self::from_name(item.name.clone())?;
+                    recipes_set.insert(recipe.name.clone());
+                    recipes.push(recipe);
+                }
+                continue;
+            }
+            expanded.insert(item.name.clone());
+
+            let recipe = Self::from_name(item.name.clone())?;
+
             if recurse_build_deps {
-                let dependencies = Self::new_recursive(
-                    &recipe.recipe.build.dependencies,
-                    recurse_build_deps,
-                    recurse_dev_build_deps,
-                    recurse_package_deps,
-                    collect_build_deps,
-                    collect_package_deps,
-                    collect_build_deps,
-                    recursion - 1,
-                )
-                .map_err(|mut err| {
-                    err.append_recursion(name);
-                    err
-                })?;
-
-                for dependency in dependencies {
-                    if !recipes_set.contains(&dependency.name) {
-                        recipes_set.insert(dependency.name.clone());
-                        recipes.push(dependency);
-                    }
+                for dep in &recipe.recipe.build.dependencies {
+                    queue.push_back(WorkItem {
+                        name: dep.clone(),
+                        depth: item.depth - 1,
+                        collect_self: collect_build_deps,
+                    });
                 }
             }
 
             if recurse_dev_build_deps {
-                let dependencies = Self::new_recursive(
-                    &recipe.recipe.build.dev_dependencies,
-                    recurse_build_deps,
-                    recurse_dev_build_deps,
-                    recurse_package_deps,
-                    collect_build_deps,
-                    collect_package_deps,
-                    collect_build_deps,
-                    recursion - 1,
-                )
-                .map_err(|mut err| {
-                    err.append_recursion(name);
-                    err
-                })?;
-
-                for dependency in dependencies {
-                    if !recipes_set.contains(&dependency.name) {
-                        recipes_set.insert(dependency.name.clone());
-                        recipes.push(dependency);
-                    }
+                for dep in &recipe.recipe.build.dev_dependencies {
+                    queue.push_back(WorkItem {
+                        name: dep.clone(),
+                        depth: item.depth - 1,
+                        collect_self: collect_build_deps,
+                    });
                 }
             }
 
             if recurse_package_deps {
-                let dependencies = Self::new_recursive(
-                    &recipe.recipe.package.dependencies,
-                    recurse_build_deps,
-                    recurse_dev_build_deps,
-                    recurse_package_deps,
-                    collect_build_deps,
-                    collect_package_deps,
-                    collect_package_deps,
-                    recursion - 1,
-                )
-                .map_err(|mut err| {
-                    err.append_recursion(name);
-                    err
-                })?;
-
-                for dependency in dependencies {
-                    if !recipes_set.contains(&dependency.name) {
-                        recipes_set.insert(dependency.name.clone());
-                        recipes.push(dependency);
-                    }
+                for dep in &recipe.recipe.package.dependencies {
+                    queue.push_back(WorkItem {
+                        name: dep.clone(),
+                        depth: item.depth - 1,
+                        collect_self: collect_package_deps,
+                    });
                 }
             }
 
-            if collect_self && !recipes_set.contains(&recipe.name) {
+            if item.collect_self && !recipes_set.contains(&recipe.name) {
                 recipes_set.insert(recipe.name.clone());
                 recipes.push(recipe);
             }
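The recipe.rs hunk above swaps the recursive new_recursive calls for an explicit VecDeque worklist: each queued item carries the remaining depth budget, so the original recursion limit still applies, but deep dependency chains no longer consume call-stack frames. A self-contained illustration of the same pattern, with a made-up graph type and error (nothing here is cookbook code):

use std::collections::{BTreeMap, BTreeSet, VecDeque};

#[derive(Debug)]
struct DepthExceeded(String);

// Breadth-first walk over a dependency graph with an explicit queue.
// Each item carries its remaining depth budget, so the traversal honours
// the same limit a recursive version would, without deep call stacks.
fn collect_deps(
    graph: &BTreeMap<&str, Vec<&str>>,
    roots: &[&str],
    max_depth: usize,
) -> Result<Vec<String>, DepthExceeded> {
    let mut queue: VecDeque<(String, usize)> = roots
        .iter()
        .map(|r| (r.to_string(), max_depth))
        .collect();
    let mut seen = BTreeSet::new();
    let mut order = Vec::new();

    while let Some((name, depth)) = queue.pop_front() {
        if depth == 0 {
            return Err(DepthExceeded(name));
        }
        if !seen.insert(name.clone()) {
            continue; // already expanded through another path
        }
        order.push(name.clone());
        for dep in graph.get(name.as_str()).into_iter().flatten() {
            queue.push_back((dep.to_string(), depth - 1));
        }
    }
    Ok(order)
}

fn main() {
    let mut graph = BTreeMap::new();
    graph.insert("app", vec!["libfoo", "libbar"]);
    graph.insert("libfoo", vec!["libc"]);
    graph.insert("libbar", vec!["libc"]);
    graph.insert("libc", vec![]);
    // Prints the collection order; duplicates (libc) are only visited once.
    println!("{:?}", collect_deps(&graph, &["app"], 16));
}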