Red Bear OS — microkernel OS in Rust, based on Redox

Derivative of Redox OS (https://www.redox-os.org) adding:
- AMD GPU driver (amdgpu) via LinuxKPI compat layer
- ext4 filesystem support (ext4d scheme daemon)
- ACPI fixes for AMD bare metal (x2APIC, DMAR, IVRS, MCFG)
- Custom branding (hostname, os-release, boot identity)

Build system is full upstream Redox with RBOS overlay in local/.
Patches for kernel, base, and relibc are symlinked from local/patches/
and protected from make clean/distclean. Custom recipes live in
local/recipes/ with symlinks into the recipes/ search path.

Build:  make all CONFIG_NAME=redbear-full
Sync:   ./local/scripts/sync-upstream.sh
This commit is contained in:
2026-04-12 19:05:00 +01:00
commit 50b731f1b7
3392 changed files with 98327 additions and 0 deletions
+18
View File
@@ -0,0 +1,18 @@
use std::env;
/// Thin wrapper around `redoxer`: forwards all CLI arguments, inserting a
/// `--` separator after the subcommand so every remaining flag reaches cargo.
/// For the `write-exec` subcommand it additionally injects `--root` and
/// `--folder` options derived from the COOKBOOK_STAGE environment variable.
fn main() {
    let mut args: Vec<String> = env::args().collect();
    if args.len() >= 2 {
        // Gather the extra arguments in the order they must finally appear.
        let mut extra: Vec<String> = Vec::new();
        if args[1] == "write-exec" {
            if let Ok(stage_dir) = env::var("COOKBOOK_STAGE") {
                extra.push("--root".to_string());
                extra.push(stage_dir.clone());
                extra.push("--folder".to_string());
                extra.push(format!("{}/root", stage_dir));
            }
        }
        // Ensure all flags go to cargo.
        extra.push("--".to_string());
        // Splice the extras in directly after the subcommand.
        for item in extra.into_iter().rev() {
            args.insert(2, item);
        }
    }
    redoxer::main(&args);
}
+1961
View File
File diff suppressed because it is too large Load Diff
+290
View File
@@ -0,0 +1,290 @@
use cookbook::cook::ident::{get_ident, init_ident};
use cookbook::cook::{fetch, package as cook_package};
use cookbook::recipe::CookRecipe;
use cookbook::web::{CliWebConfig, generate_web};
use cookbook::{WALK_DEPTH, staged_pkg};
use pkg::PackageName;
use pkg::{Repository, SourceIdentifier};
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::process::Command;
use toml::Value;
/// Returns true when `src` has a strictly newer modification time than `dst`,
/// or when `src` is stat-able with a valid mtime while `dst` is not
/// (i.e. the destination is missing and must be refreshed).
/// Any failure to stat `src` yields false.
fn is_newer(src: &Path, dst: &Path) -> bool {
    let src_time = fs::metadata(src).map(|m| m.modified());
    let dst_time = fs::metadata(dst).map(|m| m.modified());
    match (src_time, dst_time) {
        // Both mtimes available: strict comparison.
        (Ok(Ok(s)), Ok(Ok(d))) => s > d,
        // Destination metadata exists but its mtime is unreadable.
        (Ok(Ok(_)), Ok(Err(_))) => true,
        // Source is stat-able, destination is not (likely missing).
        (Ok(_), Err(_)) => true,
        // Source unreadable in any way: never report it as newer.
        _ => false,
    }
}
/// Parsed command-line configuration for the repo builder binary.
#[derive(Clone)]
struct CliConfig {
    // Root directory of the package repository; the target triple
    // subdirectory is appended by publish_packages.
    repo_dir: PathBuf,
    // Whether to generate AppStream metadata (COOKBOOK_APPSTREAM == "true").
    appstream: bool,
    // Recipe names given as the remaining positional arguments.
    recipe_list: Vec<String>,
    // Optional web-content generation settings, parsed by CliWebConfig.
    web: Option<CliWebConfig>,
}
impl CliConfig {
    /// Build a `CliConfig` from the process arguments: the first positional
    /// argument is the repository directory, all remaining positionals are
    /// recipe names. Panics with a usage message when the repo dir is missing.
    fn parse_args() -> Result<Self, std::io::Error> {
        let mut args = env::args().skip(1);
        let repo_dir = match args.next() {
            Some(dir) => PathBuf::from(dir),
            None => panic!("Usage: repo_builder <REPO_DIR> <recipe1> <recipe2> ..."),
        };
        let web = CliWebConfig::parse_args();
        let appstream = matches!(
            env::var("COOKBOOK_APPSTREAM").as_deref(),
            Ok("true")
        );
        Ok(CliConfig {
            repo_dir,
            appstream,
            recipe_list: args.collect(),
            web,
        })
    }
}
/// Entry point: initialise the build identity, parse CLI configuration,
/// then publish the requested packages into the repository directory.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    init_ident();
    let conf = CliConfig::parse_args()?;
    publish_packages(&conf)?;
    Ok(())
}
// TODO: Make this callable from repo bin
/// Publish the staged packages for `config.recipe_list` into
/// `config.repo_dir/<target>/`, then regenerate `repo.toml` (merging the
/// previous repo.toml and scanning the per-package *.toml files on disk),
/// optionally producing AppStream metadata and web content.
///
/// Host packages and cross-target builds are skipped. Errors from
/// individual recipes are reported to stderr and the recipe is skipped or
/// marked outdated rather than aborting the whole publish.
fn publish_packages(config: &CliConfig) -> anyhow::Result<()> {
    let repo_path = &config.repo_dir.join(redoxer::target());
    if !repo_path.is_dir() {
        fs::create_dir_all(repo_path)?;
    }
    // Don't publish host packages
    let target_packages = &config
        .recipe_list
        .iter()
        .map(PackageName::new)
        .filter(|pkg| pkg.as_ref().is_ok_and(|p| !p.is_host()))
        .collect::<Result<Vec<_>, _>>()?;
    if target_packages.len() == 0 {
        // Nothing requested for this target; not an error.
        return Ok(());
    }
    // TODO: publish cross target builds?
    if std::env::var("COOKBOOK_CROSS_TARGET").is_ok_and(|x| !x.is_empty()) {
        return Ok(());
    }
    // Runtime dependencies include both `[package.dependencies]` and dynamically
    // linked packages discovered by auto_deps.
    //
    // The following adds the package dependencies of the recipes to the repo as
    // well.
    let (recipe_list, recipe_map) = staged_pkg::new_recursive_nonstop(target_packages, WALK_DEPTH);
    if recipe_list.len() == 0 {
        // Fail-Safe
        anyhow::bail!("Zero packages are passing the build");
    }
    // recipe name -> stage dir that contains AppStream metainfo.
    let mut appstream_sources: HashMap<String, PathBuf> = HashMap::new();
    // package name -> version/hash string, for repo.toml.
    let mut packages: BTreeMap<String, String> = BTreeMap::new();
    // package name -> source identity of builds that failed / went stale.
    let mut outdated_packages: BTreeMap<String, SourceIdentifier> = BTreeMap::new();
    // === 1. Push recipes in list ===
    for recipe_toml in &recipe_list {
        let recipe = &recipe_toml.name;
        let Some(recipe_path) = staged_pkg::find(recipe.name()) else {
            eprintln!("recipe {} not found", recipe);
            continue;
        };
        let Ok(cookbook_recipe) = CookRecipe::from_path(recipe_path, true, false) else {
            eprintln!("recipe {} unable to read", recipe);
            continue;
        };
        let target_dir = cookbook_recipe.target_dir();
        for package in cookbook_recipe.recipe.get_packages_list() {
            let (stage_dir, pkgar_src, toml_src) =
                cook_package::package_stage_paths(package, &target_dir);
            let recipe_name = cook_package::get_package_name(recipe.name(), package);
            let pkgar_dst = repo_path.join(format!("{}.pkgar", recipe_name));
            let toml_dst = repo_path.join(format!("{}.toml", recipe_name));
            if !fs::exists(&toml_src)? {
                eprintln!("recipe {} is missing stage.toml", recipe_name);
                continue;
            }
            // Only copy artifacts whose staged metadata is newer than the
            // published copy (mtime comparison via is_newer).
            if is_newer(&toml_src, &toml_dst) {
                eprintln!("\x1b[01;38;5;155mrepo - publishing {}\x1b[0m", recipe_name);
                if fs::exists(&pkgar_src)? {
                    fs::copy(&pkgar_src, &pkgar_dst)?;
                }
                fs::copy(&toml_src, &toml_dst)?;
            }
            // TODO: Extract from pkgar instead to handle config.cook.clean_target == true
            if stage_dir.join("usr/share/metainfo").exists() {
                appstream_sources.insert(recipe.name().to_string(), stage_dir.clone());
            }
        }
    }
    // === 2. Optional AppStream generation ===
    if config.appstream {
        eprintln!("\x1b[01;38;5;155mrepo - generating appstream data\x1b[0m");
        let root = env::var("ROOT").unwrap_or_else(|_| ".".into());
        let target = env::var("TARGET").unwrap_or_else(|_| "x86_64-unknown-linux-gnu".into());
        let appstream_root = Path::new(&root)
            .join("build")
            .join(&target)
            .join("appstream");
        // Best-effort clean before regenerating into a fresh directory.
        fs::remove_dir_all(&appstream_root).ok();
        fs::create_dir_all(&appstream_root)?;
        if !appstream_sources.is_empty() {
            // Compose metadata with the external `appstreamcli` tool.
            let mut compose_cmd = Command::new("appstreamcli");
            compose_cmd
                .arg("compose")
                .arg("--origin=pkgar")
                .arg("--print-report=full")
                .arg(format!("--result-root={}", appstream_root.display()));
            for (_recipe, source_path) in &appstream_sources {
                compose_cmd.arg(source_path);
            }
            let exit_status = compose_cmd.status()?;
            if exit_status.success() {
                // Package the generated metadata as repo-appstream.pkgar.
                let appstream_pkg = repo_path.join("repo-appstream.pkgar");
                fs::remove_file(&appstream_pkg).ok();
                pkgar::create(
                    format!("{}/build/id_ed25519.toml", root),
                    &appstream_pkg,
                    &appstream_root,
                )?;
            } else {
                // Report the failure and the inputs, but keep publishing.
                eprintln!("\x1b[1;91;49mrepo - appstreamcli failed:\x1b[0m {exit_status:?}");
                for (_recipe, source_path) in &appstream_sources {
                    eprintln!("- {}", source_path.display());
                }
                eprintln!();
            }
        }
    }
    // === 3. List outdated packages ===
    // Walk every recipe whose build errored and record its source identity
    // so the repo.toml can mark it as outdated.
    for (recipe, e) in recipe_map
        .into_iter()
        .filter_map(|(k, v)| v.err().and_then(|e| Some((k, e))))
    {
        eprintln!(
            "\x1b[0;91;49mrepo - marking {} as outdated:\x1b[0m {e}",
            recipe
        );
        let Some(recipe_path) = staged_pkg::find(recipe.name()) else {
            eprintln!("recipe {} not found", recipe);
            continue;
        };
        let Ok(cookbook_recipe) = CookRecipe::from_path(recipe_path, true, false) else {
            eprintln!("recipe {} unable to read", recipe);
            continue;
        };
        match fetch::fetch_get_source_info(&cookbook_recipe) {
            Ok(source_ident) => {
                outdated_packages.insert(recipe.name().to_string(), source_ident);
            }
            Err(e) => {
                eprintln!(
                    "\x1b[0;91;49m source of {} is not identifiable:\x1b[0m {e}",
                    recipe
                );
                // Fall back to the build identity with a sentinel source id.
                let ident = get_ident();
                outdated_packages.insert(
                    recipe.name().to_string(),
                    SourceIdentifier {
                        source_identifier: "missing_source".to_string(),
                        commit_identifier: ident.commit.clone(),
                        time_identifier: ident.time.clone(),
                    },
                );
            }
        };
    }
    eprintln!("\x1b[01;38;5;155mrepo - generating repo.toml\x1b[0m");
    // === 4. Read and update repo.toml ===
    let repo_toml_path = repo_path.join("repo.toml");
    if repo_toml_path.exists() {
        // Merge the previously published package list.
        let mut file = File::open(&repo_toml_path)?;
        let mut contents = String::new();
        file.read_to_string(&mut contents)?;
        let parsed: Repository = toml::from_str(&contents)?;
        for (k, v) in parsed.packages {
            packages.insert(k, v);
        }
        if parsed.outdated_packages.len() > 0 {
            let built_packages: BTreeSet<String> = recipe_list
                .iter()
                .map(|p| p.name.name().to_string())
                .collect();
            // Keep previous outdated entries unless this run built them fresh.
            for (k, v) in parsed.outdated_packages {
                if outdated_packages.contains_key(&k) || !built_packages.contains(&k) {
                    outdated_packages.insert(k, v);
                }
            }
        }
    }
    // Scan every per-package *.toml in the repo dir to pick up its
    // blake3 (preferred) or version string for the package index.
    for entry in fs::read_dir(&repo_path)? {
        let entry = entry?;
        let path = entry.path();
        if path.extension().and_then(|s| s.to_str()) != Some("toml") {
            continue;
        }
        if path.file_stem().and_then(|s| s.to_str()) == Some("repo") {
            continue;
        }
        let content = fs::read_to_string(&path)?;
        let parsed: Value = toml::from_str(&content)?;
        let empty_ver = Value::String("".to_string());
        let version_str = parsed
            .get("blake3")
            .unwrap_or_else(|| parsed.get("version").unwrap_or_else(|| &empty_ver))
            .as_str()
            .unwrap_or("");
        let package_name = path.file_stem().unwrap().to_string_lossy().to_string();
        packages.insert(package_name, version_str.to_string());
    }
    let repository = Repository {
        packages,
        outdated_packages,
    };
    let output = toml::to_string(&repository)?;
    let mut output_file = File::create(&repo_toml_path)?;
    output_file.write_all(output.as_bytes())?;
    if let Some(conf) = &config.web {
        eprintln!("\x1b[01;38;5;155mrepo - generating web content\x1b[0m");
        generate_web(&repository.packages.keys().cloned().collect(), conf);
    }
    Ok(())
}
+254
View File
@@ -0,0 +1,254 @@
use std::{collections::HashMap, env, fs, str::FromStr, sync::OnceLock};
use serde::{Deserialize, Serialize};
/// Optional (user-supplied) cook settings, as read from the `[cook]` table
/// of `cookbook.toml`. Fields left as `None` are filled from environment
/// variables by `init_config` before conversion into a resolved `CookConfig`.
#[derive(Debug, Default, Clone, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct CookConfigOpt {
    /// whether to run offline
    pub offline: Option<bool>,
    /// number of build jobs to use instead of the nproc-derived default
    pub jobs: Option<usize>,
    /// whether to use TUI to allow parallel build;
    /// defaults to yes when the "CI" env var is unset or empty
    /// (NOTE(review): earlier docs also required STDIN to be open, but
    /// init_config does not check STDIN — confirm intent)
    pub tui: Option<bool>,
    /// whether to write logs to the build/logs dir; defaults to true on TUI
    pub logs: Option<bool>,
    /// whether to ignore build errors
    pub nonstop: Option<bool>,
    /// whether to archive packages with a compressed format
    pub compressed: Option<bool>,
    /// whether to print verbose logs for certain commands;
    /// build failures are still printed regardless
    pub verbose: Option<bool>,
    /// whether to always clean the build directory before building
    pub clean_build: Option<bool>,
    /// whether to always clean the target directory after building
    /// (deletes everything except pkgar files)
    pub clean_target: Option<bool>,
    /// whether to always write stage.files metadata
    pub write_filetree: Option<bool>,
}
/// Fully-resolved cook settings; see `CookConfigOpt` for each field's meaning.
/// Produced via `From<CookConfigOpt>` after `init_config` has filled in
/// every default, so no field is optional here.
#[derive(Debug, Default, Clone, Deserialize, PartialEq, Serialize)]
pub struct CookConfig {
    pub offline: bool,
    pub jobs: usize,
    pub tui: bool,
    pub logs: bool,
    pub nonstop: bool,
    pub compressed: bool,
    pub verbose: bool,
    pub clean_build: bool,
    pub clean_target: bool,
    pub write_filetree: bool,
}
impl From<CookConfigOpt> for CookConfig {
    /// Materialize a fully-resolved `CookConfig` from the optional form.
    ///
    /// Every field must already be `Some` — i.e. `init_config` must have
    /// filled in all defaults before this conversion runs.
    ///
    /// # Panics
    /// Panics with a message naming the offending field when any option is
    /// still `None` (instead of the context-free `Option::unwrap` panic).
    fn from(value: CookConfigOpt) -> Self {
        // Unwrap a field or panic with a diagnostic naming it.
        fn req<T>(field: Option<T>, name: &str) -> T {
            field.unwrap_or_else(|| {
                panic!("cook config field `{name}` was not resolved before conversion")
            })
        }
        CookConfig {
            offline: req(value.offline, "offline"),
            jobs: req(value.jobs, "jobs"),
            tui: req(value.tui, "tui"),
            logs: req(value.logs, "logs"),
            nonstop: req(value.nonstop, "nonstop"),
            compressed: req(value.compressed, "compressed"),
            verbose: req(value.verbose, "verbose"),
            clean_build: req(value.clean_build, "clean_build"),
            clean_target: req(value.clean_target, "clean_target"),
            write_filetree: req(value.write_filetree, "write_filetree"),
        }
    }
}
/// Top-level cookbook configuration, deserialized from `cookbook.toml`.
#[derive(Debug, Default, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct CookbookConfig {
    // Raw `[cook]` table as read from the file; fields may be unset.
    #[serde(rename = "cook")]
    cook_opt: CookConfigOpt,
    // Resolved settings populated by init_config; never (de)serialized.
    #[serde(skip)]
    pub cook: CookConfig,
    /// Map of URL prefixes to mirror prefixes, used by `translate_mirror`.
    pub mirrors: HashMap<String, String>,
}
/// Process-wide configuration, set exactly once by `init_config`.
static CONFIG: OnceLock<CookbookConfig> = OnceLock::new();
/// Load `cookbook.toml` when present (starting from defaults otherwise),
/// fill every still-unset cook option from its environment-variable
/// fallback, seed a default GNU mirror when none is configured, and freeze
/// the resolved result in the global `CONFIG`.
///
/// Resolution order matters: `logs` defaults from the resolved `tui`
/// value, and `write_filetree` defaults from the resolved `clean_target`
/// (or COOKBOOK_WEB). Panics if the config file cannot be read/parsed or
/// if called twice.
pub fn init_config() {
    let mut config: CookbookConfig = if fs::exists("cookbook.toml").unwrap_or(false) {
        let toml_content = fs::read_to_string("cookbook.toml")
            .map_err(|e| format!("Unable to read config: {:?}", e))
            .unwrap();
        toml::from_str(&toml_content)
            .map_err(|e| format!("Unable to parse config: {:?}", e))
            .unwrap()
    } else {
        CookbookConfig::default()
    };
    let opt = &mut config.cook_opt;
    // TUI defaults to on unless a non-empty CI env var is present.
    opt.tui
        .get_or_insert_with(|| !env::var("CI").is_ok_and(|s| !s.is_empty()));
    opt.jobs.get_or_insert_with(|| {
        extract_env(
            "COOKBOOK_MAKE_JOBS",
            std::thread::available_parallelism()
                .map(|f| usize::from(f))
                .unwrap_or(1),
        )
    });
    // `logs` defaults to the (now guaranteed-resolved) TUI setting.
    let tui_default = opt.tui.unwrap();
    opt.logs
        .get_or_insert_with(|| extract_env("COOKBOOK_LOGS", tui_default));
    opt.offline
        .get_or_insert_with(|| extract_env("COOKBOOK_OFFLINE", false));
    opt.compressed
        .get_or_insert_with(|| extract_env("COOKBOOK_COMPRESSED", false));
    opt.verbose
        .get_or_insert_with(|| extract_env("COOKBOOK_VERBOSE", true));
    opt.nonstop
        .get_or_insert_with(|| extract_env("COOKBOOK_NONSTOP", false));
    opt.clean_build
        .get_or_insert_with(|| extract_env("COOKBOOK_CLEAN_BUILD", false));
    opt.clean_target
        .get_or_insert_with(|| extract_env("COOKBOOK_CLEAN_TARGET", false));
    // `write_filetree` defaults from the resolved clean_target, or from
    // COOKBOOK_WEB (only consulted when clean_target is false).
    let clean_target = opt.clean_target.unwrap_or(false);
    opt.write_filetree.get_or_insert_with(|| {
        extract_env(
            "COOKBOOK_WRITE_FILETREE",
            clean_target || extract_env("COOKBOOK_WEB", false),
        )
    });
    if config.mirrors.is_empty() {
        // The GNU FTP mirror below is automatically inserted for convenience
        // You can choose other mirrors by setting it on cookbook.toml
        config.mirrors.insert(
            "ftp.gnu.org/gnu".to_string(),
            "mirrors.ocf.berkeley.edu/gnu".to_string(),
        );
    }
    config.cook = CookConfig::from(config.cook_opt.clone());
    CONFIG.set(config).expect("config is initialized twice");
}
/// Read environment variable `key` and parse it as `T`, falling back to
/// `default` when the variable is unset or fails to parse.
fn extract_env<T: FromStr>(key: &str, default: T) -> T {
    env::var(key)
        .ok()
        .and_then(|raw| raw.parse::<T>().ok())
        .unwrap_or(default)
}
/// Access the global configuration. Panics when `init_config` has not run.
pub fn get_config() -> &'static CookbookConfig {
    CONFIG.get().expect("Configuration is not initialized")
}
/// Rewrite `original_url` through the configured mirror table.
///
/// The scheme (`http://` / `https://`) is stripped before matching; the
/// longest configured prefix of the remaining URL wins, so more specific
/// mirrors take priority. URLs with no matching prefix pass through
/// unchanged, preserving their original scheme.
pub fn translate_mirror(original_url: &str) -> String {
    let config = CONFIG.get().expect("Configuration is not initialized");
    let stripped_url = original_url
        .strip_prefix("https://")
        .or_else(|| original_url.strip_prefix("http://"))
        .unwrap_or(original_url);
    // Pick the longest configured prefix that matches the stripped URL.
    let best_match = config
        .mirrors
        .keys()
        .filter(|prefix| stripped_url.starts_with(prefix.as_str()))
        .max_by_key(|prefix| prefix.len());
    match best_match {
        Some(prefix) => {
            let mirror_base = &config.mirrors[prefix];
            let suffix = &stripped_url[prefix.len()..];
            // Whatever scheme was stripped off (may be empty).
            let protocol = &original_url[..original_url.len() - stripped_url.len()];
            format!("{}{}{}", protocol, mirror_base, suffix)
        }
        None => original_url.to_string(),
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Install a fixed mirrors table into the global CONFIG for the tests.
    fn setup_test_config() {
        let app_config = toml::from_str(
            "[mirrors]\n\
            \"ftp.gnu.org/gnu\" = \"example.com/gnu\"\n\
            \"github.com/foo/bar\" = \"github.com/baz/bar\"\n\
            \"github.com/a\" = \"github.com/b\"\n",
        )
        .expect("Unable to parse test config");
        // This will be called for each test. If the config is already set,
        // it will do nothing, which is fine as all tests use the same config.
        let _ = CONFIG.set(app_config);
    }
    // Unset [cook] fields must deserialize to None, set fields to Some.
    #[test]
    fn test_parse_cook() {
        let app_config: CookbookConfig = toml::from_str(
            "[cook]\n\
            offline = true\n",
        )
        .expect("Unable to parse test config");
        assert_eq!(app_config.cook_opt.offline, Some(true));
        assert_eq!(app_config.cook_opt.jobs, None);
    }
    // A URL that is exactly a configured prefix maps to the mirror base.
    #[test]
    fn test_exact_match() {
        setup_test_config();
        assert_eq!(translate_mirror("ftp.gnu.org/gnu"), "example.com/gnu");
        assert_eq!(translate_mirror("github.com/foo/bar"), "github.com/baz/bar");
    }
    // The scheme is preserved and the suffix carried over on prefix matches.
    #[test]
    fn test_prefix_match() {
        setup_test_config();
        assert_eq!(
            translate_mirror("https://github.com/a/c"),
            "https://github.com/b/c"
        );
        assert_eq!(
            translate_mirror("https://ftp.gnu.org/gnu/bash/bash-5.2.15.tar.gz"),
            "https://example.com/gnu/bash/bash-5.2.15.tar.gz"
        );
    }
    #[test]
    fn test_longest_prefix_match() {
        setup_test_config();
        // "github.com/foo/bar" is a longer and more specific prefix than "github.com/a",
        // so it should be chosen for the translation.
        assert_eq!(
            translate_mirror("https://github.com/foo/bar/baz"),
            "https://github.com/baz/bar/baz"
        );
    }
    // URLs without a configured prefix must pass through untouched.
    #[test]
    fn test_no_match() {
        setup_test_config();
        assert_eq!(translate_mirror("www.rust-lang.org"), "www.rust-lang.org");
        assert_eq!(
            translate_mirror("http://github.com/unrelated/repo"),
            "http://github.com/unrelated/repo"
        );
    }
}
+10
View File
@@ -0,0 +1,10 @@
// avoid confusion with build.rs
pub mod cook_build;
pub mod fetch;
pub mod fetch_repo;
pub mod fs;
pub mod ident;
pub mod package;
pub mod pty;
pub mod script;
pub mod tree;
+758
View File
@@ -0,0 +1,758 @@
use pkg::PackageError;
use pkg::{Package, PackageName};
use crate::config::CookConfig;
use crate::cook::package::{package_source_paths, package_target};
use crate::cook::pty::PtyOut;
use crate::cook::script::*;
use crate::cook::{fetch, fs::*};
use crate::recipe::Recipe;
use crate::recipe::{AutoDeps, CookRecipe};
use crate::recipe::{BuildKind, OptionalPackageRecipe};
use std::collections::VecDeque;
use std::{
collections::BTreeSet,
fs,
path::{Path, PathBuf},
process::Command,
str,
time::SystemTime,
};
use crate::{is_redox, log_to_pty};
/// Discover runtime package dependencies by inspecting the DT_NEEDED
/// entries of every ELF file under the staged binary/library directories,
/// then matching those needed sonames against the contents of the build
/// dependencies' pkgar archives.
///
/// Returns the set of dependency packages that provide at least one needed
/// shared object. Needed names that no dependency provides are reported
/// (verbose mode only) but otherwise ignored; a handful of always-installed
/// libraries are excluded up front.
fn auto_deps_from_dynamic_linking(
    stage_dirs: &[PathBuf],
    dep_pkgars: &BTreeSet<(PackageName, PathBuf)>,
    logger: &PtyOut,
) -> BTreeSet<PackageName> {
    // (stage dir, file path) pairs of candidate ELF files.
    let mut paths = BTreeSet::new();
    // Canonicalized directories already scanned (guards against symlink loops).
    let mut visited = BTreeSet::new();
    let verbose = crate::config::get_config().cook.verbose;
    // Base directories may need to be updated for packages that place binaries in odd locations.
    let mut walk = VecDeque::new();
    for stage_dir in stage_dirs {
        walk.push_back((stage_dir, stage_dir.join("usr/bin")));
        walk.push_back((stage_dir, stage_dir.join("usr/games")));
        walk.push_back((stage_dir, stage_dir.join("usr/lib")));
        walk.push_back((stage_dir, stage_dir.join("usr/libexec")));
    }
    // Recursively (DFS) walk each directory to ensure nested libs and bins are checked.
    while let Some((rel_path, dir)) = walk.pop_front() {
        let Ok(dir) = dir.canonicalize() else {
            continue;
        };
        if visited.contains(&dir) {
            #[cfg(debug_assertions)]
            log_to_pty!(
                logger,
                "DEBUG: auto_deps => Skipping `{dir:?}` (already visited)"
            );
            continue;
        }
        assert!(
            visited.insert(dir.clone()),
            "Directory `{:?}` should not be in visited\nVisited: {:#?}",
            dir,
            visited
        );
        let Ok(read_dir) = fs::read_dir(&dir) else {
            continue;
        };
        for entry_res in read_dir {
            let Ok(entry) = entry_res else { continue };
            let Ok(file_type) = entry.file_type() else {
                continue;
            };
            if file_type.is_file() {
                paths.insert((rel_path, entry.path()));
            } else if file_type.is_dir() {
                // push_front keeps the traversal depth-first.
                walk.push_front((rel_path, entry.path()));
            }
        }
    }
    // Collect every DT_NEEDED soname across all discovered ELF files.
    // Non-ELF and unreadable files are silently skipped.
    let mut needed = BTreeSet::new();
    for (rel_path, path) in paths {
        let Ok(file) = fs::File::open(&path) else {
            continue;
        };
        let read_cache = object::ReadCache::new(file);
        let Ok(object) = object::build::elf::Builder::read(&read_cache) else {
            continue;
        };
        let Some(dynamic_data) = object.dynamic_data() else {
            continue;
        };
        for dynamic in dynamic_data {
            let object::build::elf::Dynamic::String { tag, val } = dynamic else {
                continue;
            };
            if *tag == object::elf::DT_NEEDED {
                let Ok(name) = str::from_utf8(val) else {
                    continue;
                };
                if let Ok(relative_path) = path.strip_prefix(rel_path) {
                    if verbose {
                        log_to_pty!(logger, "DEBUG: {} needs {}", relative_path.display(), name);
                    }
                }
                needed.insert(name.to_string());
            }
        }
    }
    // Track which needed sonames remain unprovided for diagnostics.
    let mut missing = needed.clone();
    // relibc and friends will always be installed
    for preinstalled in &["libc.so.6", "libgcc_s.so.1", "libstdc++.so.6"] {
        missing.remove(*preinstalled);
    }
    let mut deps = BTreeSet::new();
    // Match needed sonames against lib/ and usr/lib/ entries of each
    // dependency's signed pkgar archive. Requires the public key file;
    // without it no dynamic deps are attributed.
    if let Ok(key_file) = pkgar_keys::PublicKeyFile::open("build/id_ed25519.pub.toml") {
        for (dep, archive_path) in dep_pkgars.iter() {
            let Ok(mut package) = pkgar::PackageFile::new(archive_path, &key_file.pkey) else {
                continue;
            };
            let Ok(entries) = pkgar_core::PackageSrc::read_entries(&mut package) else {
                continue;
            };
            for entry in entries {
                let Ok(entry_path) = pkgar::ext::EntryExt::check_path(&entry) else {
                    continue;
                };
                for prefix in &["lib", "usr/lib"] {
                    let Ok(child_path) = entry_path.strip_prefix(prefix) else {
                        continue;
                    };
                    let Some(child_name) = child_path.to_str() else {
                        continue;
                    };
                    if needed.contains(child_name) {
                        if verbose {
                            log_to_pty!(logger, "DEBUG: {} provides {}", dep, child_name);
                        }
                        deps.insert(dep.with_prefix(pkg::PackagePrefix::Any));
                        missing.remove(child_name);
                    }
                }
            }
        }
    }
    if verbose {
        for name in missing {
            log_to_pty!(logger, "INFO: {} missing", name);
        }
    }
    deps
}
/// Resolve the recursive package dependencies of the build dependencies
/// that were NOT already attributed via dynamic linking, so statically
/// declared deps are still carried into the final dependency set.
fn auto_deps_from_static_package_deps(
    build_dep_pkgars: &BTreeSet<(PackageName, PathBuf)>,
    dynamic_dep_pkgars: &BTreeSet<PackageName>,
) -> Result<BTreeSet<PackageName>, PackageError> {
    // Keep only the build deps that dynamic linking did not account for.
    let static_dep_pkgars: Vec<PackageName> = build_dep_pkgars
        .iter()
        .map(|(name, _path)| name)
        .filter(|name| !dynamic_dep_pkgars.contains(*name))
        .cloned()
        .collect();
    let resolved = CookRecipe::get_package_deps_recursive(&static_dep_pkgars, false)?;
    Ok(resolved.into_iter().collect())
}
/// Outcome of building a recipe.
pub struct BuildResult {
    // Stage directories produced (one per package feature plus the base stage).
    pub stage_dirs: Vec<PathBuf>,
    // Automatically discovered dependencies for this build.
    pub auto_deps: BTreeSet<PackageName>,
    // True when the result was served from a previous build's artifacts.
    pub cached: bool,
}
impl BuildResult {
pub fn new(stage_dirs: Vec<PathBuf>, auto_deps: BTreeSet<PackageName>) -> Self {
BuildResult {
stage_dirs,
auto_deps,
cached: false,
}
}
pub fn cached(stage_dirs: Vec<PathBuf>, auto_deps: BTreeSet<PackageName>) -> Self {
BuildResult {
stage_dirs,
auto_deps,
cached: true,
}
}
}
/// Build a recipe's stage directories, reusing cached artifacts whenever
/// the source, recipe.toml, and dependency pkgars are all older than the
/// existing stage pkgars.
///
/// High-level flow: resolve build deps -> early-return cached results ->
/// refresh sysroot/toolchain dirs -> run the build script via the
/// cookbook redoxer into stage.tmp -> distribute outputs into per-feature
/// stage dirs by glob -> compute auto dependencies.
///
/// Returns the stage dirs plus discovered auto deps, or a formatted error
/// string on failure.
pub fn build(
    recipe_dir: &Path,
    source_dir: &Path,
    target_dir: &Path,
    cook_recipe: &CookRecipe,
    cook_config: &CookConfig,
    logger: &PtyOut,
) -> Result<BuildResult, String> {
    let recipe = &cook_recipe.recipe;
    let name = &cook_recipe.name;
    // Dependency-only cooks skip the source-freshness check entirely.
    let check_source = !cook_recipe.is_deps;
    let sysroot_dir = get_sub_target_dir(target_dir, "sysroot");
    let toolchain_dir = get_sub_target_dir(target_dir, "toolchain");
    let auto_deps_file = get_sub_target_dir(target_dir, "auto_deps.toml");
    let stage_dirs = get_stage_dirs(&recipe.optional_packages, target_dir);
    let stage_pkgars: Vec<PathBuf> = stage_dirs
        .iter()
        .map(|p| p.with_added_extension("pkgar"))
        .collect();
    let cli_verbose = cook_config.verbose;
    let cli_jobs = cook_config.jobs;
    if recipe.build.kind == BuildKind::None {
        // metapackages don't need to do anything here
        return Ok(BuildResult::new(stage_dirs, BTreeSet::new()));
    }
    // Partition build deps into target (sysroot) and host (toolchain) pkgars.
    let mut dep_pkgars = BTreeSet::new();
    let mut dep_host_pkgars = BTreeSet::new();
    let build_deps = [
        &recipe.build.dependencies[..],
        &recipe.build.dev_dependencies[..],
    ]
    .concat();
    let build_deps =
        CookRecipe::get_build_deps_recursive(&build_deps, false).map_err(|e| format!("{:?}", e))?;
    for dependency in build_deps.iter() {
        let (_, pkgar, _) = dependency.stage_paths();
        if dependency.name.is_host() {
            dep_host_pkgars.insert((dependency.name.clone(), pkgar));
        } else {
            dep_pkgars.insert((dependency.name.clone(), pkgar));
        }
    }
    // Macro (not a closure) because it moves dep_pkgars on its single use
    // per return path.
    macro_rules! make_auto_deps {
        ($cached:expr) => {
            build_auto_deps(
                recipe,
                &auto_deps_file,
                &stage_dirs,
                $cached,
                cook_config,
                dep_pkgars,
                logger,
            )
        };
    }
    if !check_source {
        // TODO: when stage_dirs does not exist due to clean_target was true, extract from stage.pkgar?
        let stage_present = stage_pkgars.iter().all(|file| file.is_file());
        if stage_present && auto_deps_file.is_file() {
            if cli_verbose {
                log_to_pty!(logger, "DEBUG: using cached build, not checking source");
            }
            let auto_deps = make_auto_deps!(true)?;
            return Ok(BuildResult::cached(stage_dirs, auto_deps));
        }
    }
    // Newest mtime among the source tree (git metadata ignored) and recipe.toml.
    let mut source_modified = modified_dir_ignore_git(source_dir).unwrap_or(SystemTime::UNIX_EPOCH);
    if let Ok(recipe_modified) = modified(&recipe_dir.join("recipe.toml")) {
        if recipe_modified > source_modified {
            source_modified = recipe_modified
        }
    }
    let deps_modified = modified_all_btree(
        dep_pkgars.iter().map(|(_dep, pkgar)| pkgar.as_path()),
        modified,
    )?;
    let deps_host_modified = modified_all_btree(
        dep_host_pkgars.iter().map(|(_dep, pkgar)| pkgar.as_path()),
        modified,
    )?;
    // check stage dir modified against pkgar files, any files missing will result in UNIX_EPOCH
    let stage_modified = modified_all(&stage_pkgars, modified).unwrap_or(SystemTime::UNIX_EPOCH);
    // Rebuild stage if source is newer
    if stage_modified < source_modified
        || stage_modified < deps_modified
        || stage_modified < deps_host_modified
        || !auto_deps_file.is_file()
    {
        for stage_dir in &stage_dirs {
            if stage_dir.is_dir() {
                log_to_pty!(logger, "DEBUG: updating '{}'", stage_dir.display());
                remove_stage_dir(stage_dir)?;
            }
        }
    } else {
        if cli_verbose {
            log_to_pty!(logger, "DEBUG: using cached build");
        }
        // stop early otherwise we'll end up rebuilding
        let auto_deps = make_auto_deps!(true)?;
        return Ok(BuildResult::cached(stage_dirs, auto_deps));
    }
    // Rebuild sysroot if source is newer
    if recipe.build.kind != BuildKind::Remote {
        let updated = build_deps_dir(
            logger,
            &sysroot_dir,
            // Host recipes build against the host deps, others the target deps.
            if name.is_host() {
                &dep_host_pkgars
            } else {
                &dep_pkgars
            },
            source_modified,
            deps_modified,
        )?;
        if cli_verbose && !updated {
            log_to_pty!(logger, "DEBUG: using cached sysroot");
        }
    }
    // Non-host recipes with host build deps additionally need a toolchain dir.
    if recipe.build.kind != BuildKind::Remote && !name.is_host() && dep_host_pkgars.len() > 0 {
        let updated = build_deps_dir(
            logger,
            &toolchain_dir,
            &dep_host_pkgars,
            source_modified,
            deps_host_modified,
        )?;
        if cli_verbose && !updated {
            log_to_pty!(logger, "DEBUG: using cached toolchain");
        }
    }
    // The base "stage" dir is always last in stage_dirs (see get_stage_dirs).
    let stage_dir = stage_dirs
        .last()
        .expect("Should have atleast one stage dir");
    let build_dir = get_sub_target_dir(target_dir, "build");
    if !stage_dir.is_dir() {
        // Create stage.tmp
        let stage_dir_tmp = target_dir.join("stage.tmp");
        create_dir_clean(&stage_dir_tmp)?;
        // Create build dir, if it does not exist
        if cook_config.clean_build || !build_dir.is_dir() {
            create_dir_clean(&build_dir)?;
        }
        // Render a list of flags as a bash array append statement.
        let flags_fn = |name, flags: &Vec<String>| {
            format!(
                "{name}+=(\n{}\n)\n",
                flags
                    .iter()
                    .map(|s| format!(" \"{s}\""))
                    .collect::<Vec<String>>()
                    .join("\n")
            )
        };
        if recipe.build.kind == BuildKind::Remote {
            return build_remote(stage_dirs, recipe, target_dir, cook_config);
        }
        let mut allow_cargo_offline = false;
        //TODO: better integration with redoxer (library instead of binary)
        //TODO: configurable target
        //TODO: Add more configurability, convert scripts to Rust?
        // Assemble the per-build-kind bash script body.
        let script = match &recipe.build.kind {
            BuildKind::Cargo {
                cargopath,
                cargoflags,
                cargopackages,
                cargoexamples,
            } => {
                allow_cargo_offline = true;
                let mut script = format!(
                    "DYNAMIC_INIT\n{}\nCOOKBOOK_CARGO_PATH={} ",
                    flags_fn("COOKBOOK_CARGO_FLAGS", cargoflags),
                    cargopath.as_deref().unwrap_or(".")
                );
                if cargopackages.len() == 0 && cargoexamples.len() == 0 {
                    script += "cookbook_cargo\n"
                } else {
                    if cargopackages.len() > 0 {
                        script += "cookbook_cargo_packages";
                        for package in cargopackages {
                            script += " ";
                            script += package;
                        }
                        script += "\n";
                    }
                    if cargoexamples.len() > 0 {
                        script += "cookbook_cargo_examples";
                        for example in cargoexamples {
                            script += " ";
                            script += example;
                        }
                        script += "\n";
                    }
                }
                script
            }
            BuildKind::Configure { configureflags } => format!(
                "DYNAMIC_INIT\n{}cookbook_configure",
                flags_fn("COOKBOOK_CONFIGURE_FLAGS", configureflags),
            ),
            BuildKind::Cmake { cmakeflags } => format!(
                "DYNAMIC_INIT\n{}cookbook_cmake",
                flags_fn("COOKBOOK_CMAKE_FLAGS", cmakeflags),
            ),
            BuildKind::Meson { mesonflags } => format!(
                "DYNAMIC_INIT\n{}cookbook_meson",
                flags_fn("COOKBOOK_MESON_FLAGS", mesonflags),
            ),
            BuildKind::Custom { script } => script.clone(),
            BuildKind::Remote => unreachable!(),
            BuildKind::None => "".to_owned(),
        };
        // Construct the redoxer invocation with the full COOKBOOK_* env.
        let command = {
            //TODO: remove unwraps
            let cookbook_build = build_dir.canonicalize().unwrap();
            let cookbook_recipe = recipe_dir.canonicalize().unwrap();
            let cookbook_root = Path::new(".").canonicalize().unwrap();
            let cookbook_stage = stage_dir_tmp.canonicalize().unwrap();
            let cookbook_source = source_dir.canonicalize().unwrap();
            let cookbook_sysroot = sysroot_dir.canonicalize().unwrap();
            let cookbook_toolchain = toolchain_dir.canonicalize().ok();
            let bash_args = if cli_verbose { "-ex" } else { "-e" };
            let local_redoxer = Path::new("target/release/cookbook_rbos_redoxer");
            // On Redox without a local build, use the installed redoxer;
            // otherwise require the locally built binary.
            let mut command = if is_redox() && !local_redoxer.is_file() {
                let mut command = Command::new("cookbook_rbos_redoxer");
                command.env("COOKBOOK_REDOXER", "cookbook_rbos_redoxer");
                command
            } else {
                let cookbook_redoxer = local_redoxer
                    .canonicalize()
                    .unwrap_or(PathBuf::from("/bin/false"));
                let mut command = Command::new(&cookbook_redoxer);
                command.env("COOKBOOK_REDOXER", &cookbook_redoxer);
                command
            };
            command.arg("env").arg("bash").arg(bash_args);
            command.current_dir(&cookbook_build);
            command.env("TARGET", package_target(name));
            command.env("COOKBOOK_BUILD", &cookbook_build);
            command.env("COOKBOOK_NAME", name.name());
            command.env("COOKBOOK_HOST_TARGET", redoxer::host_target());
            command.env("COOKBOOK_RECIPE", &cookbook_recipe);
            command.env("COOKBOOK_ROOT", &cookbook_root);
            command.env("COOKBOOK_STAGE", &cookbook_stage);
            command.env("COOKBOOK_SOURCE", &cookbook_source);
            command.env("COOKBOOK_SYSROOT", &cookbook_sysroot);
            if let Some(cookbook_toolchain) = &cookbook_toolchain {
                command.env("COOKBOOK_TOOLCHAIN", cookbook_toolchain);
            } else if name.is_host() {
                command.env("COOKBOOK_TOOLCHAIN", &cookbook_sysroot);
            }
            command.env("COOKBOOK_MAKE_JOBS", cli_jobs.to_string());
            if cli_verbose {
                command.env("COOKBOOK_VERBOSE", "1");
            }
            // Offline mode only applies to cargo-based builds.
            if cook_config.offline && allow_cargo_offline {
                command.env("COOKBOOK_OFFLINE", "1");
            } else {
                command.env_remove("COOKBOOK_OFFLINE");
            }
            if let Ok(ident_source) = fetch::fetch_get_source_info(&cook_recipe) {
                command.env("COOKBOOK_SOURCE_IDENT", ident_source.source_identifier);
                command.env("COOKBOOK_COMMIT_IDENT", ident_source.commit_identifier);
            }
            command
        };
        // Wrap the build-kind script with the shared pre/post scripts and
        // pipe the whole thing to bash over stdin.
        let full_script = format!(
            "{}\n{}\n{}\n{}",
            BUILD_PRESCRIPT, SHARED_PRESCRIPT, script, BUILD_POSTSCRIPT
        );
        run_command_stdin(command, full_script.as_bytes(), logger)?;
        // Move to each features dir
        let mut globs = Vec::new();
        for (i, feat) in recipe.optional_packages.iter().enumerate() {
            let stage_dir = &stage_dirs[i];
            create_dir_clean(&stage_dir)?;
            for path in &feat.files {
                let glob = globset::Glob::new(&path).map_err(|e| format!("{}", e))?;
                globs.push((glob.compile_matcher(), stage_dir.clone()));
            }
        }
        // First matching feature glob wins; unmatched files stay in the
        // base stage dir.
        move_dir_all_fn(
            &stage_dir_tmp,
            &Box::new(|path: PathBuf| {
                for (glob, dst) in &globs {
                    if glob.is_match(&path) {
                        return Some(dst.as_path());
                    }
                }
                None
            }),
        )
        .map_err(|e| format!("Unable to move {e:?}"))?;
        // Move stage.tmp to stage atomically
        rename(&stage_dir_tmp, &stage_dir)?;
    }
    if cook_config.clean_target {
        remove_all(&build_dir)?;
        remove_all(&sysroot_dir)?;
        if toolchain_dir.is_dir() {
            remove_all(&toolchain_dir)?;
        }
        // don't remove stage dir yet
    }
    let auto_deps = make_auto_deps!(false)?;
    Ok(BuildResult::new(stage_dirs, auto_deps))
}
/// Delete a stage directory plus its generated sibling artifacts:
/// `<stage>.pkgar`, `<stage>.toml`, and `<stage>.files`.
pub fn remove_stage_dir(stage_dir: &PathBuf) -> crate::Result<()> {
    if stage_dir.is_dir() {
        remove_all(&stage_dir)?;
    }
    // Each artifact lives beside the stage dir, differing only by extension.
    for ext in ["pkgar", "toml", "files"] {
        let artifact = stage_dir.with_added_extension(ext);
        if artifact.is_file() {
            remove_all(&artifact)?;
        }
    }
    Ok(())
}
pub fn get_stage_dirs(features: &Vec<OptionalPackageRecipe>, target_dir: &Path) -> Vec<PathBuf> {
let mut target_dir = target_dir.to_path_buf();
if let Some(cross_target) = crate::cross_target() {
// TODO: automatically pass COOKBOOK_CROSS_GNU_TARGET?
target_dir = target_dir.join(cross_target)
}
let mut v = Vec::new();
for f in features {
v.push(target_dir.join(format!("stage.{}", f.name)));
}
// intentionally added last as it contains leftover files from package features
v.push(target_dir.join("stage"));
v
}
/// Resolve `sub_path` inside the target directory, inserting the cross
/// compilation target as an intermediate component when one is configured.
pub fn get_sub_target_dir(target_dir: &Path, sub_path: &str) -> PathBuf {
    // TODO: automatically pass COOKBOOK_CROSS_GNU_TARGET?
    match crate::cross_target() {
        Some(cross_target) => target_dir.join(cross_target).join(sub_path),
        None => target_dir.join(sub_path),
    }
}
/// Build (or refresh) a dependency install directory (sysroot/toolchain)
/// by extracting every dependency pkgar into it.
///
/// A `.tags` subdirectory holds one empty file per installed dependency;
/// its mtime and contents decide staleness. The directory is assembled in
/// `<deps_dir>.tmp` and renamed into place so readers never see a partial
/// install. Returns Ok(true) when the directory was (re)built, Ok(false)
/// when the existing one was reused.
fn build_deps_dir(
    logger: &PtyOut,
    deps_dir: &PathBuf,
    dep_pkgars: &BTreeSet<(PackageName, PathBuf)>,
    source_modified: SystemTime,
    deps_modified: SystemTime,
) -> Result<bool, String> {
    let deps_dir_tmp = deps_dir.with_added_extension("tmp");
    if deps_dir.is_dir() {
        // Stale when older than the source or deps, or when any expected
        // dependency tag file is missing.
        let tags_dir = deps_dir.join(".tags");
        let sysroot_modified = modified_dir(&tags_dir).unwrap_or(SystemTime::UNIX_EPOCH);
        if sysroot_modified < source_modified
            || sysroot_modified < deps_modified
            || !check_files_present(
                &tags_dir,
                &dep_pkgars
                    .iter()
                    .map(|(name, _)| name.without_prefix())
                    .collect(),
            )?
        {
            log_to_pty!(logger, "DEBUG: updating '{}'", deps_dir.display());
            remove_all(deps_dir)?;
        }
    }
    if !deps_dir.is_dir() {
        // Create sysroot.tmp
        create_dir_clean(&deps_dir_tmp)?;
        let tags_dir = deps_dir_tmp.join(".tags");
        let usr_dir = deps_dir_tmp.join("usr");
        create_dir(&tags_dir)?;
        create_dir(&usr_dir)?;
        for folder in &["bin", "include", "lib", "share"] {
            // Make sure sysroot/usr/$folder exists
            create_dir(&usr_dir.join(folder))?;
            // Link sysroot/$folder sysroot/usr/$folder
            symlink(Path::new("usr").join(folder), &deps_dir_tmp.join(folder))?;
        }
        let pkey_path = "build/id_ed25519.pub.toml";
        for (name, archive_path) in dep_pkgars {
            // Tag file records that this dependency is installed here.
            let tag_file = tags_dir.join(name.without_prefix());
            fs::write(&tag_file, "")
                .map_err(|e| format!("failed to write tag file {}: {:?}", tag_file.display(), e))?;
            pkgar::extract(pkey_path, &archive_path, deps_dir_tmp.to_str().unwrap()).map_err(
                |err| {
                    format!(
                        "failed to install '{}' in '{}': {:?}",
                        archive_path.display(),
                        deps_dir_tmp.display(),
                        err
                    )
                },
            )?;
        }
        // Move sysroot.tmp to sysroot atomically
        rename(&deps_dir_tmp, deps_dir)?;
        return Ok(true);
    }
    Ok(false)
}
/// Calculate automatic dependencies
///
/// Returns the set of packages this recipe needs at runtime, either read
/// from the cached `auto_deps.toml` (when `cached` is true) or recomputed
/// from the staged binaries' dynamic-linking requirements plus static
/// package dependencies, then written back to the cache file.
fn build_auto_deps(
    recipe: &Recipe,
    auto_deps_path: &Path,
    stage_dirs: &Vec<PathBuf>,
    cached: bool,
    cook_config: &CookConfig,
    mut dep_pkgars: BTreeSet<(PackageName, PathBuf)>,
    logger: &PtyOut,
) -> Result<BTreeSet<PackageName>, String> {
    // A stale cache (the recipe was just rebuilt) must be regenerated.
    if auto_deps_path.is_file() && !cached {
        if cook_config.verbose {
            log_to_pty!(logger, "DEBUG: updating {}", auto_deps_path.display());
        }
        remove_all(&auto_deps_path)?;
    }
    let auto_deps = if auto_deps_path.exists() {
        let toml_content =
            fs::read_to_string(&auto_deps_path).map_err(|_| "failed to read cached auto_deps")?;
        let wrapper: AutoDeps =
            toml::from_str(&toml_content).map_err(|_| "failed to deserialize cached auto_deps")?;
        wrapper.packages
    } else {
        // Start from the shared libraries the staged binaries link against,
        // then add static package deps, restricted to declared build deps.
        let mut dynamic_deps = auto_deps_from_dynamic_linking(stage_dirs, &dep_pkgars, logger);
        dep_pkgars.retain(|x| recipe.build.dependencies.contains(&x.0));
        let package_deps =
            auto_deps_from_static_package_deps(&dep_pkgars, &dynamic_deps).unwrap_or_default();
        dynamic_deps.extend(package_deps);
        let wrapper = AutoDeps {
            packages: dynamic_deps,
        };
        serialize_and_write(&auto_deps_path, &wrapper)?;
        wrapper.packages
    };
    Ok(auto_deps)
}
/// "Build" a remote recipe: stage directories are populated by extracting
/// prefetched source pkgars instead of compiling, and auto-deps come from
/// the remote `source.toml` rather than from binary inspection.
pub fn build_remote(
    stage_dirs: Vec<PathBuf>,
    recipe: &Recipe,
    target_dir: &Path,
    cook_config: &CookConfig,
) -> Result<BuildResult, String> {
    let source_toml = target_dir.join("source.toml");
    let source_pubkey = "build/remotes/pub_key_static.redox-os.org.toml";
    let packages = recipe.get_packages_list();
    for (i, package) in packages.into_iter().enumerate() {
        // declare pkg dependencies as autodeps dependency
        let stage_dir = &stage_dirs[i];
        // When the stage dir will be cleaned anyway, having the packed
        // pkgar already present is enough — skip re-extraction.
        if cook_config.clean_target && stage_dir.with_added_extension("pkgar").is_file() {
            continue;
        }
        if !stage_dir.is_dir() {
            let (_, source_pkgar, _) = package_source_paths(package, &target_dir);
            let stage_dir_tmp = target_dir.join("stage.tmp");
            pkgar::extract(&source_pubkey, &source_pkgar, &stage_dir_tmp).map_err(|err| {
                format!(
                    "failed to install '{}' in '{}': {:?}",
                    source_pkgar.display(),
                    stage_dir_tmp.display(),
                    err
                )
            })?;
            // Move stage.tmp to stage atomically
            rename(&stage_dir_tmp, &stage_dir)?;
        }
    }
    let auto_deps_path = target_dir.join("auto_deps.toml");
    // Invalidate the auto-deps cache when any stage dir is newer than it.
    if auto_deps_path.is_file() && !cook_config.clean_target {
        if modified(&auto_deps_path)? < modified_all(&stage_dirs, modified)? {
            remove_all(&auto_deps_path)?
        }
    }
    let auto_deps = if auto_deps_path.exists() {
        let toml_content =
            fs::read_to_string(&auto_deps_path).map_err(|_| "failed to read cached auto_deps")?;
        let wrapper: AutoDeps =
            toml::from_str(&toml_content).map_err(|_| "failed to deserialize cached auto_deps")?;
        wrapper.packages
    } else {
        // No cache: use the dependency list declared by the remote package.
        let toml_content =
            fs::read_to_string(&source_toml).map_err(|_| "failed to read source.toml")?;
        let pkg_toml: Package =
            toml::from_str(&toml_content).map_err(|_| "failed to deserialize source.toml")?;
        let wrapper = AutoDeps {
            packages: pkg_toml.depends.into_iter().collect(),
        };
        serialize_and_write(&auto_deps_path, &wrapper)?;
        wrapper.packages
    };
    Ok(BuildResult::new(stage_dirs, auto_deps))
}
#[cfg(test)]
mod tests {
    use std::os::unix;
    /// Regression test: a symlink cycle inside a stage directory must not
    /// make the dynamic-linking dependency walk loop forever.
    #[test]
    fn file_system_loop_no_infinite_loop() {
        let mut root = std::env::temp_dir();
        root.push("temp_test_dir_file_system_loop_no_infinite_loop");
        let _ = std::fs::remove_dir_all(&root);
        std::fs::create_dir_all(&root).expect("Failed to create temporary root directory");
        // Hierarchy with an infinite loop
        let dir = root.join("loop");
        // `expect` takes a plain &str, so the original message
        // "Linking {dir:?} to {root:?}" was never interpolated; use
        // unwrap_or_else + panic! to get the formatted paths on failure.
        unix::fs::symlink(&root, &dir)
            .unwrap_or_else(|e| panic!("Linking {dir:?} to {root:?}: {e}"));
        // Sanity check that we have a loop
        assert_eq!(
            root.canonicalize().unwrap(),
            dir.canonicalize().unwrap(),
            "Expected a loop where {dir:?} points to {root:?}"
        );
        let entries =
            super::auto_deps_from_dynamic_linking(&vec![root.clone()], &Default::default(), &None);
        assert!(
            entries.is_empty(),
            "auto_deps shouldn't have yielded any libraries"
        );
    }
}
+850
View File
@@ -0,0 +1,850 @@
use crate::Error;
use crate::Result;
use crate::bail_other_err;
use crate::config::translate_mirror;
use crate::cook::cook_build;
use crate::cook::fetch_repo;
use crate::cook::fetch_repo::PlainPtyCallback;
use crate::cook::fs::*;
use crate::cook::package::get_package_name;
use crate::cook::package::package_source_paths;
use crate::cook::pty::PtyOut;
use crate::cook::script::*;
use crate::is_redox;
use crate::log_to_pty;
use crate::recipe::BuildKind;
use crate::recipe::CookRecipe;
use crate::recipe::SourceRecipe;
use crate::wrap_io_err;
use crate::wrap_other_err;
use pkg::SourceIdentifier;
use pkg::net_backend::DownloadBackendWriter;
use std::cell::RefCell;
use std::collections::BTreeMap;
use std::fs;
use std::fs::File;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::process::Command;
use std::rc::Rc;
/// Outcome of fetching a recipe's source: where it lives on disk, how the
/// fetched revision is identified, and whether an existing copy was reused.
pub struct FetchResult {
    pub source_dir: PathBuf,
    pub source_ident: String,
    pub cached: bool,
}
impl FetchResult {
    /// Build a result with an explicit cached flag.
    pub fn new(source_dir: PathBuf, ident: String, cached: bool) -> Self {
        Self {
            source_dir,
            source_ident: ident,
            cached,
        }
    }
    /// Convenience constructor for a source that was already up to date.
    pub fn cached(source_dir: PathBuf, ident: String) -> Self {
        Self::new(source_dir, ident, true)
    }
}
/// Compute the BLAKE3 hash of the file at `path`, returned hex-encoded.
pub(crate) fn get_blake3(path: &PathBuf) -> Result<String> {
    let mut file = fs::File::open(&path).map_err(wrap_io_err!(path, "Opening file for blake3"))?;
    let mut hasher = blake3::Hasher::new();
    hasher
        .update_reader(&mut file)
        .map_err(wrap_io_err!(path, "Reading file for blake3"))?;
    Ok(hasher.finalize().to_hex().to_string())
}
/// Fetch a recipe's source without touching the network.
///
/// Only validates and uses what is already on disk: a `same-as` link is
/// resolved recursively, a git checkout is identified by its current HEAD,
/// and a tarball is extracted from an existing `source.tar`. Errors out when
/// the required local data is missing.
pub fn fetch_offline(recipe: &CookRecipe, logger: &PtyOut) -> Result<FetchResult> {
    let recipe_dir = &recipe.dir;
    let source_dir = recipe_dir.join("source");
    match recipe.recipe.build.kind {
        BuildKind::None => {
            // the build function doesn't need source dir exists
            let ident = fetch_apply_source_info(recipe, "".to_string())?;
            return Ok(FetchResult::cached(source_dir, ident));
        }
        BuildKind::Remote => {
            return fetch_remote(recipe_dir, recipe, true, source_dir, logger);
        }
        _ => {}
    }
    let result = match &recipe.recipe.source {
        // Path/local sources never need the network; reuse the online path.
        Some(SourceRecipe::Path { path: _ }) | None => fetch(recipe, true, logger)?,
        Some(SourceRecipe::SameAs { same_as }) => {
            let recipe = fetch_resolve_canon(recipe_dir, &same_as, recipe.name.is_host())?;
            // recursively fetch
            let r = fetch_offline(&recipe, logger)?;
            fetch_make_symlink(&source_dir, &same_as)?;
            r
        }
        Some(SourceRecipe::Git {
            git: _,
            upstream: _,
            branch: _,
            rev: _,
            patches: _,
            script: _,
            shallow_clone: _,
        }) => {
            // The checkout must already exist; identify it by its HEAD rev.
            offline_check_exists(&source_dir)?;
            let (head_rev, _) = get_git_head_rev(&source_dir)?;
            FetchResult::cached(source_dir, head_rev)
        }
        Some(SourceRecipe::Tar {
            tar: _,
            blake3,
            patches,
            script,
        }) => {
            let ident = blake3.clone().unwrap_or("no_tar_blake3_hash_info".into());
            let cached = source_dir.is_dir();
            if !cached {
                let source_tar = recipe_dir.join("source.tar");
                // Only hash the tarball when it actually exists. The previous
                // code hashed first, so a missing source.tar surfaced as an
                // unhelpful file-open error instead of reaching the
                // offline_check_exists diagnostic below.
                if source_tar.exists() {
                    let source_tar_blake3 = get_blake3(&source_tar)?;
                    if let Some(blake3) = blake3 {
                        if source_tar_blake3 != *blake3 {
                            bail_other_err!(
                                "The downloaded tar blake3 {source_tar_blake3:?} is not equal to blake3 in recipe.toml"
                            );
                        }
                        create_dir(&source_dir)?;
                        fetch_extract_tar(source_tar, &source_dir, logger)?;
                        fetch_apply_patches(recipe_dir, patches, script, &source_dir, logger)?;
                    } else {
                        // need to trust this tar file
                        bail_other_err!(
                            "Please add blake3 = {source_tar_blake3:?} to {recipe:?}",
                            recipe = recipe_dir.join("recipe.toml").display(),
                        );
                    }
                }
            }
            offline_check_exists(&source_dir)?;
            FetchResult::new(source_dir, ident, cached)
        }
    };
    fetch_apply_source_info(recipe, result.source_ident.clone())?;
    Ok(result)
}
/// Fetch (or refresh) a recipe's source, online.
///
/// Dispatches on the recipe's source kind: `same-as` resolves and symlinks
/// to another recipe's source, `path` copies a local tree, `git` clones or
/// updates a repository (then applies patches and an optional script),
/// `tar` downloads and verifies a tarball, and a missing source section just
/// ensures an empty source dir. With `check_source == false` an existing
/// checkout is trusted as-is. For cargo builds that will actually be built,
/// `cargo fetch` is run afterwards to warm the crate cache.
pub fn fetch(recipe: &CookRecipe, check_source: bool, logger: &PtyOut) -> Result<FetchResult> {
    let recipe_dir = &recipe.dir;
    let source_dir = recipe_dir.join("source");
    match recipe.recipe.build.kind {
        BuildKind::None => {
            // the build function doesn't need source dir exists
            let ident = fetch_apply_source_info(recipe, "".to_string())?;
            return Ok(FetchResult::cached(source_dir, ident));
        }
        BuildKind::Remote => {
            return fetch_remote(recipe_dir, recipe, false, source_dir, logger);
        }
        _ => {}
    }
    let result = match &recipe.recipe.source {
        Some(SourceRecipe::SameAs { same_as }) => {
            let recipe = fetch_resolve_canon(recipe_dir, &same_as, recipe.name.is_host())?;
            // recursively fetch
            let r = fetch(&recipe, check_source, logger)?;
            fetch_make_symlink(&source_dir, &same_as)?;
            r
        }
        Some(SourceRecipe::Path { path }) => {
            let path = recipe_dir.join(path);
            // Re-copy only when the origin tree is newer than our copy.
            let cached = source_dir.is_dir() && modified_dir(&path)? <= modified_dir(&source_dir)?;
            if !cached {
                log_to_pty!(
                    logger,
                    "[DEBUG]: {:?} is newer than {:?}",
                    path.display(),
                    source_dir.display()
                );
                copy_dir_all(&path, &source_dir).map_err(wrap_io_err!(
                    &path,
                    source_dir,
                    "Copying source"
                ))?;
            }
            FetchResult::new(source_dir, "local_source".to_string(), cached)
        }
        Some(SourceRecipe::Git {
            git,
            upstream,
            branch,
            rev,
            patches,
            script,
            shallow_clone,
        }) => {
            //TODO: use libgit?
            let shallow_clone = *shallow_clone == Some(true);
            // `cached` means the existing checkout already matches the recipe
            // (right rev/branch/remote), so the reset/checkout/patch pass
            // below can be skipped entirely.
            let cached = if !source_dir.is_dir() {
                // Create source.tmp
                let source_dir_tmp = recipe_dir.join("source.tmp");
                create_dir_clean(&source_dir_tmp)?;
                // Clone the repository to source.tmp
                let mut command = Command::new("git");
                command
                    .arg("clone")
                    .arg("--recursive")
                    .arg(translate_mirror(&git));
                if let Some(branch) = branch {
                    command.arg("--branch").arg(branch);
                }
                if shallow_clone {
                    command
                        .arg("--filter=tree:0")
                        .arg("--also-filter-submodules");
                }
                command.arg(&source_dir_tmp);
                if let Err(e) = run_command(command, logger) {
                    if !is_redox() {
                        return Err(e);
                    }
                    // TODO: RedoxFS has a race condition problem with `--recursive` and running in multi CPU.
                    // It is appear that running the submodule update separately fixes it. Remove this when
                    // `git clone https://gitlab.redox-os.org/redox-os/relibc --recursive` proven to work in Redox OS.
                    let mut cmds = vec!["update", "--init"];
                    if shallow_clone {
                        cmds.push("--filter=tree:0");
                    }
                    manual_git_recursive_submodule(logger, &source_dir_tmp, cmds)?;
                }
                // Move source.tmp to source atomically
                rename(&source_dir_tmp, &source_dir)?;
                false
            } else if !check_source {
                true
            } else {
                if !source_dir.join(".git").is_dir() {
                    bail_other_err!(
                        "{:?} is not a git repository, but recipe indicated git source",
                        source_dir.display()
                    );
                }
                // Reset origin
                let mut command = Command::new("git");
                command.arg("-C").arg(&source_dir);
                command.arg("remote").arg("set-url").arg("origin").arg(git);
                run_command(command, logger)?;
                // Fetch origin
                let mut command = Command::new("git");
                command.arg("-C").arg(&source_dir);
                command.arg("fetch").arg("origin");
                run_command(command, logger)?;
                // Pinned rev on a detached HEAD: cached iff HEAD matches the
                // pinned rev. Branch tracking: cached iff branch, remote and
                // fetched rev all agree. Any other combination: not cached.
                let (head_rev, detached_rev) = get_git_head_rev(&source_dir)?;
                match (rev, detached_rev) {
                    (Some(rev), true) => {
                        if let Ok(exp_rev) = get_git_tag_rev(&source_dir, &rev) {
                            exp_rev == head_rev
                        } else {
                            let mut command = Command::new("git");
                            command.arg("-C").arg(&source_dir);
                            command.arg("gc");
                            run_command(command, logger)?;
                            if let Ok(exp_rev) = get_git_tag_rev(&source_dir, &rev) {
                                exp_rev == head_rev
                            } else {
                                false
                            }
                        }
                    }
                    (None, false) => {
                        let (_, remote_branch, remote_name, remote_url) =
                            get_git_remote_tracking(&source_dir)?;
                        // TODO: how to get default branch and compare it here?
                        if let Some(branch) = branch
                            && branch != &remote_branch
                        {
                            false
                        } else if remote_name != "origin" || &remote_url != chop_dot_git(git) {
                            false
                        } else {
                            match get_git_fetch_rev(&source_dir, &remote_url, &remote_branch) {
                                Ok(fetch_rev) => fetch_rev == head_rev,
                                Err(e) => {
                                    log_to_pty!(logger, "{}", e);
                                    false
                                }
                            }
                        }
                    }
                    _ => false,
                }
            };
            // Not cached: bring the checkout to the recipe's requested state
            // and (re)apply patches/script.
            if !cached {
                if let Some(_upstream) = upstream {
                    //TODO: set upstream URL (is this needed?)
                    // git remote set-url upstream "$GIT_UPSTREAM" &> /dev/null ||
                    // git remote add upstream "$GIT_UPSTREAM"
                    // git fetch upstream
                }
                if !patches.is_empty() || script.is_some() {
                    // Hard reset
                    let mut command = Command::new("git");
                    command.arg("-C").arg(&source_dir);
                    command.arg("reset").arg("--hard");
                    run_command(command, logger)?;
                }
                if let Some(rev) = rev {
                    // Check out specified revision
                    let mut command = Command::new("git");
                    command.arg("-C").arg(&source_dir);
                    command.arg("checkout").arg(rev);
                    run_command(command, logger)?;
                } else if !is_redox() {
                    //TODO: complicated stuff to check and reset branch to origin
                    //TODO: redox can't undestand this (got exit status 1)
                    let mut command = Command::new("bash");
                    command.arg("-c").arg(GIT_RESET_BRANCH);
                    if let Some(branch) = branch {
                        command.env("BRANCH", branch);
                    }
                    command.current_dir(&source_dir);
                    run_command(command, logger)?;
                }
                // Sync submodules URL
                let mut command = Command::new("git");
                command.arg("-C").arg(&source_dir);
                command.arg("submodule").arg("sync").arg("--recursive");
                if let Err(e) = run_command(command, logger) {
                    if !is_redox() {
                        return Err(e);
                    }
                    manual_git_recursive_submodule(logger, &source_dir, vec!["sync"])?;
                }
                // Update submodules
                let mut command = Command::new("git");
                command.arg("-C").arg(&source_dir);
                command
                    .arg("submodule")
                    .arg("update")
                    .arg("--init")
                    .arg("--recursive");
                if shallow_clone {
                    command.arg("--filter=tree:0");
                }
                if let Err(e) = run_command(command, logger) {
                    if !is_redox() {
                        return Err(e);
                    }
                    let mut cmds = vec!["update", "--init"];
                    if shallow_clone {
                        cmds.push("--filter=tree:0");
                    }
                    manual_git_recursive_submodule(logger, &source_dir, cmds)?;
                }
                fetch_apply_patches(recipe_dir, patches, script, &source_dir, logger)?;
            }
            let (head_rev, _) = get_git_head_rev(&source_dir)?;
            FetchResult::new(source_dir, head_rev, cached)
        }
        Some(SourceRecipe::Tar {
            tar,
            blake3,
            patches,
            script,
        }) => {
            let source_tar = recipe_dir.join("source.tar");
            let ident = blake3.clone().unwrap_or("no_tar_blake3_hash_info".into());
            let mut tar_updated = false;
            // Download/verify loop: a tarball whose hash mismatches the
            // recipe is deleted and re-downloaded once; a freshly downloaded
            // mismatch is a hard error.
            loop {
                if !source_tar.is_file() {
                    tar_updated = true;
                    download_wget(&tar, &source_tar, logger)?;
                }
                if !check_source {
                    break;
                }
                let source_tar_blake3 = get_blake3(&source_tar)?;
                if let Some(blake3) = blake3 {
                    if source_tar_blake3 == *blake3 {
                        break;
                    }
                    if tar_updated {
                        bail_other_err!(
                            "The downloaded tar blake3 {source_tar_blake3:?} is not equal to blake3 in recipe.toml"
                        )
                    } else {
                        log_to_pty!(
                            logger,
                            "DEBUG: source tar blake3 is different and need redownload"
                        );
                        remove_all(&source_tar)?;
                    }
                } else {
                    //TODO: set blake3 hash on the recipe with something like "cook fix"
                    log_to_pty!(
                        logger,
                        "WARNING: set blake3 for '{}' to '{}'",
                        source_tar.display(),
                        source_tar_blake3
                    );
                    break;
                }
            }
            let mut cached = true;
            if source_dir.is_dir() {
                if tar_updated || fetch_is_patches_newer(recipe_dir, patches, &source_dir)? {
                    log_to_pty!(
                        logger,
                        "DEBUG: source tar or patches is newer than the source directory"
                    );
                    remove_all(&source_dir)?
                }
            }
            if !source_dir.is_dir() {
                // Create source.tmp
                let source_dir_tmp = recipe_dir.join("source.tmp");
                create_dir_clean(&source_dir_tmp)?;
                fetch_extract_tar(source_tar, &source_dir_tmp, logger)?;
                fetch_apply_patches(recipe_dir, patches, script, &source_dir_tmp, logger)?;
                // Move source.tmp to source atomically
                rename(&source_dir_tmp, &source_dir)?;
                cached = false;
            }
            FetchResult::new(source_dir, ident, cached)
        }
        // Local Sources
        None => {
            if !source_dir.is_dir() {
                log_to_pty!(
                    logger,
                    "WARNING: Recipe without source section expected source dir at '{}'",
                    source_dir.display(),
                );
                create_dir(&source_dir)?;
            }
            FetchResult::cached(source_dir, "local_source".into())
        }
    };
    if let BuildKind::Cargo {
        cargopath,
        cargoflags: _,
        cargopackages: _,
        cargoexamples: _,
    } = &recipe.recipe.build.kind
    {
        if fetch_will_build(recipe) {
            fetch_cargo(&result.source_dir, cargopath.as_ref(), logger)?;
        }
    }
    fetch_apply_source_info(recipe, result.source_ident.to_string())?;
    Ok(result)
}
/// Workaround for `git submodule <cmd> --recursive` failures on Redox
/// (RedoxFS race condition): repeatedly scan for nested `.git` entries and
/// run the submodule command once per repository, until a full pass
/// discovers no new repositories.
fn manual_git_recursive_submodule(
    logger: &PtyOut,
    source_dir: &PathBuf,
    cmd: Vec<&str>,
) -> Result<()> {
    log_to_pty!(
        logger,
        "Git submodule {} failed, might be caused by race condition in RedoxFS, retrying without --recursive.",
        cmd[0]
    );
    // Maps each discovered repository root to whether the submodule command
    // has already been run inside it.
    let mut repo_registry: BTreeMap<PathBuf, bool> = BTreeMap::new();
    loop {
        let mut dirty_git = false;
        // New `.git` entries can appear after each round of
        // `git submodule update`, hence the rescan every iteration.
        let output = Command::new("find")
            .args(&[".", "-name", ".git"])
            .current_dir(&source_dir)
            .output()
            .map_err(wrap_io_err!("Failed to execute find"))?;
        let stdout = String::from_utf8_lossy(&output.stdout);
        for line in stdout.lines() {
            let git_path = PathBuf::from(line);
            if let Some(repo_root) = git_path.parent() {
                // Entry API: single lookup instead of contains_key + insert.
                if let std::collections::btree_map::Entry::Vacant(slot) =
                    repo_registry.entry(repo_root.to_path_buf())
                {
                    slot.insert(false);
                    dirty_git = true;
                }
            }
        }
        if !dirty_git {
            // completed
            return Ok(());
        }
        let pending_repos: Vec<PathBuf> = repo_registry
            .iter()
            .filter(|&(_, &synced)| !synced)
            .map(|(path, _)| path.clone())
            .collect();
        if pending_repos.is_empty() {
            bail_other_err!("No pending repos but dirty");
        }
        for repo in pending_repos {
            // Log through the pty like the rest of cook (was `println!`,
            // which bypassed the captured build log).
            log_to_pty!(logger, "==> Processing: {:?}", repo);
            let mut command = Command::new("git");
            command.arg("-C").arg(&repo).current_dir(&source_dir);
            command.arg("submodule");
            for cmd in &cmd {
                command.arg(cmd);
            }
            run_command(command, logger)?;
            repo_registry.insert(repo, true);
        }
    }
}
/// Mirrors the up-to-date check in `cook_build`: a recipe will be (re)built
/// unless every stage pkgar for it is already present.
fn fetch_will_build(recipe: &CookRecipe) -> bool {
    if recipe.is_deps {
        // Dependency recipes skip the heavy source check; assume a build.
        return true;
    }
    let stage_dirs =
        cook_build::get_stage_dirs(&recipe.recipe.optional_packages, &recipe.target_dir());
    // Build is needed when any expected stage pkgar is missing.
    stage_dirs
        .iter()
        .any(|dir| !dir.with_added_extension("pkgar").is_file())
}
/// Ensure `source_dir` is a symlink to `<same_as>/source` for a `same-as`
/// recipe; errors if a real directory already occupies that path.
pub(crate) fn fetch_make_symlink(source_dir: &PathBuf, same_as: &String) -> Result<()> {
    if source_dir.is_symlink() {
        // Link already in place; nothing to do.
        return Ok(());
    }
    if source_dir.is_dir() {
        bail_other_err!(
            "'{dir:?}' is a directory, but recipe indicated a symlink. \n\
            try removing '{dir:?}' if you haven't made any changes that would be lost",
            dir = source_dir.display(),
        )
    }
    let target_dir = Path::new(same_as).join("source");
    std::os::unix::fs::symlink(&target_dir, source_dir).map_err(|err| {
        format!(
            "failed to symlink '{}' to '{}': {}\n{:?}",
            target_dir.display(),
            source_dir.display(),
            err,
            err
        )
    })?;
    Ok(())
}
/// Resolve a `same-as = "<path>"` reference to the canonical recipe it
/// points at.
///
/// Chained `same-as` links could form a cycle; since each hop appends path
/// components, a resolved path with more than 50 separators is treated as
/// an infinite loop.
pub(crate) fn fetch_resolve_canon(
    recipe_dir: &Path,
    same_as: &String,
    is_host: bool,
) -> Result<CookRecipe> {
    let canon_dir = Path::new(recipe_dir).join(same_as);
    // `to_string_lossy` instead of `to_str().unwrap()`: a non-UTF-8 path
    // must not panic here, and the separator count survives lossy decoding.
    if canon_dir
        .to_string_lossy()
        .chars()
        .filter(|c| *c == '/')
        .count()
        > 50
    {
        bail_other_err!("Infinite loop detected");
    }
    if !canon_dir.exists() {
        bail_other_err!("{dir:?} does not exist", dir = canon_dir.display());
    }
    CookRecipe::from_path(canon_dir.as_path(), true, is_host).map_err(Error::from)
}
/// Extract `source_tar` into `source_dir_tmp`, stripping the tarball's
/// single top-level directory.
pub(crate) fn fetch_extract_tar(
    source_tar: PathBuf,
    source_dir_tmp: &PathBuf,
    logger: &PtyOut,
) -> Result<()> {
    let verbose = crate::config::get_config().cook.verbose;
    let mut command = Command::new("tar");
    if is_redox() {
        // Redox tar only understands the short combined option form.
        let mode = if verbose { "xvf" } else { "xf" };
        command.arg(mode);
    } else {
        command.arg("--extract").arg("--no-same-owner");
        if verbose {
            command.arg("--verbose");
        }
        command.arg("--file");
    }
    command
        .arg(&source_tar)
        .arg("--directory")
        .arg(source_dir_tmp)
        .arg("--strip-components")
        .arg("1");
    run_command(command, logger)?;
    Ok(())
}
/// Run `cargo fetch` for the crate at `source_dir` (optionally nested under
/// `cargopath`), preferring the cookbook redoxer wrapper when available.
pub(crate) fn fetch_cargo(
    source_dir: &PathBuf,
    cargopath: Option<&String>,
    logger: &PtyOut,
) -> Result<()> {
    let mut source_dir = source_dir.clone();
    if let Some(cargopath) = cargopath {
        source_dir = source_dir.join(cargopath);
    }
    let local_redoxer = Path::new("target/release/cookbook_rbos_redoxer");
    let mut command = if is_redox() && !local_redoxer.is_file() {
        // On Redox without a local build, rely on the wrapper from PATH.
        Command::new("cookbook_rbos_redoxer")
    } else {
        // Prefer the locally built wrapper; fall back to plain cargo.
        // `unwrap_or_else` avoids allocating the fallback path eagerly
        // (clippy: or_fun_call).
        let cookbook_redoxer = local_redoxer
            .canonicalize()
            .unwrap_or_else(|_| PathBuf::from("cargo"));
        Command::new(&cookbook_redoxer)
    };
    command.arg("fetch");
    command.arg("--manifest-path");
    command.arg(source_dir.join("Cargo.toml").into_os_string());
    run_command(command, logger)?;
    Ok(())
}
/// Fetch prebuilt source artifacts for a `remote` recipe from the binary
/// package repository.
///
/// For every package of the recipe this downloads (unless `offline_mode`)
/// `<name>.toml` and `<name>.pkgar` into the target dir, refreshing them
/// when the server's blake3 differs from the cached manifest. In offline
/// mode the files must already exist. The mandatory (unnamed) package
/// supplies the recipe's source identity and the returned `FetchResult`.
pub fn fetch_remote(
    recipe_dir: &Path,
    recipe: &CookRecipe,
    offline_mode: bool,
    source_dir: PathBuf,
    logger: &PtyOut,
) -> Result<FetchResult> {
    let (mut manager, repository) = fetch_repo::get_binary_repo();
    let target_dir = create_target_dir(recipe_dir, recipe.target)?;
    // Route download progress into the build pty when one is attached.
    if logger.is_some() {
        let writer = logger.as_ref().unwrap().1.try_clone().unwrap();
        manager.set_callback(Rc::new(RefCell::new(PlainPtyCallback::new(writer))));
    }
    let packages = recipe.recipe.get_packages_list();
    let name = recipe_dir
        .file_name()
        .ok_or("Unable to get recipe name")?
        .to_str()
        .unwrap();
    let mut result = None;
    let mut cached = true;
    for package in packages {
        let (_, source_pkgar, source_toml) = package_source_paths(package, &target_dir);
        let source_name = get_package_name(name, package);
        let Some(repo_blake3) = repository.packages.get(&source_name) else {
            bail_other_err!("Package {source_name} does not exist in server repository")
        };
        if !offline_mode {
            // Drop stale local copies when the server advertises a new hash.
            if source_toml.is_file() {
                let pkg_toml = read_source_toml(&source_toml)?;
                if &pkg_toml.blake3 != repo_blake3 {
                    log_to_pty!(logger, "DEBUG: Updating source binaries");
                    remove_all(&source_toml)?;
                    if source_pkgar.is_file() {
                        remove_all(&source_pkgar)?;
                    }
                }
            }
            if !source_toml.is_file() {
                {
                    let toml_file = File::create(&source_toml)
                        .map_err(|e| format!("Unable to create source.toml: {e:?}"))?;
                    let mut writer = DownloadBackendWriter::ToFile(toml_file);
                    manager
                        .download(&format!("{}.toml", &source_name), None, &mut writer)
                        .map_err(|e| format!("Unable to download source.toml: {e:?}"))?;
                }
                let pkg_toml = read_source_toml(&source_toml)?;
                let pkgar_file = File::create(&source_pkgar)
                    .map_err(|e| format!("Unable to create source.pkgar: {e:?}"))?;
                let mut writer = DownloadBackendWriter::ToFile(pkgar_file);
                manager
                    .download(
                        &format!("{}.pkgar", &source_name),
                        Some(pkg_toml.network_size),
                        &mut writer,
                    )
                    .map_err(|e| format!("Unable to download source.pkgar: {e:?}"))?;
                cached = false;
            }
            // manager.download(file, 0, dest)
        } else {
            offline_check_exists(&source_pkgar)?;
            offline_check_exists(&source_toml)?;
        }
        // guaranteed to exist once and last in iteration
        if package.is_none() {
            let pkg_toml = read_source_toml(&source_toml)?;
            fetch_apply_source_info_from_remote(
                recipe,
                &SourceIdentifier {
                    commit_identifier: pkg_toml.commit_identifier.clone(),
                    source_identifier: pkg_toml.source_identifier.clone(),
                    time_identifier: pkg_toml.time_identifier.clone(),
                    ..Default::default()
                },
            )?;
            result = Some(FetchResult::new(
                source_dir.clone(),
                pkg_toml.source_identifier,
                cached,
            ));
        }
    }
    result.ok_or_else(wrap_other_err!("There's no mandatory package in remote"))
}
/// Read and parse a downloaded `source.toml` package manifest.
fn read_source_toml(source_toml: &Path) -> Result<pkg::Package> {
    let mut contents = String::new();
    File::open(source_toml)
        .map_err(|e| format!("Unable to open source.toml: {e:?}"))?
        .read_to_string(&mut contents)
        .map_err(|e| format!("Unable to read source.toml: {e:?}"))?;
    let pkg_toml = pkg::Package::from_toml(&contents)
        .map_err(|e| format!("Unable to parse source.toml: {e:?}"))?;
    Ok(pkg_toml)
}
/// True when any patch file listed in the recipe is newer than the source
/// directory (meaning the source needs re-extraction and re-patching).
pub(crate) fn fetch_is_patches_newer(
    recipe_dir: &Path,
    patches: &Vec<String>,
    source_dir: &PathBuf,
) -> Result<bool> {
    // don't check source files inside as it can be mixed with user patches
    let source_time = modified(&source_dir)?;
    for patch_name in patches {
        let patch_file = recipe_dir.join(patch_name);
        if !patch_file.is_file() {
            bail_other_err!("Failed to find patch file {:?}", patch_file.display());
        }
        if modified(&patch_file)? > source_time {
            return Ok(true);
        }
    }
    Ok(false)
}
/// Apply the recipe's patch files (via `patch -p1`) and then run its
/// optional preparation script inside `source_dir_tmp`.
pub(crate) fn fetch_apply_patches(
    recipe_dir: &Path,
    patches: &Vec<String>,
    script: &Option<String>,
    source_dir_tmp: &PathBuf,
    logger: &PtyOut,
) -> Result<()> {
    for patch_name in patches {
        let patch_file = recipe_dir.join(patch_name);
        if !patch_file.is_file() {
            bail_other_err!("Failed to find patch file {:?}", patch_file.display());
        }
        let patch_text = fs::read_to_string(&patch_file).map_err(|err| {
            format!(
                "failed to read patch file '{}': {}\n{:#?}",
                patch_file.display(),
                err,
                err
            )
        })?;
        // Feed the patch through stdin rather than as a file argument.
        let mut command = Command::new("patch");
        command.arg("--directory").arg(source_dir_tmp);
        command.arg("--strip=1");
        run_command_stdin(command, patch_text.as_bytes(), logger)?;
    }
    if let Some(script) = script {
        // The script runs after patching, prefixed with the shared prelude.
        let mut command = Command::new("bash");
        command.arg("-ex");
        command.current_dir(source_dir_tmp);
        run_command_stdin(
            command,
            format!("{SHARED_PRESCRIPT}\n{script}").as_bytes(),
            logger,
        )?;
    }
    Ok(())
}
/// Record this recipe's source identity (cookbook commit/time plus the given
/// source identifier) into its target directory; returns the identifier.
pub(crate) fn fetch_apply_source_info(
    recipe: &CookRecipe,
    source_identifier: String,
) -> Result<String> {
    let ident = crate::cook::ident::get_ident();
    let info = SourceIdentifier {
        commit_identifier: ident.commit.to_string(),
        time_identifier: ident.time.to_string(),
        source_identifier,
    };
    fetch_apply_source_info_from_remote(recipe, &info)?;
    Ok(info.source_identifier)
}
/// Serialize the given source identity to `<target dir>/source_info.toml`.
pub(crate) fn fetch_apply_source_info_from_remote(
    recipe: &CookRecipe,
    info: &SourceIdentifier,
) -> Result<()> {
    let target_dir = create_target_dir(&recipe.dir, recipe.target)?;
    serialize_and_write(&target_dir.join("source_info.toml"), &info)?;
    Ok(())
}
/// Load the previously recorded source identity from this recipe's
/// `source_info.toml`.
pub fn fetch_get_source_info(recipe: &CookRecipe) -> Result<SourceIdentifier> {
    let source_toml_path = recipe.target_dir().join("source_info.toml");
    let toml_content = fs::read_to_string(source_toml_path)
        .map_err(|e| format!("Unable to read source_info.toml: {:?}", e))?;
    let parsed = toml::from_str(&toml_content)
        .map_err(|e| format!("Unable to parse source_info.toml: {:?}", e))?;
    Ok(parsed)
}
+204
View File
@@ -0,0 +1,204 @@
use std::{
cell::RefCell,
io::{PipeWriter, Write},
path::{Path, PathBuf},
rc::Rc,
time::Duration,
};
use pkg::{
PackageName, RemotePackage, RepoManager, Repository,
callback::{Callback, SilentCallback},
net_backend::{CurlBackend, DownloadBackend},
};
// TODO: This is a workaround, but as long as whole
// fetch operation is in single thread, this is ok
thread_local! {
    // Lazily initialized cache of the remote binary repository connection
    // and its parsed index; populated on first use by `get_binary_repo`.
    static BINARY_REPO: RefCell<Option<(RepoManager, Repository)>> = RefCell::new(None);
}
/// Load `repo.toml` from the local cache, returning `None` when it is
/// missing, unparsable, or (in online mode) older than 24 hours.
fn load_cached_repo(path: &Path) -> Option<Repository> {
    let metadata = std::fs::metadata(path).ok()?;
    let offline = crate::config::get_config().cook.offline;
    if !offline {
        // Anything older than a day counts as stale when online.
        let cutoff = std::time::SystemTime::now().checked_sub(Duration::from_secs(24 * 3600))?;
        let mtime = metadata.modified().ok()?;
        if mtime < cutoff {
            // stale cache
            let _ = std::fs::remove_file(path);
            return None;
        }
    }
    let toml_str = std::fs::read_to_string(path).ok()?;
    Repository::from_toml(&toml_str).ok()
}
/// Connect to the remote binary package repository and load its index.
///
/// Syncs signing keys, then uses a cached `repo.toml` when fresh (see
/// `load_cached_repo`), falling back to downloading the index. Panics on
/// setup/network failure, since cooking cannot proceed without it.
fn init_binary_repo() -> (RepoManager, Repository) {
    let callback = Rc::new(RefCell::new(SilentCallback::new()));
    let download_backend = CurlBackend::new().expect("Curl not found");
    let mut repo = RepoManager::new(callback, Box::new(download_backend));
    repo.add_remote(crate::REMOTE_PKG_SOURCE, redoxer::target())
        .expect("Unable to add remote");
    let repo_path = PathBuf::from("build/remotes");
    repo.set_download_path(repo_path.clone());
    repo.sync_keys().expect("Unable to sync keys");
    // The repository index is itself published as the "repo" package's toml.
    let repo_toml = load_cached_repo(&repo_path.join("repo.toml")).unwrap_or_else(|| {
        let (toml_str, _) = repo
            .get_package_toml(&PackageName::new("repo").unwrap())
            .expect("Failed to fetch repo.toml");
        Repository::from_toml(&toml_str).expect("Fetched repo.toml is invalid")
    });
    (repo, repo_toml)
}
pub fn get_binary_repo() -> (RepoManager, Repository) {
BINARY_REPO.with(|cell| {
let mut opt = cell.borrow_mut();
if opt.is_none() {
*opt = Some(init_binary_repo());
}
let (repo, repo_toml) = opt.as_ref().unwrap();
((*repo).clone(), repo_toml.clone())
})
}
/// Progress callback that renders fetch/download status as plain text onto
/// a pty pipe, rewriting a single status line in place with ANSI erase codes.
pub struct PlainPtyCallback {
    size: u64,            // expected download size in bytes (0 until known)
    unknown_size: bool,   // true when the server did not report a length
    pos: u64,             // bytes downloaded so far
    fetch_processed: usize,
    fetch_total: usize,
    interactive: bool,    // whether user confirmation on the terminal is required
    download_file: Option<String>, // name of the file currently downloading
    pty: PipeWriter,
}
impl PlainPtyCallback {
    pub fn new(pty: PipeWriter) -> Self {
        Self {
            size: 0,
            unknown_size: false,
            pos: 0,
            fetch_processed: 0,
            fetch_total: 0,
            interactive: false,
            download_file: None,
            pty,
        }
    }
    /// Set if user require to agree on terminal
    pub fn set_interactive(&mut self, enabled: bool) {
        self.interactive = enabled;
    }
    fn flush(&self) {
        // NOTE(review): this flushes stderr, not `self.pty` — presumably the
        // pipe write end is unbuffered; confirm against the pty plumbing.
        let _ = std::io::stderr().flush();
    }
    /// Format a byte count as a human-readable, binary-prefixed size.
    pub fn format_size(bytes: u64) -> String {
        if bytes == 0 {
            return "0 B".to_string();
        }
        const UNITS: [&str; 5] = ["B", "KiB", "MiB", "GiB", "TiB"];
        // Clamp to the largest unit: without the clamp, values >= 1 PiB
        // produced an index of 5+ and panicked on UNITS[i].
        let i = ((bytes as f64).log(1024.0).floor() as usize).min(UNITS.len() - 1);
        let size = bytes as f64 / 1024.0_f64.powi(i as i32);
        format!("{:.2} {}", size, UNITS[i])
    }
    fn downloading_str(&self) -> &'static str {
        "Downloading"
    }
}
// ANSI: carriage return + erase-line, to redraw the status line in place.
const RESET_LINE: &str = "\r\x1b[2K";
impl Callback for PlainPtyCallback {
    fn fetch_start(&mut self, initial_count: usize) {
        self.fetch_total = 0;
        self.fetch_processed = 0;
        self.fetch_package_increment(0, initial_count);
    }
    fn fetch_package_name(&mut self, pkg_name: &PackageName) {
        // resuming after fetch_package_increment
        let _ = write!(&self.pty, " {}", pkg_name.as_str());
        self.flush();
    }
    fn fetch_package_increment(&mut self, added_processed: usize, added_count: usize) {
        self.fetch_processed += added_processed;
        self.fetch_total += added_count;
        let _ = write!(
            &self.pty,
            "{RESET_LINE}Fetching: [{}/{}]",
            self.fetch_processed, self.fetch_total
        );
        self.flush();
    }
    fn fetch_end(&mut self) {
        if self.fetch_processed == self.fetch_total {
            let _ = writeln!(&self.pty, "{RESET_LINE}Fetch complete.");
        } else {
            let _ = writeln!(&self.pty, "{RESET_LINE}Fetch incomplete.");
        }
    }
    fn download_start(&mut self, length: u64, file: &str) {
        self.size = length;
        // A reported length of 0 means the size is unknown; no progress
        // line is drawn in that case.
        self.unknown_size = length == 0;
        self.pos = 0;
        if !self.unknown_size {
            let _ = write!(&self.pty, "{RESET_LINE}{} {file}", self.downloading_str());
            self.download_file = Some(file.to_string());
            self.flush();
        }
    }
    fn download_increment(&mut self, downloaded: u64) {
        self.pos += downloaded;
        if self.unknown_size {
            self.size += downloaded;
        }
        if self.unknown_size {
            return;
        }
        // keep using MB for consistency
        let pos_mb = self.pos as f64 / 1_048_576.0;
        let size_mb = self.size as f64 / 1_048_576.0;
        let file_name = self
            .download_file
            .as_ref()
            .map(|s| s.as_str())
            .unwrap_or("");
        let _ = write!(
            &self.pty,
            "{RESET_LINE}{} {} [{:.2} MB / {:.2} MB]",
            self.downloading_str(),
            file_name,
            pos_mb,
            size_mb
        );
        self.flush();
    }
    fn download_end(&mut self) {
        // Terminate the in-place progress line (only drawn for known sizes).
        if !self.unknown_size {
            let _ = writeln!(&self.pty, "");
            self.download_file = None;
        }
    }
    fn install_extract(&mut self, remote_pkg: &RemotePackage) {
        let _ = writeln!(&self.pty, "Extracting {}...", remote_pkg.package.name);
        self.flush();
    }
}
+456
View File
@@ -0,0 +1,456 @@
use serde::Serialize;
use std::{
collections::BTreeSet,
fs,
io::{self, Write},
path::{Path, PathBuf},
process::{self, Command, Stdio},
time::SystemTime,
};
use walkdir::{DirEntry, WalkDir};
use crate::{
Error, Result, bail_other_err,
config::translate_mirror,
cook::pty::{PtyOut, spawn_to_pipe},
wrap_io_err, wrap_other_err,
};
//TODO: pub(crate) for all of these functions
/// Remove `path`, whether it is a file, a directory tree, or a symlink.
pub fn remove_all(path: &Path) -> Result<()> {
    // Use symlink_metadata so symlinks are never followed: `path.is_dir()`
    // follows links, routing a symlink-to-directory into remove_dir_all,
    // which fails on the link itself on Unix. A symlink (to anything) must
    // be unlinked with remove_file instead.
    let is_dir = fs::symlink_metadata(path)
        .map(|meta| meta.is_dir())
        .unwrap_or(false);
    if is_dir {
        fs::remove_dir_all(path)
    } else {
        fs::remove_file(path)
    }
    .map_err(wrap_io_err!(path, "Removing all"))
}
/// Create `dir` and any missing parent directories.
pub fn create_dir(dir: &Path) -> Result<()> {
    fs::create_dir_all(dir).map_err(wrap_io_err!(dir, "Recursively creating dir"))
}
/// Create `dir` as an empty directory, removing any previous contents first.
pub fn create_dir_clean(dir: &Path) -> Result<()> {
    if dir.is_dir() {
        remove_all(dir)?;
    }
    create_dir(dir)
}
/// Ensure `<recipe_dir>/target/<target>` exists and return its path.
pub fn create_target_dir(recipe_dir: &Path, target: &'static str) -> Result<PathBuf> {
    let dir = recipe_dir.join("target").join(target);
    if !dir.is_dir() {
        create_dir(&dir)?;
    }
    Ok(dir)
}
/// Recursively copy the contents of `src` into `dst`, creating `dst`
/// (and any missing parents) first. Directory entries recurse; every
/// other entry is copied with `fs::copy`.
pub fn copy_dir_all(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> io::Result<()> {
    let dst = dst.as_ref();
    fs::create_dir_all(dst)?;
    for entry in fs::read_dir(src)? {
        let entry = entry?;
        let target = dst.join(entry.file_name());
        if entry.file_type()?.is_dir() {
            copy_dir_all(entry.path(), target)?;
        } else {
            fs::copy(entry.path(), target)?;
        }
    }
    Ok(())
}
/// Move selected files out of `src`: `mv` maps each root-relative path
/// to a destination root (`None` leaves the file in place).
// NOTE(review): `&Box<impl Fn>` triggers clippy's `borrowed_box`; the
// signature is kept as-is because callers pass it this way.
pub fn move_dir_all_fn<'a>(
    src: impl AsRef<Path>,
    mv: &'a Box<impl Fn(PathBuf) -> Option<&'a Path>>,
) -> io::Result<()> {
    move_dir_all_inner_fn(&src, &src, mv)
}
/// Recursive worker for [`move_dir_all_fn`]. `srcrel` stays fixed at the
/// original root, so `mv` always sees paths relative to that root no
/// matter how deep the recursion goes.
fn move_dir_all_inner_fn<'a>(
    src: impl AsRef<Path>,
    srcrel: impl AsRef<Path>,
    mv: &'a Box<impl Fn(PathBuf) -> Option<&'a Path>>,
) -> io::Result<()> {
    // Collect first, move afterwards, so the directory is not mutated
    // while `read_dir` is still iterating it.
    let mut files = Vec::new();
    for entry in fs::read_dir(&src)? {
        let entry = entry?;
        let ty = entry.file_type()?;
        if ty.is_dir() {
            move_dir_all_inner_fn(entry.path(), srcrel.as_ref(), mv)?;
        } else {
            let path: PathBuf = entry.path();
            // Entries outside the root (should not happen) are skipped.
            let Ok(relpath) = path.strip_prefix(&srcrel) else {
                continue;
            };
            if let Some(dst) = mv(relpath.to_path_buf()) {
                files.push((entry.path(), relpath.to_path_buf(), dst.to_owned()));
            }
        }
    }
    for (src, srcrel, dst) in files {
        // Destination is `<dst>/<root-relative path>`, with parent
        // directories created on demand.
        let path = dst.join(&srcrel);
        fs::create_dir_all(&path.parent().unwrap())?;
        std::fs::rename(&src, &path)?;
    }
    Ok(())
}
/// Create a symlink at `link` pointing to `original` (Unix only).
pub fn symlink(original: impl AsRef<Path>, link: impl AsRef<Path>) -> Result<()> {
    std::os::unix::fs::symlink(&original, &link)
        .map_err(wrap_io_err!(link.as_ref(), "Creating symlink"))
}
/// Extract the modification time from already-fetched `metadata`;
/// `path` is only used for error context.
fn modified_inner(path: &Path, metadata: fs::Metadata) -> Result<SystemTime> {
    metadata
        .modified()
        .map_err(wrap_io_err!(path, "Reading modified time"))
}
/// Modification time of `path` (follows symlinks, per `fs::metadata`).
pub fn modified(path: &Path) -> Result<SystemTime> {
    let metadata = fs::metadata(path).map_err(wrap_io_err!(path, "Reading metadata"))?;
    modified_inner(path, metadata)
}
/// Newest modification time over `path` as reported by `func`;
/// `UNIX_EPOCH` for an empty list. Fails on the first error.
pub fn modified_all(
    path: &Vec<PathBuf>,
    func: fn(path: &Path) -> Result<SystemTime>,
) -> Result<SystemTime> {
    path.iter().try_fold(SystemTime::UNIX_EPOCH, |newest, entry| {
        Ok(newest.max(func(entry)?))
    })
}
/// Newest modification time over an iterator of paths, as reported by
/// `func`; `UNIX_EPOCH` when the iterator is empty. Fails on the first
/// error. Same contract as [`modified_all`], but borrowing-iterator based.
pub fn modified_all_btree<'a>(
    mut path: impl Iterator<Item = &'a Path>,
    func: fn(path: &Path) -> Result<SystemTime>,
) -> Result<SystemTime> {
    path.try_fold(SystemTime::UNIX_EPOCH, |newest, entry| {
        Ok(newest.max(func(entry)?))
    })
}
/// Newest modification time of `dir` itself and everything WalkDir
/// yields beneath it, restricted by `filter` (entries rejected by the
/// filter are pruned along with their subtrees, per `filter_entry`).
fn modified_dir_inner<F: FnMut(&DirEntry) -> bool>(dir: &Path, filter: F) -> Result<SystemTime> {
    // Seed with the directory's own mtime so an empty dir still reports
    // something meaningful.
    let mut newest = modified(dir)?;
    for entry_res in WalkDir::new(dir).into_iter().filter_entry(filter) {
        // walkdir errors convert into the crate error via `From` (`?`).
        let entry = entry_res?;
        let modified = modified_inner(entry.path(), entry.metadata()?)?;
        if modified > newest {
            newest = modified;
        }
    }
    Ok(newest)
}
/// Newest modification time of `dir` and its entire subtree.
pub fn modified_dir(dir: &Path) -> Result<SystemTime> {
    modified_dir_inner(dir, |_| true)
}
/// Like [`modified_dir`], but prunes any entry named `.git` (and hence
/// its whole subtree). Entries with non-UTF-8 names are kept.
pub fn modified_dir_ignore_git(dir: &Path) -> Result<SystemTime> {
    modified_dir_inner(dir, |entry| {
        entry
            .file_name()
            .to_str()
            .map_or(true, |name| name != ".git")
    })
}
pub fn check_files_present(dir: &Path, expected_files: &BTreeSet<&str>) -> Result<bool> {
let entries = fs::read_dir(dir).map_err(wrap_io_err!(dir, "Reading list files"))?;
let mut matches = 0;
for entry_res in entries {
let entry = entry_res.map_err(wrap_io_err!(dir, "Reading file entry"))?;
let filename = entry.file_name();
let Some(filename) = filename.to_str() else {
continue;
};
if expected_files.contains(&filename) {
matches += 1
} else if filename.starts_with('.') {
continue;
} else {
return Ok(false);
}
}
Ok(matches == expected_files.len())
}
/// Rename `src` to `dst`, attaching both paths to any error.
pub fn rename(src: &Path, dst: &Path) -> Result<()> {
    fs::rename(src, dst).map_err(wrap_io_err!(src, dst, "Renaming"))
}
/// Run `command` with its output routed through `stdout_pipe`, waiting
/// for completion and converting an unsuccessful exit status into
/// `Error::Command`.
pub fn run_command(mut command: process::Command, stdout_pipe: &PtyOut) -> Result<()> {
    let status = spawn_to_pipe(&mut command, stdout_pipe)?
        .wait()
        .map_err(wrap_io_err!("waiting to exit"))?;
    if !status.success() {
        return Err(Error::Command(command, status));
    }
    Ok(())
}
/// Run `command` with output routed through `stdout_pipe`, feeding
/// `stdin_data` to the child's stdin, and failing on a non-zero exit.
///
/// Fixes two issues with the previous version:
/// - the stdin handle is now taken out of the child and dropped as soon
///   as the data is written, closing the pipe so the child sees EOF —
///   keeping it open until `wait()` can deadlock against children that
///   read stdin to completion;
/// - the error context on `wait()` said "Spawning" (copy-paste); it now
///   matches `run_command`'s "waiting to exit".
pub fn run_command_stdin(
    mut command: process::Command,
    stdin_data: &[u8],
    stdout_pipe: &PtyOut,
) -> Result<()> {
    command.stdin(Stdio::piped());
    let mut child = spawn_to_pipe(&mut command, stdout_pipe)?;
    match child.stdin.take() {
        Some(mut stdin) => {
            stdin
                .write_all(stdin_data)
                .map_err(wrap_io_err!("Writing to stdin"))?;
            // `stdin` is dropped here, closing the write end of the pipe.
        }
        None => bail_other_err!("stdin is not captured"),
    }
    let status = child.wait().map_err(wrap_io_err!("waiting to exit"))?;
    if !status.success() {
        return Err(Error::Command(command, status));
    }
    Ok(())
}
/// Serialize `content` to TOML text and write it to `file_path`.
pub fn serialize_and_write<T: Serialize>(file_path: &Path, content: &T) -> Result<()> {
    let toml_content = match toml::to_string(content) {
        Ok(text) => text,
        Err(err) => {
            return Err(wrap_other_err!(
                "Failed to serialize content for {:?}: {}",
                file_path.display(),
                err
            )());
        }
    };
    fs::write(file_path, toml_content).map_err(wrap_io_err!(file_path, "Writing to file"))
}
/// Fail with a clear error when `path` is missing, instead of letting a
/// later download step be attempted in offline mode.
///
/// The error message grammar is fixed ("is not exist" -> "does not
/// exist"); no other behavior changes.
pub fn offline_check_exists(path: &PathBuf) -> Result<()> {
    if !path.exists() {
        bail_other_err!(
            "{path:?} does not exist and unable to continue in offline mode",
            path = path.display(),
        );
    }
    Ok(())
}
/// Download `url` to `dest` via `wget`, unless `dest` already exists.
///
/// Fetches into a `.tmp` sibling first (resumable thanks to
/// `--continue`) and renames into place only on success, so a partial
/// download never masquerades as a finished file.
pub fn download_wget(url: &str, dest: &PathBuf, logger: &PtyOut) -> Result<()> {
    if dest.is_file() {
        return Ok(());
    }
    let dest_tmp = PathBuf::from(format!("{}.tmp", dest.display()));
    let mut command = Command::new("wget");
    command
        .arg(translate_mirror(url))
        .arg("--continue")
        .arg("-O")
        .arg(&dest_tmp);
    run_command(command, logger)?;
    rename(&dest_tmp, dest)
}
/// Read the entire file at `path` as UTF-8, with path context on error.
pub fn read_to_string(path: &Path) -> Result<String> {
    fs::read_to_string(path).map_err(wrap_io_err!(path, "Reading file"))
}
/// Read the current commit of the repository at `dir` straight from
/// `.git` (no git binary), returning `(commit, detached)`.
///
/// A symbolic HEAD (`ref: …`) is resolved through the loose ref file or,
/// failing that, `packed-refs`; otherwise HEAD holds the commit itself
/// and the head is detached.
pub fn get_git_head_rev(dir: &PathBuf) -> Result<(String, bool)> {
    let head_str = read_to_string(&dir.join(".git/HEAD"))?;
    match head_str.strip_prefix("ref: ") {
        Some(entry) => {
            let entry = entry.trim_end();
            let git_ref = dir.join(".git").join(entry);
            let ref_str = match git_ref.is_file() {
                true => read_to_string(&git_ref)?,
                false => get_git_ref_entry(dir, entry)?,
            };
            Ok((ref_str.trim().to_string(), false))
        }
        None => Ok((head_str.trim().to_string(), true)),
    }
}
/// Resolve `tag` to a commit hash: a 40-character hex string is already
/// a full commit and is returned as-is; anything else is looked up
/// under `refs/tags/`.
pub fn get_git_tag_rev(dir: &PathBuf, tag: &str) -> Result<String> {
    let is_full_hash = tag.len() == 40 && tag.bytes().all(|b| b.is_ascii_hexdigit());
    if is_full_hash {
        return Ok(tag.to_string());
    }
    get_git_ref_entry(dir, &format!("refs/tags/{tag}"))
}
/// Look up `entry` (e.g. `refs/tags/v1.0`) in `.git/packed-refs` and
/// return its commit hash. For annotated tags, a following `^<sha>`
/// peel line (the tagged commit) takes precedence over the tag object.
pub fn get_git_ref_entry(dir: &PathBuf, entry: &str) -> Result<String> {
    // https://git-scm.com/book/en/v2/Git-Internals-Maintenance-and-Data-Recovery
    let git_refs = dir.join(".git/packed-refs");
    let refs_str = read_to_string(&git_refs)?;
    let mut lines = refs_str.lines();
    while let Some(line) = lines.next() {
        // NOTE(review): substring match — `refs/tags/v1` would also hit
        // a `refs/tags/v1.1` line; confirm an exact-suffix match is not
        // needed here.
        if line.contains(entry) {
            // packed-refs lines are "<sha> <refname>".
            let mut sha = line
                .split_whitespace()
                .next()
                .ok_or_else(wrap_other_err!("Packed-refs line is malformed"))?;
            if let Some(next_line) = lines.next() {
                if next_line.starts_with('^') {
                    sha = &next_line[1..];
                }
            }
            return Ok(sha.to_string());
        }
    }
    Err(wrap_other_err!("Could not find a rev for {}", entry)())
}
/// get commit rev after fetch
///
/// Scans `.git/FETCH_HEAD` for the line recording `remote_branch` of
/// `remote_url` and returns its leading commit hash. Lines marked
/// `not-for-merge` are skipped.
pub fn get_git_fetch_rev(dir: &PathBuf, remote_url: &str, remote_branch: &str) -> Result<String> {
    let git_fetch_head = dir.join(".git/FETCH_HEAD");
    let fetch_head_content = read_to_string(&git_fetch_head)?;
    // FETCH_HEAD lines end in a comment like: branch 'master' of <url>
    let expected_comment_part = format!("branch '{}' of {}", remote_branch, remote_url);
    for line in fetch_head_content.lines() {
        if line.contains(&expected_comment_part) && !line.contains("not-for-merge") {
            let sha = line
                .split_whitespace()
                .next()
                .ok_or_else(wrap_other_err!("FETCH_HEAD line is malformed"))?;
            return Ok(sha.to_string());
        }
    }
    Err(wrap_other_err!(
        "Could not find a fetch target for tracking {}",
        expected_comment_part
    )())
}
/// (local_branch_name, remote_branch, remote_name, remote_url)
/// -> ("fix_stuff", "master", "origin", "https://gitlab.redox-os.org/willnode/redox")
///
/// Resolves the checked-out branch's upstream by parsing `.git/HEAD` and
/// `.git/config` directly (no git binary). A detached HEAD returns the
/// bare commit with empty branch/remote/url fields.
pub fn get_git_remote_tracking(dir: &PathBuf) -> Result<(String, String, String, String)> {
    let git_head = dir.join(".git/HEAD");
    let git_config = dir.join(".git/config");
    let head_content = read_to_string(&git_head)?;
    if !head_content.starts_with("ref: ") {
        // Detached HEAD: the file holds the commit hash itself.
        let sha = head_content.trim_end().to_string();
        return Ok((sha, "".to_string(), "".to_string(), "".to_string()));
    }
    let local_branch_path = head_content["ref: ".len()..].trim_end();
    let local_branch_name = get_git_branch_name(local_branch_path)?;
    let config_content = read_to_string(&git_config)?;
    // Pass 1: find the `[branch "<name>"]` section and pull its
    // `remote = …` / `merge = …` keys.
    let branch_section = format!("[branch \"{}\"]", local_branch_name);
    let mut remote_name: Option<String> = None;
    let mut remote_branch: Option<String> = None;
    let mut parsing_branch_section = false;
    for line in config_content.lines().map(|l| l.trim()) {
        if line.is_empty() {
            continue;
        }
        if line == branch_section {
            parsing_branch_section = true;
            continue;
        }
        if parsing_branch_section {
            if line.starts_with('[') {
                // The next section header ends the branch section.
                break;
            }
            if line.starts_with("remote = ") {
                remote_name = Some(line["remote = ".len()..].trim().to_string());
            }
            if line.starts_with("merge = ") {
                remote_branch = Some(get_git_branch_name(line["merge = ".len()..].trim())?);
            }
        }
    }
    let remote_name_str = remote_name.ok_or_else(wrap_other_err!(
        "Branch {:?} is not tracking a remote",
        local_branch_name
    ))?;
    let remote_branch_str = remote_branch.unwrap_or("".into());
    // Pass 2: find the `[remote "<name>"]` section and read its url.
    let remote_section = format!("[remote \"{}\"]", remote_name_str);
    let mut remote_url: Option<String> = None;
    let mut parsing_remote_section = false;
    for line in config_content.lines().map(|l| l.trim()) {
        if line.is_empty() {
            continue;
        }
        if line == remote_section {
            parsing_remote_section = true;
            continue;
        }
        if parsing_remote_section {
            if line.starts_with('[') {
                break;
            }
            if line.starts_with("url = ") {
                let mut url = line["url = ".len()..].trim();
                // Normalize away a trailing `.git`.
                url = chop_dot_git(url);
                remote_url = Some(url.to_string());
            }
        }
    }
    let remote_url_str = remote_url.ok_or_else(wrap_other_err!(
        "Could not find URL for remote {:?} in .git/config.",
        remote_name_str
    ))?;
    Ok((
        local_branch_name,
        remote_branch_str,
        remote_name_str,
        remote_url_str,
    ))
}
/// Strip a single trailing `.git` from a repository URL, if present.
pub(crate) fn chop_dot_git(url: &str) -> &str {
    url.strip_suffix(".git").unwrap_or(url)
}
/// Extract the final path segment of a ref path, e.g.
/// `refs/heads/master` -> `master`.
// TODO: branches containing '/' are handled incorrectly — only the last
// segment survives.
fn get_git_branch_name(local_branch_path: &str) -> Result<String> {
    Ok(local_branch_path
        .split('/')
        .last()
        .ok_or_else(wrap_other_err!(
            "Failed to parse branch name of {:?}",
            local_branch_path
        ))?
        .to_string())
}
+46
View File
@@ -0,0 +1,46 @@
use std::{
process::{Command, Stdio},
sync::OnceLock,
};
/// Provenance captured once per run: the git commit of the process's
/// working directory and the wall-clock UTC time.
#[derive(Debug, Default)]
pub struct IdentifierConfig {
    /// HEAD commit of the checkout in `$PWD`; empty when unavailable.
    pub commit: String,
    /// Current time formatted `%Y-%m-%dT%H:%M:%SZ` (UTC), via `date -u`.
    pub time: String,
}
impl IdentifierConfig {
    /// Capture the current commit and UTC timestamp.
    ///
    /// Panics if `$PWD` is unreadable or the `date` command fails; the
    /// commit lookup itself is best-effort and degrades to an empty
    /// string (e.g. when `$PWD` is not a git checkout).
    fn new() -> Self {
        let (commit, _) = crate::cook::fs::get_git_head_rev(
            &std::env::current_dir().expect("unable to get $PWD"),
        )
        .unwrap_or(("".into(), false));
        // better than importing heavy deps like chrono
        let time = String::from_utf8_lossy(
            &Command::new("date")
                .arg("-u")
                .arg("+%Y-%m-%dT%H:%M:%SZ")
                .stdout(Stdio::piped())
                .output()
                .expect("Failed to get current ISO-formatted time")
                .stdout
                .trim_ascii(),
        )
        .into();
        IdentifierConfig { commit, time }
    }
}
/// Process-wide identifier, populated exactly once by [`init_ident`].
static IDENTIFIER_CONFIG: OnceLock<IdentifierConfig> = OnceLock::new();
/// Read the identifier; panics if [`init_ident`] has not run yet.
pub fn get_ident() -> &'static IdentifierConfig {
    IDENTIFIER_CONFIG
        .get()
        .expect("Identifier is not initialized")
}
/// Compute and store the identifier; panics on a second call.
pub fn init_ident() {
    IDENTIFIER_CONFIG
        .set(IdentifierConfig::new())
        .expect("Identifier is initialized twice")
}
+310
View File
@@ -0,0 +1,310 @@
use std::{
collections::BTreeSet,
path::{Path, PathBuf},
};
use pkg::{InstallState, Package, PackageName, PackagePrefix, PackageState};
use pkgar::ext::PackageSrcExt;
use pkgar_core::HeaderFlags;
use crate::{
Error,
config::CookConfig,
cook::{cook_build::BuildResult, fetch, fs::*, pty::PtyOut},
log_to_pty,
recipe::{BuildKind, CookRecipe, OptionalPackageRecipe},
};
/// Package a cooked recipe: sign and archive each stage directory into a
/// `.pkgar` plus a `.toml` metadata file under the recipe's target dir.
///
/// Metapackages (`BuildKind::None`) only get a `stage.toml`. All other
/// recipes are archived with the build-local ed25519 keypair, which is
/// generated under `build/` on first use and reused afterwards.
pub fn package(
    recipe: &CookRecipe,
    build_result: &BuildResult,
    cook_config: &CookConfig,
    logger: &PtyOut,
) -> Result<(), String> {
    let name = &recipe.name;
    let target_dir = &recipe.target_dir();
    let auto_deps = &build_result.auto_deps;
    if recipe.recipe.build.kind == BuildKind::None {
        // metapackages don't have stage dir and optional packages
        package_toml(
            target_dir.join("stage.toml"),
            recipe,
            None,
            None,
            recipe.recipe.package.dependencies.clone(),
            &auto_deps,
        )?;
        return Ok(());
    }
    // Signing keypair shared by every package built from this tree.
    let secret_path = "build/id_ed25519.toml";
    let public_path = "build/id_ed25519.pub.toml";
    if !Path::new(secret_path).is_file() || !Path::new(public_path).is_file() {
        if !Path::new("build").is_dir() {
            create_dir(Path::new("build"))?;
        }
        let (public_key, secret_key) = pkgar_keys::SecretKeyFile::new();
        public_key
            .save(public_path)
            .map_err(|err| format!("failed to save pkgar public key: {:?}", err))?;
        secret_key
            .save(secret_path)
            .map_err(|err| format!("failed to save pkgar secret key: {:?}", err))?;
    }
    // One iteration per package: the main package (`None`) plus any
    // optional sub-packages declared by the recipe.
    let packages = recipe.recipe.get_packages_list();
    for package in packages {
        let (stage_dir, package_file, package_meta) = package_stage_paths(package, target_dir);
        // Rebuild package if stage is newer
        if package_file.is_file() && !build_result.cached {
            log_to_pty!(logger, "DEBUG: updating '{}'", package_file.display());
            remove_all(&package_file)?;
            if package_meta.is_file() {
                remove_all(&package_meta)?;
            }
        }
        if !package_file.is_file() {
            pkgar::create_with_flags(
                secret_path,
                package_file.to_str().unwrap(),
                stage_dir.to_str().unwrap(),
                HeaderFlags::latest(
                    pkgar_core::Architecture::Independent,
                    match cook_config.compressed {
                        true => pkgar_core::Packaging::LZMA2,
                        false => pkgar_core::Packaging::Uncompressed,
                    },
                ),
            )
            .map_err(|err| format!("failed to create pkgar archive: {:?}", err))?;
        }
        // Sub-packages depend on their parent (any prefix); the main
        // package carries the auto-detected dependencies.
        let deps = if package.is_some() {
            BTreeSet::from([name.with_prefix(PackagePrefix::Any)])
        } else {
            auto_deps.clone()
        };
        if !package_meta.is_file() {
            let name = match package {
                Some(p) => PackageName::new(format!("{}.{}", name.name(), p.name))
                    .map_err(|e| format!("{}", e))?,
                None => name.clone(),
            };
            let package_deps = match package {
                Some(p) => p
                    .dependencies
                    .iter()
                    .map(|dep| {
                        // An empty dep name means "this recipe's own
                        // package with the given suffix".
                        if dep.name().is_empty() {
                            name.with_suffix(dep.suffix())
                        } else {
                            dep.clone()
                        }
                    })
                    .collect(),
                None => recipe.recipe.package.dependencies.clone(),
            };
            package_toml(
                package_meta,
                recipe,
                Some((Path::new(public_path), &package_file)),
                package,
                package_deps,
                &deps,
            )?;
        }
    }
    Ok(())
}
/// Write a package's `.toml` metadata to `toml_path`.
///
/// Merges `auto_deps` into `package_deps`, hashes and sizes the archive
/// when one is given (metapackages pass `None` and get an empty hash and
/// zero sizes), and stamps the source/commit/time identifiers from the
/// fetch info. For LZMA2 archives, `storage_size` adds each entry's
/// unpacked size so it reflects the installed footprint rather than the
/// download size.
pub fn package_toml(
    toml_path: PathBuf,
    recipe: &CookRecipe,
    package_file: Option<(&Path, &PathBuf)>,
    package_suffix: Option<&OptionalPackageRecipe>,
    mut package_deps: Vec<PackageName>,
    auto_deps: &BTreeSet<PackageName>,
) -> Result<(), String> {
    // Append auto-detected dependencies, skipping duplicates.
    for dep in auto_deps.iter() {
        if !package_deps.contains(dep) {
            package_deps.push(dep.clone());
        }
    }
    let (hash, network_size, storage_size) = if let Some((pkey_path, archive_path)) = package_file {
        use pkgar_core::PackageSrc;
        let pkey = pkgar_keys::PublicKeyFile::open(pkey_path)
            .map_err(|e| format!("Unable to read public key: {e:?}"))?
            .pkey;
        let mut package = pkgar::PackageFile::new(archive_path, &pkey).map_err(|e| {
            format!(
                "Unable to read packaged pkgar file {}: {e:?}",
                archive_path.display(),
            )
        })?;
        let mt = std::fs::metadata(archive_path).map_err(|e| {
            format!(
                "Unable to read packaged pkgar file {}: {e:?}",
                archive_path.display(),
            )
        })?;
        // Network size is the on-disk archive size.
        let package_size = mt.len();
        let header = package.header();
        let storage_size = match header.flags.packaging() {
            pkgar_core::Packaging::LZMA2 => {
                // NOTE(review): `?` here relies on `Error` converting
                // into `String` — confirm the `From` impl exists.
                let mut size = header
                    .total_size()
                    .map_err(|e| Error::Pkgar(pkgar::Error::Core(e)))?
                    as u64;
                let entries = package
                    .read_entries()
                    .map_err(|e| format!("Unable to get lzma entry: {e}"))?;
                for entry in entries {
                    let data_reader = package
                        .data_reader(&entry)
                        .map_err(|e| format!("Unable to read lzma entry: {e}"))?;
                    size += data_reader.unpacked_size;
                    // Hand the inner reader back so the next entry can
                    // be opened from the same source.
                    package
                        .restore_reader(data_reader.into_inner())
                        .map_err(|e| format!("Unable to put lzma entry: {e}"))?;
                }
                size
            }
            _ => package_size,
        };
        (
            blake3::Hash::from_bytes(package.header().blake3)
                .to_hex()
                .to_string(),
            package_size,
            storage_size,
        )
    } else {
        // Metapackage: no archive to measure.
        ("".into(), 0, 0)
    };
    let ident_source = fetch::fetch_get_source_info(recipe)?;
    let package = Package {
        name: PackageName::new(get_package_name(
            recipe.name.without_prefix(),
            package_suffix,
        ))
        .unwrap(),
        version: recipe.guess_version().unwrap_or("TODO".into()),
        target: recipe.target.to_string(),
        blake3: hash,
        network_size,
        storage_size,
        depends: package_deps,
        commit_identifier: ident_source.commit_identifier,
        source_identifier: ident_source.source_identifier,
        time_identifier: ident_source.time_identifier,
        ..Default::default()
    };
    serialize_and_write(&toml_path, &package)?;
    return Ok(());
}
/// Target triple a package builds for: the host triple for host-prefixed
/// packages, the cross target otherwise.
pub fn package_target(name: &PackageName) -> &'static str {
    match name.is_host() {
        true => redoxer::host_target(),
        false => redoxer::target(),
    }
}
/// Stage-directory, `.pkgar`, and `.toml` paths for `package` under the
/// recipe's target dir, descending into a cross-target subdirectory when
/// cross-compiling.
pub fn package_stage_paths(
    package: Option<&OptionalPackageRecipe>,
    target_dir: &Path,
) -> (PathBuf, PathBuf, PathBuf) {
    let mut target_dir = target_dir.to_path_buf();
    if let Some(cross_target) = crate::cross_target() {
        // TODO: automatically pass COOKBOOK_CROSS_GNU_TARGET?
        target_dir = target_dir.join(cross_target)
    }
    package_name_paths(package, &target_dir, "stage")
}
/// Source-directory, `.pkgar`, and `.toml` paths for `package` under
/// `target_dir` (no cross-target subdirectory, unlike the stage paths).
pub fn package_source_paths(
    package: Option<&OptionalPackageRecipe>,
    target_dir: &Path,
) -> (PathBuf, PathBuf, PathBuf) {
    package_name_paths(package, target_dir, "source")
}
/// Build `(dir, dir + ".pkgar", dir + ".toml")` for the base `name`
/// (e.g. "stage"), suffixed with the optional sub-package name.
fn package_name_paths(
    package: Option<&OptionalPackageRecipe>,
    target_dir: &Path,
    name: &str,
) -> (PathBuf, PathBuf, PathBuf) {
    let prefix_name = get_package_name(name, package);
    let package_stage = target_dir.join(&prefix_name);
    // `with_added_extension` appends rather than replaces, so a dotted
    // package name keeps its full prefix.
    let package_file = package_stage.with_added_extension("pkgar");
    let package_meta = package_stage.with_added_extension("toml");
    (package_stage, package_file, package_meta)
}
/// Render the on-disk name for an optional package: `name` alone, or
/// `name.<sub-package>` when `package` is present.
pub fn get_package_name(name: &str, package: Option<&OptionalPackageRecipe>) -> String {
    get_package_name_inner(name, package.map(|p| p.name.as_str()))
}
/// Join a base name with an optional sub-package suffix:
/// `("a", Some("b"))` -> `"a.b"`, `("a", None)` -> `"a"`.
fn get_package_name_inner(name: &str, package: Option<&str>) -> String {
    match package {
        Some(suffix) => format!("{name}.{suffix}"),
        None => name.to_string(),
    }
}
/// Install (or skip) a locally-built `.pkgar` into `sysroot_dir`,
/// updating `state.installed`.
///
/// Returns `Ok(true)` when the package is already installed with the
/// same blake3 hash and `reinstall` is false (nothing done); otherwise
/// extracts the archive and returns `Ok(false)`.
pub fn package_handle_push(
    state: &mut PackageState,
    archive_path: &Path,
    sysroot_dir: &Path,
    reinstall: bool,
) -> crate::Result<bool> {
    // Metadata sits next to the archive as `<name>.toml`.
    let archive_toml = archive_path.with_extension("toml");
    let pkey_path = "build/id_ed25519.pub.toml";
    let pkg_toml = Package::from_file(&archive_toml)?;
    match state.installed.get(&pkg_toml.name) {
        // Same content already present: skip.
        Some(s) if !reinstall && pkg_toml.blake3 == s.blake3 => Ok(true),
        Some(s) => {
            // "local" is what remote name from installer is hardcoded into
            let remote_name = "local".to_string();
            // Preserve the existing manual/dependents bookkeeping.
            let install_state =
                InstallState::from_package(&pkg_toml, remote_name, s.manual, s.dependents.clone());
            // TODO: use pkgar::replace unless forced reinstall
            pkgar::extract(pkey_path, &archive_path, sysroot_dir)?;
            state.installed.insert(pkg_toml.name.clone(), install_state);
            Ok(false)
        }
        None => {
            // "local" is what remote name from installer is hardcoded into
            let remote_name = "local".to_string();
            // TODO: Handle manual & depedents
            let install_state =
                InstallState::from_package(&pkg_toml, remote_name, true, BTreeSet::new());
            pkgar::extract(pkey_path, &archive_path, sysroot_dir)?;
            // TODO: Inject dependencies
            // TODO: Check if we need to inject remote key
            state.installed.insert(pkg_toml.name.clone(), install_state);
            Ok(false)
        }
    }
}
+348
View File
@@ -0,0 +1,348 @@
use libc::{self, winsize};
use std::fs::File;
use std::io::{Read, Write};
use std::os::fd::FromRawFd;
use std::os::unix::io::AsRawFd;
use std::os::unix::process::CommandExt;
use std::process::Child;
use std::time::Duration;
use std::{io, mem, ptr};
use std::{
io::{PipeReader, PipeWriter},
process::Command,
};
pub use std::os::unix::io::RawFd;
use crate::{Error, Result, wrap_io_err};
/// Write one formatted line to the log pipe of a `PtyOut`, falling back
/// to stderr when no logger is present. The pipe writer is cloned so
/// the macro works through a shared reference; write errors are ignored
/// (best-effort logging).
macro_rules! log_to_pty {
    ($logger:expr, $($arg:tt)+) => {
        if let Some(pair) = $logger.as_ref() {
            use std::io::Write;
            let mut logfd = pair.1.try_clone().unwrap();
            let _ = logfd.write(format!($($arg)+).as_bytes());
            let _ = logfd.write(&[b'\n']);
        } else {
            eprintln!($($arg)+);
        }
    };
}
pub(crate) use log_to_pty;
/// Optional output sink for build commands: the pty slave a child
/// writes to plus the pipe writer used for plain log lines. `None`
/// makes `log_to_pty!` fall back to stderr.
pub type PtyOut<'a> = Option<(&'a mut UnixSlavePty, &'a mut PipeWriter)>;
/// Open an 80x24 pty plus a plain log pipe.
///
/// Returns `(pty_reader, log_reader, (slave, log_writer))`: the caller
/// consumes the two readers while the `(slave, writer)` pair forms the
/// [`PtyOut`] handed to spawned commands and `log_to_pty!`.
///
/// Panics if the pty or the pipe cannot be created.
pub fn setup_pty() -> (
    Box<dyn Read + Send>,
    PipeReader,
    (UnixSlavePty, std::io::PipeWriter),
) {
    let pty_system = UnixPtySystem::default();
    let pair = pty_system
        .openpty(PtySize {
            rows: 24, // Standard terminal size
            cols: 80, // Standard terminal size
            ..Default::default()
        })
        .expect("Unable to open pty");
    // TODO: There's no way to handle stdin
    // The reader owns a dup'ed fd, so it stays usable after
    // `pair.master` is dropped at the end of this function.
    let pty_reader = pair
        .master
        .try_clone_reader()
        .expect("Unable to clone pty reader");
    let (log_reader, log_writer) = std::io::pipe().expect("Failed to create log pipe");
    let pipes = (pair.slave, log_writer);
    (pty_reader, log_reader, pipes)
}
/// Best-effort flush of both halves of the logger; no-op when absent.
pub fn flush_pty(logger: &mut PtyOut) {
    let Some((pty, file)) = logger else {
        return;
    };
    // Not sure if flush actually working
    let _ = pty.flush();
    // NOTE(review): the sleep presumably gives the pty reader a moment
    // to drain before the pipe writer flushes — confirm it is needed.
    std::thread::sleep(Duration::from_millis(10));
    let _ = file.flush();
}
pub fn spawn_to_pipe(command: &mut Command, stdout_pipe: &PtyOut) -> Result<Child> {
match stdout_pipe {
Some(stdout) => stdout.0.spawn_command(command.into()),
None => Ok(command.spawn().map_err(wrap_io_err!("Spawning"))?),
}
}
/// Write a single line of `text` to the logger (or stderr fallback).
pub fn write_to_pty(pty: &PtyOut, text: &str) {
    log_to_pty!(pty, "{}", text);
}
//
// based on portable-pty crate
// copied here since it isn't flexible enough
//
/// Factory for master/slave pty pairs (trimmed-down copy of the
/// `portable-pty` crate's Unix implementation).
#[derive(Default)]
pub struct UnixPtySystem {}
/// Represents the size of the visible display area in the pty.
/// The `Default` impl yields a conventional 80x24 terminal.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct PtySize {
    /// The number of lines of text
    pub rows: u16,
    /// The number of columns of text
    pub cols: u16,
    /// The width of a cell in pixels. Note that some systems never
    /// fill this value and ignore it.
    pub pixel_width: u16,
    /// The height of a cell in pixels. Note that some systems never
    /// fill this value and ignore it.
    pub pixel_height: u16,
}
impl Default for PtySize {
fn default() -> Self {
PtySize {
rows: 24,
cols: 80,
pixel_width: 0,
pixel_height: 0,
}
}
}
/// Open a raw master/slave pty pair at the requested size, with both
/// descriptors marked close-on-exec.
fn openpty(size: PtySize) -> Result<(UnixMasterPty, UnixSlavePty)> {
    let mut master: RawFd = -1;
    let mut slave: RawFd = -1;
    let mut size = winsize {
        ws_row: size.rows,
        ws_col: size.cols,
        ws_xpixel: size.pixel_width,
        ws_ypixel: size.pixel_height,
    };
    // SAFETY: the out-pointers reference valid stack variables; the name
    // and termios arguments may be null per openpty(3).
    let result = unsafe {
        // BSDish systems may require mut pointers to some args
        #[allow(clippy::unnecessary_mut_passed)]
        libc::openpty(
            &mut master,
            &mut slave,
            ptr::null_mut(),
            ptr::null_mut(),
            &mut size,
        )
    };
    if result != 0 {
        return Err(Error::from_last_io_error("Opening openpty"));
    }
    // SAFETY: on success, openpty returned two freshly opened fds that
    // we own exclusively; `File` takes over closing them.
    let master = UnixMasterPty {
        fd: PtyFd(unsafe { File::from_raw_fd(master) }),
    };
    let slave = UnixSlavePty {
        fd: PtyFd(unsafe { File::from_raw_fd(slave) }),
    };
    // Ensure that these descriptors will get closed when we execute
    // the child process. This is done after constructing the Pty
    // instances so that we ensure that the Ptys get drop()'d if
    // the cloexec() functions fail (unlikely!).
    cloexec(master.fd.as_raw_fd())?;
    cloexec(slave.fd.as_raw_fd())?;
    Ok((master, slave))
}
/// A connected master/slave pty pair, as produced by
/// [`UnixPtySystem::openpty`].
pub struct PtyPair {
    // slave is listed first so that it is dropped first.
    // The drop order is stable and specified by rust rfc 1857
    pub slave: UnixSlavePty,
    pub master: UnixMasterPty,
}
impl UnixPtySystem {
    /// Open a new master/slave pty pair of the given size.
    fn openpty(&self, size: PtySize) -> Result<PtyPair> {
        let (master, slave) = openpty(size)?;
        // Field-init shorthand replaces the redundant `master: master`
        // form (clippy: redundant_field_names).
        Ok(PtyPair { master, slave })
    }
}
/// Owner of one pty file descriptor; the wrapped `File` closes it on drop.
struct PtyFd(pub File);
/// Let a `PtyFd` be used anywhere a `&File` is expected.
impl std::ops::Deref for PtyFd {
    type Target = File;
    fn deref(&self) -> &File {
        &self.0
    }
}
impl Read for PtyFd {
    /// Read from the pty, mapping `EIO` to a clean EOF (`Ok(0)`).
    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
        match self.0.read(buf) {
            Err(ref e) if e.raw_os_error() == Some(libc::EIO) => {
                // EIO indicates that the slave pty has been closed.
                // Treat this as EOF so that std::io::Read::read_to_string
                // and similar functions gracefully terminate when they
                // encounter this condition
                Ok(0)
            }
            x => x,
        }
    }
}
impl PtyFd {
    /// Push a new window size to the pty (`TIOCSWINSZ`).
    fn resize(&self, size: PtySize) -> Result<()> {
        let ws_size = winsize {
            ws_row: size.rows,
            ws_col: size.cols,
            ws_xpixel: size.pixel_width,
            ws_ypixel: size.pixel_height,
        };
        // SAFETY: ioctl reads `ws_size` through a valid pointer, on a
        // descriptor we own.
        if unsafe {
            libc::ioctl(
                self.0.as_raw_fd(),
                libc::TIOCSWINSZ as _,
                &ws_size as *const _,
            )
        } != 0
        {
            return Err(Error::from_last_io_error("ioctl resize (TIOCSWINSZ)"));
        }
        Ok(())
    }
    /// Query the pty's current window size (`TIOCGWINSZ`).
    fn get_size(&self) -> Result<PtySize> {
        // SAFETY: `winsize` is a plain-data C struct, so zeroed bytes
        // form a valid value; ioctl then overwrites it through a valid
        // pointer.
        let mut size: winsize = unsafe { mem::zeroed() };
        if unsafe {
            libc::ioctl(
                self.0.as_raw_fd(),
                libc::TIOCGWINSZ as _,
                &mut size as *mut _,
            )
        } != 0
        {
            return Err(Error::from_last_io_error("ioctl get size (TIOCGWINSZ)"));
        }
        Ok(PtySize {
            rows: size.ws_row,
            cols: size.ws_col,
            pixel_width: size.ws_xpixel,
            pixel_height: size.ws_ypixel,
        })
    }
    /// Spawn `cmd` with stdout/stderr attached to clones of this fd,
    /// resetting signal dispositions and starting a new session in the
    /// child before exec.
    fn spawn_command(&self, cmd: &mut Command) -> Result<std::process::Child> {
        // SAFETY: the pre_exec closure runs after fork and before exec,
        // and only makes libc calls (signal, sigprocmask, setsid).
        unsafe {
            cmd
                // .stdin(self.as_stdio()?)
                .stdout(self.try_clone().map_err(wrap_io_err!("Cloning pty"))?)
                .stderr(self.try_clone().map_err(wrap_io_err!("Cloning pty"))?)
                .pre_exec(move || {
                    // Clean up a few things before we exec the program
                    // Clear out any potentially problematic signal
                    // dispositions that we might have inherited
                    for signo in &[
                        libc::SIGCHLD,
                        libc::SIGHUP,
                        libc::SIGINT,
                        libc::SIGQUIT,
                        libc::SIGTERM,
                        libc::SIGALRM,
                    ] {
                        libc::signal(*signo, libc::SIG_DFL);
                    }
                    let empty_set: libc::sigset_t = std::mem::zeroed();
                    libc::sigprocmask(libc::SIG_SETMASK, &empty_set, std::ptr::null_mut());
                    // Establish ourselves as a session leader.
                    if libc::setsid() == -1 {
                        return Err(io::Error::last_os_error());
                    }
                    Ok(())
                })
        };
        let mut child = cmd.spawn().map_err(wrap_io_err!("Spawning cmd"))?;
        // Ensure that we close out the slave fds that Child retains;
        // they are not what we need (we need the master side to reference
        // them) and won't work in the usual way anyway.
        // In practice these are None, but it seems best to be move them
        // out in case the behavior of Command changes in the future.
        // child.stdin.take();
        child.stdout.take();
        child.stderr.take();
        Ok(child)
    }
    /// Flush buffered writes on the underlying `File`.
    fn flush(&mut self) -> Result<()> {
        self.0.flush().map_err(wrap_io_err!("Flushing pty"))
    }
}
/// Represents the master end of a pty.
/// The file descriptor will be closed when the Pty is dropped.
/// Reads on cloned readers return EOF once the slave side closes
/// (see `PtyFd`'s `Read` impl).
pub struct UnixMasterPty {
    fd: PtyFd,
}
/// Represents the slave end of a pty.
/// The file descriptor will be closed when the Pty is dropped.
/// Child processes are attached to this side via `spawn_command`.
pub struct UnixSlavePty {
    fd: PtyFd,
}
/// Helper function to set the close-on-exec flag for a raw descriptor
fn cloexec(fd: RawFd) -> Result<()> {
    // SAFETY: F_GETFD only reads descriptor flags of the caller-supplied fd.
    let flags = unsafe { libc::fcntl(fd, libc::F_GETFD) };
    if flags == -1 {
        return Err(Error::from_last_io_error("fcntl to read flags"));
    }
    // SAFETY: F_SETFD only updates descriptor flags; OR-ing in
    // FD_CLOEXEC preserves whatever flags were already set.
    let result = unsafe { libc::fcntl(fd, libc::F_SETFD, flags | libc::FD_CLOEXEC) };
    if result == -1 {
        return Err(Error::from_last_io_error("fcntl to set CLOEXEC"));
    }
    Ok(())
}
impl UnixSlavePty {
    /// Spawn `builder` with this pty slave as its stdout/stderr.
    fn spawn_command(&self, builder: &mut Command) -> Result<std::process::Child> {
        // Forward directly; `Ok(expr?)` was a needless round-trip
        // (clippy: needless_question_mark).
        self.fd.spawn_command(builder)
    }
    /// Flush the underlying pty fd.
    fn flush(&mut self) -> Result<()> {
        self.fd.flush()
    }
}
impl UnixMasterPty {
    /// Set the pty's window size.
    #[allow(unused)]
    fn resize(&self, size: PtySize) -> Result<()> {
        self.fd.resize(size)
    }
    /// Read the pty's current window size.
    #[allow(unused)]
    fn get_size(&self) -> Result<PtySize> {
        self.fd.get_size()
    }
    /// Duplicate the master fd into an independent reader; the clone
    /// owns its own descriptor and remains readable after `self` drops.
    fn try_clone_reader(&self) -> Result<Box<dyn Read + Send>> {
        let fd = PtyFd(
            self.fd
                .try_clone()
                .map_err(wrap_io_err!("Cloning pty fd"))?,
        );
        Ok(Box::new(fd))
    }
}
+417
View File
@@ -0,0 +1,417 @@
// Scripts here is executed using "cookbook_redoxer env" where CC, RUSTFLAGS, etc. defined.
// Look up redoxer env script if you want to see how they work.
/// Shell helpers sourced by every recipe build script.
///
/// Fix: the `DYNAMIC_INIT` check for a `reexport_flags` function used
/// `[ function = $(type -t reexport_flags) ]`. When the function is not
/// defined, the substitution expands to nothing and `[` sees
/// `function =`, which is a test syntax error ("unary operator
/// expected") printed to stderr. Quoting the substitution makes the
/// comparison well-formed in both cases; the branch outcome is
/// unchanged.
pub(crate) static SHARED_PRESCRIPT: &str = r#"
# Build dynamically
function DYNAMIC_INIT {
case "${TARGET}" in
"i586-unknown-redox" | "riscv64gc-unknown-redox")
[ -z "${COOKBOOK_VERBOSE}" ] || echo "WARN: ${TARGET} does not support dynamic linking." >&2
return
;;
esac
[ -z "${COOKBOOK_VERBOSE}" ] || echo "DEBUG: Program is being compiled dynamically."
COOKBOOK_CONFIGURE_FLAGS=(
--host="${GNU_TARGET}"
--prefix="/usr"
--enable-shared
--disable-static
)
COOKBOOK_CMAKE_FLAGS=(
-DBUILD_SHARED_LIBS=True
-DENABLE_SHARED=True
-DENABLE_STATIC=False
)
COOKBOOK_MESON_FLAGS=(
--buildtype release
--wrap-mode nofallback
-Ddefault_library=shared
-Dprefix=/usr
)
# TODO: check paths for spaces
export LDFLAGS="${USER_LDFLAGS}-Wl,-rpath-link,${COOKBOOK_SYSROOT}/lib -L${COOKBOOK_SYSROOT}/lib"
export RUSTFLAGS="-C target-feature=-crt-static -L native=${COOKBOOK_SYSROOT}/lib -C link-arg=-Wl,-rpath-link,${COOKBOOK_SYSROOT}/lib"
export COOKBOOK_DYNAMIC=1
if [ "$(type -t reexport_flags)" = "function" ]; then
reexport_flags
fi
}
COOKBOOK_AUTORECONF="autoreconf"
autotools_recursive_regenerate() {
for f in $(find . -name configure.ac -o -name configure.in -type f | sort); do
echo "* autotools regen in '$(dirname $f)'..."
( cd "$(dirname "$f")" && "${COOKBOOK_AUTORECONF}" -fvi "$@" -I${COOKBOOK_HOST_SYSROOT}/share/aclocal )
done
}
# Build both dynamically and statically
function DYNAMIC_STATIC_INIT {
DYNAMIC_INIT
if [ "${COOKBOOK_DYNAMIC}" == "1" ]
then
COOKBOOK_CONFIGURE_FLAGS=(
--host="${GNU_TARGET}"
--prefix="/usr"
--enable-shared
--enable-static
)
COOKBOOK_CMAKE_FLAGS=(
-DBUILD_SHARED_LIBS=True
-DENABLE_SHARED=True
-DENABLE_STATIC=True
)
COOKBOOK_MESON_FLAGS=(
--buildtype release
--wrap-mode nofallback
-Ddefault_library=both
-Dprefix=/usr
)
fi
}
function GNU_CONFIG_GET {
wget -O "$1" "https://gitlab.redox-os.org/redox-os/gnu-config/-/raw/master/config.sub?inline=false"
}
"#;
pub(crate) static BUILD_PRESCRIPT: &str = r#"
# Add cookbook bins to path
export PATH="${COOKBOOK_ROOT}/bin:${PATH}"
# Add toolchain dir to path if exists
if [ ! -z "${COOKBOOK_TOOLCHAIN}" ]
then
export PATH="${COOKBOOK_TOOLCHAIN}/bin:${PATH}"
export LD_LIBRARY_PATH="${COOKBOOK_TOOLCHAIN}/lib:${LD_LIBRARY_PATH}"
fi
# This puts cargo build artifacts in the build directory
export CARGO_TARGET_DIR="${COOKBOOK_BUILD}/target"
# This adds the sysroot includes for most C compilation
#TODO: check paths for spaces!
export CPPFLAGS="${CPPFLAGS:+$CPPFLAGS }-I${COOKBOOK_SYSROOT}/include"
# This adds the sysroot libraries and compiles binaries statically for most C compilation
#TODO: check paths for spaces!
USER_LDFLAGS="${LDFLAGS:+$LDFLAGS }"
export LDFLAGS="${USER_LDFLAGS}-L${COOKBOOK_SYSROOT}/lib --static"
# This reexport C variables into custom build script that can be consumed by cc crate
function reexport_flags {
target=${TARGET//-/_}
export CFLAGS_${target}="${CFLAGS:+$CFLAGS }${CPPFLAGS}"
export CXXFLAGS_${target}="${CXXFLAGS:+$CXXFLAGS }${CPPFLAGS}"
export LDFLAGS_${target}="${LDFLAGS}"
}
# These ensure that pkg-config gets the right flags from the sysroot
if [ "${TARGET}" != "${COOKBOOK_HOST_TARGET}" ]
then
export PKG_CONFIG_ALLOW_CROSS=1
export PKG_CONFIG_PATH=
export PKG_CONFIG_LIBDIR="${COOKBOOK_SYSROOT}/lib/pkgconfig"
export PKG_CONFIG_SYSROOT_DIR="${COOKBOOK_SYSROOT}"
fi
# To build the debug version of a Cargo program, add COOKBOOK_DEBUG=true, and
# to not strip symbols from the final package, add COOKBOOK_NOSTRIP=true to the recipe
# (or to your environment) before calling cookbook_cargo or cookbook_cargo_packages
build_type=release
install_flags=--no-track
build_flags=--release
if [ ! -z "${COOKBOOK_DEBUG}" ]
then
install_flags+=" --debug"
build_flags=
build_type=debug
export CPPFLAGS="${CPPFLAGS} -g"
fi
if [ ! -z "${COOKBOOK_OFFLINE}" ]
then
build_flags+=" --offline"
install_flags+=" --offline"
fi
reexport_flags
COOKBOOK_CARGO="${COOKBOOK_REDOXER}"
COOKBOOK_CARGO_FLAGS=(
--locked
)
# cargo template using cargo install
function cookbook_cargo {
"${COOKBOOK_CARGO}" install \
--path "${COOKBOOK_SOURCE}${COOKBOOK_CARGO_PATH:+/$COOKBOOK_CARGO_PATH}" \
--root "${COOKBOOK_STAGE}/usr" \
-j "${COOKBOOK_MAKE_JOBS}" ${install_flags} \
${COOKBOOK_CARGO_FLAGS[@]} "$@"
}
# cargo template using cargo build (prefixed name)
function cookbook_cargo_build {
recipe="${recipe:-$(basename "${COOKBOOK_RECIPE}")}"
bin_dir="${bin_dir:-.}"
bin_flags="${bin_flags:-}"
bin_name="${bin_name:-$(basename "${COOKBOOK_CARGO_PATH}")}"
bin_final_name="${bin_final_name:-${recipe}_${bin_name//_/-}}"
mkdir -pv "${COOKBOOK_STAGE}/usr/bin"
"${COOKBOOK_CARGO}" build \
--manifest-path "${COOKBOOK_SOURCE}${COOKBOOK_CARGO_PATH:+/$COOKBOOK_CARGO_PATH}/Cargo.toml" \
${bin_flags} ${build_flags} -j "${COOKBOOK_MAKE_JOBS}" ${COOKBOOK_CARGO_FLAGS[@]}
cp -v \
"target/${TARGET}/${build_type}/${bin_dir}/${bin_name}" \
"${COOKBOOK_STAGE}/usr/bin/${bin_final_name}"
unset bin_name bin_flags bin_dir bin_final_name
}
# helper for installing binaries that are cargo examples
# Usage: cookbook_cargo_examples example1 [example2 ...]
# Each example is built via cookbook_cargo_build with --example and installed
# from the target's examples/ directory.
function cookbook_cargo_examples {
    recipe="$(basename "${COOKBOOK_RECIPE}")"
    for example in "$@"
    do
        bin_dir="examples" bin_name="${example}" bin_flags="--example ${example}" cookbook_cargo_build
    done
}
# helper for installing binaries that are cargo packages
# Usage: cookbook_cargo_packages pkg1 [pkg2 ...]
# Each workspace package is built via cookbook_cargo_build with --package and
# installed with underscores converted to dashes (no recipe prefix here).
function cookbook_cargo_packages {
    recipe="$(basename "${COOKBOOK_RECIPE}")"
    mkdir -pv "${COOKBOOK_STAGE}/usr/bin"
    for package in "$@"
    do
        bin_name="${package}" bin_flags="--package ${package}" bin_final_name="${package//_/-}" cookbook_cargo_build
    done
}
# configure template
COOKBOOK_CONFIGURE="${COOKBOOK_SOURCE}/configure"
# Default autotools flags: cross host triple, /usr prefix, static-only builds
COOKBOOK_CONFIGURE_FLAGS=(
    --host="${GNU_TARGET}"
    --prefix="/usr"
    --disable-shared
    --enable-static
)
COOKBOOK_MAKE="make"
# configure template: configure, build, and stage-install in one step.
# Extra arguments are appended to the ./configure invocation.
function cookbook_configure {
    "${COOKBOOK_CONFIGURE}" "${COOKBOOK_CONFIGURE_FLAGS[@]}" "$@"
    "${COOKBOOK_MAKE}" -j "${COOKBOOK_MAKE_JOBS}"
    # install into the stage directory, not the host filesystem
    "${COOKBOOK_MAKE}" install DESTDIR="${COOKBOOK_STAGE}"
}
COOKBOOK_CMAKE="cmake"
COOKBOOK_NINJA="ninja"
# Default CMake flags: prefer static libraries everywhere
COOKBOOK_CMAKE_FLAGS=(
    -DBUILD_SHARED_LIBS=False
    -DENABLE_SHARED=False
    -DENABLE_STATIC=True
)
# Emit a CMake toolchain file for cross-compiling against the sysroot.
#   $1 = target triple   $2 = gcc tool prefix ("" for host builds)
#   $3 = sysroot path    $4 = output toolchain file
function generate_cookbook_cmake_file {
    target=$1
    gcc_prefix=$2
    sysroot=$3
    file=$4
    arch=$(echo "$target" | cut -d - -f1)
    os=$(echo "$target" | cut -d - -f3)
    if [ "$os" = "linux" ]; then
        SYSTEM_NAME="Linux"
    else
        SYSTEM_NAME="UnixPaths"
    fi
    # BUGFIX: the previous "set(CMAKE_PREFIX_PATH, ...)" had a stray comma,
    # which made CMake define a variable literally named "CMAKE_PREFIX_PATH,"
    # and left the real CMAKE_PREFIX_PATH unset.
    cat > "$file" <<EOF
set(CMAKE_AR ${gcc_prefix}ar)
set(CMAKE_CXX_COMPILER ${gcc_prefix}g++)
set(CMAKE_C_COMPILER ${gcc_prefix}gcc)
set(CMAKE_FIND_ROOT_PATH ${sysroot})
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
set(CMAKE_PLATFORM_USES_PATH_WHEN_NO_SONAME 1)
set(CMAKE_PREFIX_PATH ${sysroot})
set(CMAKE_RANLIB ${gcc_prefix}ranlib)
set(CMAKE_SHARED_LIBRARY_SONAME_C_FLAG "-Wl,-soname,")
set(CMAKE_SYSTEM_NAME ${SYSTEM_NAME})
set(CMAKE_SYSTEM_PROCESSOR ${arch})
EOF
    # Only forward C/C++ flags when generating for the primary build target
    if [ "$target" = "$TARGET" ]
    then
        echo "set(CMAKE_C_FLAGS \"${CFLAGS} ${CPPFLAGS}\")" >> "$file"
        echo "set(CMAKE_CXX_FLAGS \"${CFLAGS} ${CPPFLAGS}\")" >> "$file"
    fi
    # Optional compiler launcher (e.g. a caching wrapper)
    if [ -n "${CC_WRAPPER}" ]
    then
        echo "set(CMAKE_C_COMPILER_LAUNCHER ${CC_WRAPPER})" >> "$file"
        echo "set(CMAKE_CXX_COMPILER_LAUNCHER ${CC_WRAPPER})" >> "$file"
    fi
}
# cmake template: generate a toolchain file, configure with Ninja, build, and
# stage-install. Extra arguments are passed through to cmake.
function cookbook_cmake {
    # Host builds need no cross-tool prefix
    if [ "$TARGET" = "$COOKBOOK_HOST_TARGET" ]; then
        GCC_PREFIX=
    else
        GCC_PREFIX=$GNU_TARGET-
    fi
    generate_cookbook_cmake_file "$TARGET" "$GCC_PREFIX" "$COOKBOOK_SYSROOT" cross_file.cmake
    "${COOKBOOK_CMAKE}" "${COOKBOOK_SOURCE}" \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_CROSSCOMPILING=True \
        -DCMAKE_INSTALL_INCLUDEDIR=include \
        -DCMAKE_INSTALL_LIBDIR=lib \
        -DCMAKE_INSTALL_OLDINCLUDEDIR=/include \
        -DCMAKE_INSTALL_PREFIX=/usr \
        -DCMAKE_INSTALL_SBINDIR=bin \
        -DCMAKE_TOOLCHAIN_FILE=cross_file.cmake \
        -GNinja \
        -Wno-dev \
        "${COOKBOOK_CMAKE_FLAGS[@]}" \
        "$@"
    "${COOKBOOK_NINJA}" -j"${COOKBOOK_MAKE_JOBS}"
    # install into the stage directory, not the host filesystem
    DESTDIR="${COOKBOOK_STAGE}" "${COOKBOOK_NINJA}" install -j"${COOKBOOK_MAKE_JOBS}"
}
COOKBOOK_MESON="meson"
# Default Meson options: release build, static libs, never fall back to
# bundled wrap dependencies
COOKBOOK_MESON_FLAGS=(
    --buildtype release
    --wrap-mode nofallback
    -Ddefault_library=static
    -Dprefix=/usr
)
# meson template: write a cross file describing the target toolchain, then
# setup, build, and stage-install with ninja. Extra arguments go to meson.
function cookbook_meson {
    # TODO: do this in rust, to handle path spaces as well
    # Render a whitespace-separated flag string as a quoted meson array body,
    # e.g. "-O2 -g" -> "'-O2', '-g'"
    function format_flags {
        local flags=($1)
        local formatted=""
        for i in "${!flags[@]}"; do
            formatted+="'${flags[$i]}'"
            if [ $i -lt $((${#flags[@]} - 1)) ]; then
                formatted+=", "
            fi
        done
        echo "$formatted"
    }
    echo "[binaries]" > cross_file.txt
    # CC/CXX may contain wrapper + compiler; split into a meson array
    echo "c = [$(printf "'%s', " $CC | sed 's/, $//')]" >> cross_file.txt
    echo "cpp = [$(printf "'%s', " $CXX | sed 's/, $//')]" >> cross_file.txt
    echo "ar = '${AR}'" >> cross_file.txt
    echo "strip = '${STRIP}'" >> cross_file.txt
    echo "pkg-config = '${PKG_CONFIG}'" >> cross_file.txt
    echo "llvm-config = '${TARGET}-llvm-config'" >> cross_file.txt
    echo "glib-compile-resources = 'glib-compile-resources'" >> cross_file.txt
    echo "glib-compile-schemas = 'glib-compile-schemas'" >> cross_file.txt
    echo "[host_machine]" >> cross_file.txt
    echo "system = '$(echo "${TARGET}" | cut -d - -f3)'" >> cross_file.txt
    echo "cpu_family = '$(echo "${TARGET}" | cut -d - -f1)'" >> cross_file.txt
    echo "cpu = '$(echo "${TARGET}" | cut -d - -f1)'" >> cross_file.txt
    echo "endian = 'little'" >> cross_file.txt
    echo "[built-in options]" >> cross_file.txt
    echo "prefix = '/usr'" >> cross_file.txt
    echo "libdir = 'lib'" >> cross_file.txt
    echo "bindir = 'bin'" >> cross_file.txt
    echo "c_args = [$(format_flags "$CFLAGS $CPPFLAGS")]" >> cross_file.txt
    echo "cpp_args = [$(format_flags "$CXXFLAGS $CPPFLAGS")]" >> cross_file.txt
    echo "c_link_args = [$(format_flags "$LDFLAGS")]" >> cross_file.txt
    echo "[properties]" >> cross_file.txt
    echo "needs_exe_wrapper = true" >> cross_file.txt
    echo "sys_root = '${COOKBOOK_SYSROOT}'" >> cross_file.txt
    # Unset toolchain env vars so meson reads everything from the cross file
    unset AR AS CC CXX LD NM OBJCOPY OBJDUMP PKG_CONFIG RANLIB READELF STRIP
    "${COOKBOOK_MESON}" setup \
        "${COOKBOOK_SOURCE}" \
        . \
        --cross-file cross_file.txt \
        "${COOKBOOK_MESON_FLAGS[@]}" \
        "$@"
    "${COOKBOOK_NINJA}" -j"${COOKBOOK_MAKE_JOBS}"
    # install into the stage directory, not the host filesystem
    DESTDIR="${COOKBOOK_STAGE}" "${COOKBOOK_NINJA}" install -j"${COOKBOOK_MAKE_JOBS}"
}
"#;
/// Shell fragment run after every recipe build: strips staged binaries
/// (unless `COOKBOOK_NOSTRIP` is set), removes libtool `.la` files and cargo
/// install bookkeeping, and injects the package name into appstream metadata.
pub(crate) static BUILD_POSTSCRIPT: &str = r#"
# Strip binaries
for dir in "${COOKBOOK_STAGE}/bin" "${COOKBOOK_STAGE}/usr/bin" "${COOKBOOK_STAGE}/libexec" "${COOKBOOK_STAGE}/usr/libexec"
do
    if [ -d "${dir}" ] && [ -z "${COOKBOOK_NOSTRIP}" ]
    then
        find "${dir}" -type f -exec "${GNU_TARGET}-strip" -v {} ';'
    fi
done
# Remove libtool files
for dir in "${COOKBOOK_STAGE}/lib" "${COOKBOOK_STAGE}/usr/lib"
do
    if [ -d "${dir}" ]
    then
        find "${dir}" -type f -name '*.la' -exec rm -fv {} ';'
    fi
done
# Remove cargo install files
for file in .crates.toml .crates2.json
do
    if [ -f "${COOKBOOK_STAGE}/${file}" ]
    then
        rm -v "${COOKBOOK_STAGE}/${file}"
    fi
done
# Add pkgname to appstream metadata
for dir in "${COOKBOOK_STAGE}/share/metainfo" "${COOKBOOK_STAGE}/usr/share/metainfo"
do
    if [ -d "${dir}" ]
    then
        find "${dir}" -type f -name '*.xml' -exec sed -i 's|</component>|<pkgname>'"${COOKBOOK_NAME}"'</pkgname></component>|g' {} ';'
    fi
done
"#;
/// Shell fragment that resets a git checkout to the remote default branch
/// (or to `origin/$BRANCH` when `BRANCH` is set in the environment).
pub(crate) static GIT_RESET_BRANCH: &str = r#"
ORIGIN_BRANCH="$(git branch --remotes | grep '^  origin/HEAD -> ' | cut -d ' ' -f 5-)"
if [ -n "$BRANCH" ]
then
    ORIGIN_BRANCH="origin/$BRANCH"
fi
if [ "$(git rev-parse HEAD)" != "$(git rev-parse $ORIGIN_BRANCH)" ]
then
    git checkout -B "$(echo "$ORIGIN_BRANCH" | cut -d / -f 2-)" "$ORIGIN_BRANCH"
fi"#;
/// Shell fragment that SIGKILLs every descendant process of `$PID`.
// NOTE(review): `$PID` is not defined in this fragment — it is presumably
// exported by the code that runs this script; confirm at the call site.
pub static KILL_ALL_PID: &str = r#"
THISPID=$$
CHILDREN=$(ps -o pid= --ppid $PID | grep -v $THISPID);
ALL_DESCENDANTS='';
while [ -n "$CHILDREN" ]; do
    ALL_DESCENDANTS="$ALL_DESCENDANTS $CHILDREN";
    CHILDREN=$(ps -o pid= --ppid $(echo $CHILDREN) | tr '\n' ' ');
done;
if [ -n "$ALL_DESCENDANTS" ]; then
    kill -9 $ALL_DESCENDANTS;
fi
"#;
+196
View File
@@ -0,0 +1,196 @@
use anyhow::Context;
use pkg::{Package, PackageName};
use std::fmt::Write as _;
use std::{
collections::{HashMap, HashSet},
fs::read_to_string,
path::PathBuf,
};
use crate::recipe::CookRecipe;
/// State of one node encountered while walking a package dependency tree.
pub enum WalkTreeEntry<'a> {
    /// A package archive exists on disk: its path and size in bytes.
    Built(&'a PathBuf, u64),
    /// No package archive has been produced for this recipe yet.
    NotBuilt,
    /// Already visited earlier in this walk; not descended into again.
    Deduped,
    /// Not present in the recipe map, so not handled by the build system.
    Missing,
}
/// Walk the dependency tree rooted at `package_name`, printing each node via
/// [`display_pkg_fn`] and accumulating `total_size`/`total_count`.
///
/// Thin wrapper over [`walk_tree_entry`]; see that function for the meaning
/// of the remaining parameters.
pub fn display_tree_entry(
    package_name: &PackageName,
    recipe_map: &HashMap<&PackageName, &CookRecipe>,
    prefix: &str,
    is_last: bool,
    is_build_tree: bool,
    visited: &mut HashSet<PackageName>,
    total_size: &mut u64,
    total_count: &mut u64,
) -> anyhow::Result<()> {
    walk_tree_entry(
        package_name,
        recipe_map,
        prefix,
        is_last,
        is_build_tree,
        visited,
        total_size,
        total_count,
        display_pkg_fn,
    )
}
/// Recursively walk the dependency tree rooted at `package_name`, invoking
/// `op` once per node and accumulating totals.
///
/// * `is_build_tree == true`: follow the recipe's build + package
///   dependencies; `total_size` counts not-yet-built packages.
/// * `is_build_tree == false`: follow the dependencies recorded in the staged
///   package toml (more accurate, includes auto deps); `total_size` sums the
///   on-disk sizes of built package archives.
///
/// `op` returning `Ok(true)` marks the node as cached: it is neither counted
/// nor descended into. Nodes absent from `recipe_map` are reported as
/// [`WalkTreeEntry::Missing`] and skipped. Already-visited nodes are reported
/// as [`WalkTreeEntry::Deduped`] and skipped.
pub fn walk_tree_entry(
    package_name: &PackageName,
    recipe_map: &HashMap<&PackageName, &CookRecipe>,
    prefix: &str,
    is_last: bool,
    is_build_tree: bool,
    visited: &mut HashSet<PackageName>,
    total_size: &mut u64,
    total_count: &mut u64,
    op: fn(&PackageName, &str, bool, &WalkTreeEntry) -> anyhow::Result<bool>,
) -> anyhow::Result<()> {
    let cook_recipe = match recipe_map.get(package_name) {
        Some(r) => r,
        None => {
            // Data not provided, will not be processed by the build system
            op(package_name, prefix, is_last, &WalkTreeEntry::Missing)?;
            return Ok(());
        }
    };
    let (_, pkg_path, pkg_toml) = cook_recipe.stage_paths();

    let deduped = visited.contains(package_name);
    let entry = match (std::fs::metadata(&pkg_path), deduped) {
        (_, true) => WalkTreeEntry::Deduped,
        (Ok(meta), _) => WalkTreeEntry::Built(&pkg_path, meta.len()),
        (Err(_), _) => WalkTreeEntry::NotBuilt,
    };
    let cached = op(package_name, prefix, is_last, &entry)?;
    if deduped || cached {
        return Ok(());
    }
    visited.insert(package_name.clone());

    // `cached` is provably false past the early return above, so the node is
    // counted unconditionally (the previous `if !cached` guard was dead code).
    if is_build_tree {
        if matches!(entry, WalkTreeEntry::NotBuilt) {
            *total_size += 1;
        }
    } else if let WalkTreeEntry::Built(_p, pkg_size) = &entry {
        *total_size += pkg_size;
    }
    *total_count += 1;

    // `pkg_meta` is declared at this scope so `all_deps_set` may borrow from it.
    let pkg_meta: Package;
    let mut all_deps_set: HashSet<&PackageName> = HashSet::new();
    if is_build_tree {
        all_deps_set.extend(cook_recipe.recipe.build.dependencies.iter());
        all_deps_set.extend(cook_recipe.recipe.package.dependencies.iter());
    } else if let Ok(pkg_toml_str) = read_to_string(&pkg_toml) {
        // more accurate with auto deps
        pkg_meta = toml::from_str(&pkg_toml_str)
            .context(format!("Unable to parse {}", pkg_toml.display()))?;
        all_deps_set.extend(pkg_meta.depends.iter());
    }
    if all_deps_set.is_empty() {
        return Ok(());
    }
    let sorted_deps: Vec<&PackageName> = all_deps_set.into_iter().collect();
    let deps_count = sorted_deps.len();
    // NOTE(review): this looks like it lost a box-drawing segment ("│   ")
    // for the non-last branch somewhere upstream — confirm against rendered
    // output before changing.
    let child_prefix = if is_last { " " } else { "" };
    for (i, dep_name) in sorted_deps.iter().enumerate() {
        walk_tree_entry(
            dep_name,
            recipe_map,
            &format!("{}{}", prefix, child_prefix),
            i == deps_count - 1,
            is_build_tree,
            visited,
            total_size,
            total_count,
            op,
        )?;
    }
    Ok(())
}
/// Default `op` for [`walk_tree_entry`]: prints one tree line per package,
/// annotated with its archive size or build status.
///
/// Always returns `Ok(false)` — it never reports a node as cached.
pub fn display_pkg_fn(
    package_name: &PackageName,
    prefix: &str,
    is_last: bool,
    entry: &WalkTreeEntry,
) -> anyhow::Result<bool> {
    let annotation = match entry {
        WalkTreeEntry::Built(_path, size) => format!("[{}]", format_size(*size)),
        WalkTreeEntry::NotBuilt => String::from("(not built)"),
        WalkTreeEntry::Deduped => String::new(),
        WalkTreeEntry::Missing => String::from("(omitted)"),
    };
    let connector = if is_last { "└── " } else { "├── " };
    println!("{}{}{} {}", prefix, connector, package_name, annotation);
    // TODO: check dirty build by checking source ident
    Ok(false)
}
/// Render `dir` recursively as an ASCII tree into `buffer`, returning the
/// total size in bytes of every file in the tree (subdirectories included).
///
/// Entries are sorted by file name; unreadable directory entries are skipped.
/// Returns `Ok(0)` when `dir` is not a directory.
pub fn walk_file_tree(dir: &PathBuf, prefix: &str, buffer: &mut String) -> std::io::Result<u64> {
    if !dir.is_dir() {
        return Ok(0);
    }
    // Surface fmt::Write errors as io::Error so the function has one error type.
    let fmt_err = std::io::Error::other;
    let mut entries: Vec<_> = std::fs::read_dir(dir)?.filter_map(|e| e.ok()).collect();
    entries.sort_by(|a, b| a.file_name().cmp(&b.file_name()));
    let mut total_size = 0;
    for (index, entry) in entries.iter().enumerate() {
        let path = entry.path();
        let metadata = entry.metadata()?;
        let is_last = index == entries.len() - 1;
        let line_prefix = if is_last { "└── " } else { "├── " };
        let file_name = path
            .file_name()
            .and_then(|n| n.to_str())
            .unwrap_or("Unknown");
        if path.is_dir() {
            writeln!(buffer, "{}{}{}/", prefix, line_prefix, file_name).map_err(fmt_err)?;
            let new_prefix = format!("{}{}", prefix, if is_last { " " } else { "" });
            // BUGFIX: include the subtree's size in this directory's total —
            // the recursive result was previously discarded, so totals only
            // counted top-level files.
            total_size += walk_file_tree(&path, &new_prefix, buffer)?;
        } else {
            let size = metadata.len();
            total_size += size;
            writeln!(
                buffer,
                "{}{}{} ({})",
                prefix,
                line_prefix,
                file_name,
                format_size(size)
            )
            .map_err(fmt_err)?;
        }
    }
    Ok(total_size)
}
/// Format a byte count as a human-readable string in binary (1024-based)
/// units, e.g. `1536` -> `"1.50 KiB"`.
///
/// The unit index is clamped to the largest known unit so inputs of 1 PiB or
/// more no longer index out of bounds (they render as large TiB values).
pub fn format_size(bytes: u64) -> String {
    if bytes == 0 {
        return "0 B".to_string();
    }
    const UNITS: [&str; 5] = ["B", "KiB", "MiB", "GiB", "TiB"];
    let i = ((bytes as f64).log(1024.0).floor() as usize).min(UNITS.len() - 1);
    let size = bytes as f64 / 1024.0_f64.powi(i as i32);
    format!("{:.2} {}", size, UNITS[i])
}
+208
View File
@@ -0,0 +1,208 @@
pub mod config;
pub mod cook;
pub mod recipe;
pub mod staged_pkg;
pub mod web;
/// Default for maximum number of levels to descend down dependencies tree.
pub const WALK_DEPTH: usize = 16;
/// Default remote package source, for recipes with build type = "remote"
pub const REMOTE_PKG_SOURCE: &str = "https://static.redox-os.org/pkg";
/// True when this binary itself was compiled for Redox (compile-time check).
pub fn is_redox() -> bool {
    cfg!(target_os = "redox")
}
/// The cross-compilation target from `COOKBOOK_CROSS_TARGET`, or `None` when
/// the variable is unset or empty.
pub fn cross_target() -> Option<String> {
    match std::env::var("COOKBOOK_CROSS_TARGET") {
        Ok(target) if !target.is_empty() => Some(target),
        _ => None,
    }
}
// Errors
use std::fmt::Display;
use std::io;
use std::path::PathBuf;
use std::process::{Command, ExitStatus};
/// Error types used through cookbook.
///
/// When writing IO context, don't use "Failed at XXX". Look at impl Display for suitable word to use.
#[derive(Debug)]
pub enum Error {
    /// An IO failure with optional path and a static context word.
    Io {
        source: io::Error,
        path: Option<PathBuf>,
        context: &'static str,
    },
    /// An IO failure involving a source and destination path (copy/move-like ops).
    FileIo {
        source: io::Error,
        src: PathBuf,
        dst: PathBuf,
        context: &'static str,
    },
    /// A child process exited unsuccessfully.
    Command(Command, ExitStatus),
    /// An error bubbled up from the `pkg` crate.
    Package(pkg::PackageError),
    /// An error bubbled up from the `pkgar` crate.
    Pkgar(pkgar::Error),
    /// A free-form error message.
    Other(String),
}
impl Error {
    /// Wrap `io::Error::last_os_error()` with a static context word.
    pub fn from_last_io_error(context: &'static str) -> Error {
        wrap_io_err!(context)(io::Error::last_os_error())
    }

    /// Wrap an existing `io::Error` with a static context word.
    pub fn from_io_error(err: io::Error, context: &'static str) -> Error {
        wrap_io_err!(context)(err)
    }
}
impl Display for Error {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Error::Io {
source,
path,
context,
} => {
if let Some(path) = path {
write!(f, "{context} failed at \"{}\": {}", path.display(), source)
} else {
write!(f, "{context} failed: {}", source)
}
}
Error::FileIo {
source,
src,
dst,
context,
} => {
write!(
f,
"{context} failed from \"{}\" to \"{}\": {}",
src.display(),
dst.display(),
source
)
}
Error::Command(command, exit_status) => {
write!(
f,
"Failed to run [{:?}]: exited with status {}",
command, exit_status
)
}
Error::Package(package_error) => write!(f, "{}", package_error),
Error::Pkgar(error) => write!(f, "{}", error),
Error::Other(context) => {
write!(f, "{context}")
}
}
}
}
/// Build a closure mapping an `io::Error` into [`Error::Io`] (one- and
/// two-argument forms) or [`Error::FileIo`] (three-argument form); intended
/// for use with `.map_err(wrap_io_err!(...))`.
macro_rules! wrap_io_err {
    ($context:expr) => {
        |source| crate::Error::Io {
            source,
            path: None,
            context: $context,
        }
    };
    ($path:expr, $context:expr) => {
        |source| crate::Error::Io {
            source,
            path: Some($path.to_path_buf()),
            context: $context,
        }
    };
    ($src:expr, $dst:expr, $context:expr) => {
        |source| crate::Error::FileIo {
            source,
            src: $src.to_path_buf(),
            dst: $dst.to_path_buf(),
            context: $context,
        }
    };
}

/// Build a closure producing an [`Error::Other`] from format arguments;
/// intended for `.ok_or_else(wrap_other_err!(...))` and similar.
macro_rules! wrap_other_err {
    ($($arg:tt)*) => {
        || crate::Error::Other(format!($($arg)*))
    };
}

/// Early-return from the enclosing function with an [`Error::Other`].
macro_rules! bail_other_err {
    ($($arg:tt)*) => {
        return Err(crate::Error::Other(format!($($arg)*)))
    };
}
// String conversions: allow `?` on string-typed failures, and allow turning
// any Error back into a plain message for callers that want `String`.
impl From<&'static str> for Error {
    fn from(value: &'static str) -> Self {
        Error::Other(value.to_string())
    }
}

impl From<String> for Error {
    fn from(value: String) -> Self {
        Error::Other(value)
    }
}

impl From<Error> for String {
    fn from(val: Error) -> Self {
        format!("{}", val)
    }
}

// pkg-layer errors keep their own variant so they display via PackageError.
impl From<pkg::PackageError> for Error {
    fn from(value: pkg::PackageError) -> Self {
        Error::Package(value)
    }
}
impl From<pkgar::Error> for Error {
    fn from(value: pkgar::Error) -> Self {
        match value {
            // Unwrap pkgar IO errors into our own Io variant so they render
            // with the same formatting as native IO errors.
            pkgar::Error::Io {
                source,
                path,
                context,
            } => Error::Io {
                source,
                path,
                context,
            },
            // Everything else is kept wrapped as-is.
            _ => Error::Pkgar(value),
        }
    }
}
impl From<walkdir::Error> for Error {
fn from(value: walkdir::Error) -> Self {
if value.io_error().is_some() {
let path = value.path().map(|s| s.to_path_buf());
Error::Io {
source: value.into_io_error().unwrap(),
path: path,
context: "Walkdir error",
}
} else {
wrap_other_err!(
"Walkdir file system loop found at {:?}",
value.path().map(|s| s.to_string_lossy().to_string()),
)()
}
}
}
pub type Result<T> = std::result::Result<T, Error>;
pub(crate) use wrap_io_err;
pub(crate) use wrap_other_err;
pub(crate) use bail_other_err;
pub(crate) use cook::pty::log_to_pty;
+716
View File
@@ -0,0 +1,716 @@
use std::{
collections::BTreeSet,
convert::TryInto,
fs,
path::{Path, PathBuf},
};
use pkg::{PackageError, PackageName};
use regex::Regex;
use serde::{Deserialize, Serialize};
use crate::{WALK_DEPTH, cook::package as cook_package, staged_pkg};
/// Specifies how to download the source for a recipe
// NOTE: serde(untagged) means the variant is chosen by which fields are
// present in the TOML; keep field sets unambiguous when adding variants.
#[derive(Clone, Debug, Deserialize, PartialEq, Serialize)]
#[serde(untagged)]
pub enum SourceRecipe {
    /// Reuse the source directory of another package
    ///
    /// This is useful when a single source repo contains multiple projects which each have their
    /// own recipe to build them.
    SameAs {
        /// Relative path to the package for which to reuse the source dir
        same_as: String,
    },
    /// Path source
    Path {
        /// The path to the source
        path: String,
    },
    /// A git repository source
    Git {
        /// The URL for the git repository, such as https://gitlab.redox-os.org/redox-os/ion.git
        git: String,
        /// The URL for an upstream repository
        upstream: Option<String>,
        /// The optional branch of the git repository to track, such as master. Please specify to
        /// make updates to the rev easier
        branch: Option<String>,
        /// The optional revision of the git repository to use for builds. Please specify for
        /// reproducible builds
        rev: Option<String>,
        /// The optional config to clone with treeless clone. Default is true if "rev" added
        shallow_clone: Option<bool>,
        /// A list of patch files to apply to the source
        #[serde(default)]
        patches: Vec<String>,
        /// Optional script to run to prepare the source
        script: Option<String>,
    },
    /// A tar file source
    Tar {
        /// The URL of a tar source
        tar: String,
        /// The optional blake3 sum of the tar file. Please specify this to make reproducible
        /// builds more reliable
        blake3: Option<String>,
        /// A list of patch files to apply to the source
        #[serde(default)]
        patches: Vec<String>,
        /// Optional script to run to prepare the source, such as ./autogen.sh
        script: Option<String>,
    },
}
/// Specifies how to build a recipe
// Selected in TOML via the `template` tag, e.g. `template = "cargo"`.
#[derive(Debug, Clone, Deserialize, PartialEq, Serialize)]
#[serde(tag = "template")]
pub enum BuildKind {
    /// Will not build (for meta packages)
    #[serde(rename = "none")]
    None,
    /// Will download compiled package from remote
    #[serde(rename = "remote")]
    Remote,
    /// Will build and install using cargo
    #[serde(rename = "cargo")]
    Cargo {
        /// Optional subdirectory of the source containing the crate to build
        #[serde(default)]
        cargopath: Option<String>,
        /// Extra flags appended to the cargo invocation
        #[serde(default)]
        cargoflags: Vec<String>,
        /// Workspace packages to build and install individually
        #[serde(default)]
        cargopackages: Vec<String>,
        /// Cargo examples to build and install individually
        #[serde(default)]
        cargoexamples: Vec<String>,
    },
    /// Will build and install using configure and make
    #[serde(rename = "configure")]
    Configure {
        #[serde(default)]
        configureflags: Vec<String>,
    },
    /// Will build and install using cmake
    #[serde(rename = "cmake")]
    Cmake {
        #[serde(default)]
        cmakeflags: Vec<String>,
    },
    /// Will build and install using meson
    #[serde(rename = "meson")]
    Meson {
        #[serde(default)]
        mesonflags: Vec<String>,
    },
    /// Will build and install using custom commands
    #[serde(rename = "custom")]
    Custom { script: String },
}
/// Recipes without a `[build]` template default to meta packages.
impl Default for BuildKind {
    fn default() -> Self {
        BuildKind::None
    }
}
/// The `[build]` section of a recipe: how to build plus build-time deps.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct BuildRecipe {
    // flattened so the `template` tag and its fields live directly in [build]
    #[serde(flatten)]
    pub kind: BuildKind,
    // packages that must be built/staged before this one builds
    pub dependencies: Vec<PackageName>,
    // build-time-only tooling dependencies
    #[serde(rename = "dev-dependencies")]
    pub dev_dependencies: Vec<PackageName>,
}
/// The `[package]` section of a recipe: runtime deps and display metadata.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct PackageRecipe {
    pub dependencies: Vec<PackageName>,
    pub version: Option<String>,
    pub description: Option<String>,
}

/// One entry of `[[optional-packages]]`: a secondary package split out of the
/// same build by name, with its own deps and file list.
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct OptionalPackageRecipe {
    pub name: String,
    pub dependencies: Vec<PackageName>,
    pub files: Vec<String>,
}
/// Everything required to build a Redox package
// Parsed from `recipe.toml`; all sections are optional (serde(default)).
#[derive(Debug, Clone, Default, Deserialize, PartialEq, Serialize)]
#[serde(default)]
pub struct Recipe {
    /// Specifies how to download the source for this recipe
    pub source: Option<SourceRecipe>,
    /// Specifies how to build this recipe
    pub build: BuildRecipe,
    /// Specifies how to package this recipe
    pub package: PackageRecipe,
    /// Specifies optional packages based from this recipe
    #[serde(rename = "optional-packages")]
    pub optional_packages: Vec<OptionalPackageRecipe>,
}
impl BuildRecipe {
    /// Create a build recipe of the given kind with empty dependency lists.
    pub fn new(kind: BuildKind) -> Self {
        // Struct-update syntax instead of default-then-mutate
        // (clippy::field_reassign_with_default).
        Self {
            kind,
            ..Self::default()
        }
    }

    /// Switch this recipe to fetch a prebuilt package from the remote repo
    /// and drop its dev dependencies. Meta packages (`BuildKind::None`) are
    /// left untouched because the remote template cannot handle them.
    pub fn set_as_remote(&mut self) {
        if self.kind == BuildKind::None {
            // BuildKind::Remote won't handle remote meta-packages
            return;
        }
        self.kind = BuildKind::Remote;
        self.dev_dependencies = Vec::new();
    }

    /// Disable building entirely and drop all dependency lists.
    pub fn set_as_none(&mut self) {
        self.kind = BuildKind::None;
        self.dependencies = Vec::new();
        self.dev_dependencies = Vec::new();
    }
}
/// A recipe bound to a concrete package name, directory, and build target —
/// the unit the build system actually cooks.
#[derive(Debug, Clone, PartialEq)]
pub struct CookRecipe {
    pub name: PackageName,
    // directory containing recipe.toml
    pub dir: PathBuf,
    pub recipe: Recipe,
    // build target triple for this package
    pub target: &'static str,
    /// If false, it's listed on install config
    pub is_deps: bool,
    // filesystem-config rule applied to this recipe ("" = none yet);
    // see apply_filesystem_config for the accepted values
    pub rule: String,
}
impl Recipe {
    /// Parse a `recipe.toml` file into a [`Recipe`].
    ///
    /// Returns `FileMissing` when `file` is not a regular file, `FileError`
    /// when it cannot be read, and `Parse` when the TOML is invalid.
    pub fn new(file: &PathBuf) -> Result<Recipe, PackageError> {
        if !file.is_file() {
            return Err(PackageError::FileMissing(file.clone()));
        }
        // `file` is already a reference — no need to re-borrow it.
        let toml = fs::read_to_string(file)
            .map_err(|err| PackageError::FileError(err.raw_os_error(), file.clone()))?;
        let recipe: Recipe =
            toml::from_str(&toml).map_err(|err| PackageError::Parse(err, Some(file.clone())))?;
        Ok(recipe)
    }

    /// All packages produced by this recipe: every optional package as
    /// `Some(..)`, then `None` for the mandatory package.
    pub fn get_packages_list(&self) -> Vec<Option<&OptionalPackageRecipe>> {
        let mut packages: Vec<Option<&OptionalPackageRecipe>> =
            self.optional_packages.iter().map(Some).collect();
        // the mandatory package, put last because of cook_build
        packages.push(None);
        packages
    }
}
impl CookRecipe {
    /// Build a `CookRecipe` from a parsed recipe.
    ///
    /// For host (`*-host`) packages, dependency lists are rewritten into host
    /// flavor: plain deps gain the host marker, target-only deps are dropped,
    /// and a host dependency on the package itself is removed.
    pub fn new(name: PackageName, dir: PathBuf, mut recipe: Recipe) -> Result<Self, PackageError> {
        let target = cook_package::package_target(&name);
        if name.is_host() {
            let thisname = name.without_host();
            let fn_map = |p: PackageName| {
                if p.is_host() {
                    // drop a host dependency on ourselves
                    if p.name() == thisname { None } else { Some(p) }
                } else if p.is_target() {
                    // target-only deps do not apply to host builds
                    None
                } else {
                    Some(p.with_host())
                }
            };
            recipe.build.dependencies = recipe
                .build
                .dependencies
                .into_iter()
                .filter_map(fn_map)
                .collect();
            recipe.build.dev_dependencies = recipe
                .build
                .dev_dependencies
                .into_iter()
                .filter_map(fn_map)
                .collect();
        }
        Ok(Self {
            name,
            dir,
            recipe,
            target,
            is_deps: false,
            rule: "".into(),
        })
    }

    /// Locate the recipe directory for `name` and load its `recipe.toml`.
    pub fn from_name(name: PackageName) -> Result<Self, PackageError> {
        let dir = staged_pkg::find(name.name())
            .ok_or_else(|| PackageError::PackageNotFound(name.clone()))?;
        let file = dir.join("recipe.toml");
        let recipe = Recipe::new(&file)?;
        Self::new(name, dir.to_path_buf(), recipe)
    }

    /// Load every name in `names` via [`Self::from_name`], failing fast on
    /// the first error.
    pub fn from_list(names: Vec<PackageName>) -> Result<Vec<Self>, PackageError> {
        let mut packages = Vec::new();
        for name in names {
            packages.push(Self::from_name(name)?);
        }
        Ok(packages)
    }

    /// Build a `CookRecipe` from a recipe directory. The package name is the
    /// directory's base name (with the host marker when `is_host`).
    /// `read_recipe == false` skips parsing and uses a default recipe.
    pub fn from_path(dir: &Path, read_recipe: bool, is_host: bool) -> Result<Self, PackageError> {
        let file = dir.join("recipe.toml");
        let mut name: PackageName = dir.file_name().unwrap().try_into()?;
        if is_host {
            name = name.with_host();
        }
        let recipe = if read_recipe {
            Recipe::new(&file)?
        } else {
            // clean/unfetch don't need to read recipe
            Recipe::default()
        };
        Self::new(name, dir.to_path_buf(), recipe)
    }

    /// Core dependency-resolution walk shared by the `get_*_recursive`
    /// helpers.
    ///
    /// The `recurse_*` flags pick which dependency lists are followed; the
    /// `collect_*` flags pick which resolved recipes are kept in the result.
    /// `recursion` is the remaining depth budget — hitting 0 yields
    /// `PackageError::Recursion`, which callers decorate with the dependency
    /// chain via `append_recursion`. Duplicates are filtered via a name set
    /// while preserving first-seen (dependencies-first) order.
    fn new_recursive(
        names: &[PackageName],
        recurse_build_deps: bool,
        recurse_dev_build_deps: bool,
        recurse_package_deps: bool,
        collect_build_deps: bool,
        collect_package_deps: bool,
        collect_self: bool,
        recursion: usize,
    ) -> Result<Vec<Self>, PackageError> {
        if recursion == 0 {
            return Err(PackageError::Recursion(Default::default()));
        }
        let mut recipes = Vec::new();
        let mut recipes_set = BTreeSet::new();
        for name in names {
            let recipe = Self::from_name(name.clone())?;
            if recurse_build_deps {
                let dependencies = Self::new_recursive(
                    &recipe.recipe.build.dependencies,
                    recurse_build_deps,
                    recurse_dev_build_deps,
                    recurse_package_deps,
                    collect_build_deps,
                    collect_package_deps,
                    // children of build deps are collected per collect_build_deps
                    collect_build_deps,
                    recursion - 1,
                )
                .map_err(|mut err| {
                    err.append_recursion(name);
                    err
                })?;
                for dependency in dependencies {
                    if !recipes_set.contains(&dependency.name) {
                        recipes_set.insert(dependency.name.clone());
                        recipes.push(dependency);
                    }
                }
            }
            if recurse_dev_build_deps {
                let dependencies = Self::new_recursive(
                    &recipe.recipe.build.dev_dependencies,
                    recurse_build_deps,
                    recurse_dev_build_deps,
                    recurse_package_deps,
                    collect_build_deps,
                    collect_package_deps,
                    collect_build_deps,
                    recursion - 1,
                )
                .map_err(|mut err| {
                    err.append_recursion(name);
                    err
                })?;
                for dependency in dependencies {
                    if !recipes_set.contains(&dependency.name) {
                        recipes_set.insert(dependency.name.clone());
                        recipes.push(dependency);
                    }
                }
            }
            if recurse_package_deps {
                let dependencies = Self::new_recursive(
                    &recipe.recipe.package.dependencies,
                    recurse_build_deps,
                    recurse_dev_build_deps,
                    recurse_package_deps,
                    collect_build_deps,
                    collect_package_deps,
                    collect_package_deps,
                    recursion - 1,
                )
                .map_err(|mut err| {
                    err.append_recursion(name);
                    err
                })?;
                for dependency in dependencies {
                    if !recipes_set.contains(&dependency.name) {
                        recipes_set.insert(dependency.name.clone());
                        recipes.push(dependency);
                    }
                }
            }
            // dependencies first, then the package itself (when requested)
            if collect_self && !recipes_set.contains(&recipe.name) {
                recipes_set.insert(recipe.name.clone());
                recipes.push(recipe);
            }
        }
        Ok(recipes)
    }

    /// All build dependencies of `names` (optionally including dev deps),
    /// plus the named recipes themselves, dependencies first.
    pub fn get_build_deps_recursive(
        names: &[PackageName],
        include_dev: bool,
    ) -> Result<Vec<Self>, PackageError> {
        let packages = Self::new_recursive(
            names,
            true,
            include_dev,
            false,
            true,
            false,
            true,
            WALK_DEPTH,
        )?;
        Ok(packages)
    }

    /// All runtime (package) dependency names of `names`, optionally
    /// including the names themselves.
    pub fn get_package_deps_recursive(
        names: &[PackageName],
        include_names: bool,
    ) -> Result<Vec<PackageName>, PackageError> {
        // recurse_build_deps == true here as libraries (build deps) can have runtime files (package deps)
        let packages = Self::new_recursive(
            names,
            true,
            false,
            true,
            false,
            true,
            include_names,
            WALK_DEPTH,
        )?;
        Ok(packages.into_iter().map(|p| p.name).collect())
    }

    /// Names of every dependency of any kind (build, package, optionally
    /// dev) plus the names themselves.
    pub fn get_all_deps_names_recursive(
        names: &[PackageName],
        include_dev: bool,
    ) -> Result<Vec<PackageName>, PackageError> {
        let packages =
            Self::new_recursive(names, true, include_dev, true, true, true, true, WALK_DEPTH)?;
        Ok(packages.into_iter().map(|p| p.name).collect())
    }

    /// Re-read `recipe.toml` from disk and re-apply the stored filesystem
    /// rule. Errors from re-applying the rule are ignored: the rule was
    /// validated when first applied.
    pub fn reload_recipe(&mut self) -> Result<(), PackageError> {
        self.recipe = Self::from_path(&self.dir, true, self.name.is_host())?.recipe;
        let _ = self.apply_filesystem_config(&self.rule.clone());
        Ok(())
    }

    /// returns stage dir, pkgar file and toml file.
    pub fn stage_paths(&self) -> (PathBuf, PathBuf, PathBuf) {
        // A name suffix selects the corresponding optional package's paths.
        let r = self.name.suffix().map(|p| OptionalPackageRecipe {
            name: p.to_string(),
            ..Default::default()
        });
        cook_package::package_stage_paths(r.as_ref(), &self.target_dir())
    }

    /// Per-target build directory: `<recipe dir>/target/<target triple>`.
    pub fn target_dir(&self) -> PathBuf {
        self.dir.join("target").join(self.target)
    }

    /// Apply a filesystem-config rule and remember it in `self.rule`.
    ///
    /// Accepted rules: `source` (build normally), `local` (keep local
    /// changes, no fetching), `binary` (download prebuilt), `ignore` (do not
    /// build). Any other value is an error.
    pub fn apply_filesystem_config(&mut self, rule: &str) -> Result<(), anyhow::Error> {
        match rule {
            // build from source as usual
            "source" => {}
            // keep local changes
            "local" => self.recipe.source = None,
            // download from remote build
            "binary" => {
                self.recipe.source = None;
                self.recipe.build.set_as_remote();
            }
            // don't build this recipe (unlikely to go here unless some deps need it)
            // TODO: Note that we're assuming this being ignored from e.g. metapackages
            // TODO: Will totally broke build if this recipe needed as some other build dependencies
            "ignore" => {
                self.recipe.source = None;
                self.recipe.build.set_as_none();
            }
            rule => {
                anyhow::bail!(
                    // Fail fast because we could risk losing local changes if "local" was typo'ed
                    "Invalid pkg config {} = \"{}\"\nExpecting either 'source', 'local', 'binary' or 'ignore'",
                    self.name.as_str(),
                    rule
                );
            }
        }
        self.rule = rule.to_string();
        Ok(())
    }

    /// Best-effort version guess for this recipe.
    ///
    /// Order: explicit `[package].version`; a version-looking substring of
    /// the tar URL, git rev, or git branch; finally the version field of
    /// `source/Cargo.toml` (following `same_as` indirection). Meta packages
    /// report `Some("")`; otherwise `None` when nothing matched.
    pub fn guess_version(&self) -> Option<String> {
        let recipe = &self.recipe;
        if recipe.build.kind == BuildKind::None {
            return Some("".into()); // signifies a meta package
        } else if let Some(v) = &recipe.package.version {
            return Some(v.to_string());
        }
        let re = VersionExtractor::new();
        let mut dir = self.dir.to_path_buf();
        if let Some(r) = &recipe.source {
            match r {
                SourceRecipe::Tar {
                    tar,
                    blake3: _,
                    patches: _,
                    script: _,
                } => {
                    if let Some(ver) = re.extract_ver(&tar) {
                        return Some(ver);
                    }
                }
                SourceRecipe::Git {
                    git: _,
                    upstream: _,
                    branch,
                    rev,
                    shallow_clone: _,
                    patches: _,
                    script: _,
                } => {
                    if let Some(rev) = rev {
                        if let Some(ver) = re.extract_ver(&rev) {
                            return Some(ver);
                        }
                    }
                    if let Some(branch) = branch {
                        if let Some(ver) = re.extract_ver(&branch) {
                            return Some(ver);
                        }
                    }
                }
                SourceRecipe::SameAs { same_as } => {
                    // look in the referenced recipe's source instead
                    dir = self.dir.join(same_as);
                }
                _ => {}
            }
        };
        let cargo_path = dir.join("source/Cargo.toml");
        if let Some(ver) = VersionExtractor::extract_cargo_ver(&cargo_path) {
            return Some(ver);
        }
        None
    }
}
// TODO: Wrap these vectors in a struct
/// Mark every recipe not explicitly listed in `names` as a pure dependency.
pub fn recipes_mark_as_deps(names: &[PackageName], packages: &mut Vec<CookRecipe>) {
    packages
        .iter_mut()
        .for_each(|package| package.is_deps = !names.contains(&package.name));
}
/// Collapse per-feature (suffixed) package names down to their base name —
/// keeping the host marker — and drop duplicates, preserving first-seen order.
pub fn recipes_flatten_package_names(packages: Vec<CookRecipe>) -> Vec<CookRecipe> {
    let mut seen = BTreeSet::new();
    let mut flattened = Vec::new();
    for mut package in packages {
        let flat_name = if package.name.is_host() {
            package.name.with_suffix(None).with_host()
        } else {
            package.name.with_suffix(None)
        };
        // `insert` returns false when the name was already recorded
        if seen.insert(flat_name.to_string()) {
            package.name = flat_name;
            flattened.push(package);
        }
    }
    flattened
}
/// Serialized list of automatically detected runtime dependencies.
#[derive(Serialize, Deserialize)]
pub struct AutoDeps {
    pub packages: BTreeSet<PackageName>,
}

/// Extracts version-looking substrings (e.g. `1.2.3`) from free-form text.
pub struct VersionExtractor {
    // compiled once in `new` and reused across extractions
    regex: Regex,
}
impl VersionExtractor {
    /// Build the extractor with a regex matching two- or three-component
    /// dotted version numbers (`1.2`, `1.2.3`).
    pub fn new() -> Self {
        Self {
            regex: Regex::new(r"\d+(\.\d+){1,2}").unwrap(),
        }
    }

    /// Return the first version-looking substring of `text`, if any.
    pub fn extract_ver(&self, text: &str) -> Option<String> {
        // `find` is simpler and cheaper than `captures` when only the whole
        // match is needed; also drops the needless `&text` re-borrow.
        self.regex.find(text).map(|m| m.as_str().to_string())
    }

    /// Read `[package].version` — falling back to
    /// `[workspace.package].version` — from the Cargo manifest at `path`.
    /// Returns `None` when the file is unreadable, unparsable, or has no
    /// version field.
    fn extract_cargo_ver(path: &Path) -> Option<String> {
        let content = std::fs::read_to_string(path).ok()?;
        let manifest = content.parse::<toml::Table>().ok()?;
        let direct = manifest
            .get("package")
            .and_then(|pkg| pkg.get("version"))
            .and_then(|v| v.as_str());
        let workspace = || {
            manifest
                .get("workspace")
                .and_then(|ws| ws.get("package"))
                .and_then(|pkg| pkg.get("version"))
                .and_then(|v| v.as_str())
        };
        direct.or_else(workspace).map(str::to_string)
    }
}
#[cfg(test)]
mod tests {
    use pkg::PackageName;

    // A git + cargo recipe parses into Git source and Cargo build kind.
    #[test]
    fn git_cargo_recipe() {
        use crate::recipe::{BuildKind, BuildRecipe, Recipe, SourceRecipe};
        let recipe: Recipe = toml::from_str(
            r#"
        [source]
        git = "https://gitlab.redox-os.org/redox-os/acid.git"
        branch = "master"
        rev = "06344744d3d55a5ac9a62a6059cb363d40699bbc"

        [build]
        template = "cargo"
        "#,
        )
        .unwrap();
        assert_eq!(
            recipe,
            Recipe {
                source: Some(SourceRecipe::Git {
                    git: "https://gitlab.redox-os.org/redox-os/acid.git".to_string(),
                    upstream: None,
                    branch: Some("master".to_string()),
                    rev: Some("06344744d3d55a5ac9a62a6059cb363d40699bbc".to_string()),
                    patches: Vec::new(),
                    script: None,
                    shallow_clone: None,
                }),
                build: BuildRecipe::new(BuildKind::Cargo {
                    cargopath: None,
                    cargoflags: Vec::new(),
                    cargopackages: Vec::new(),
                    cargoexamples: Vec::new(),
                }),
                ..Default::default()
            }
        );
    }

    // A tar + custom-script recipe parses into Tar source and Custom build.
    #[test]
    fn tar_custom_recipe() {
        use crate::recipe::{BuildKind, BuildRecipe, Recipe, SourceRecipe};
        let recipe: Recipe = toml::from_str(
            r#"
        [source]
        tar = "http://downloads.xiph.org/releases/ogg/libogg-1.3.3.tar.xz"
        blake3 = "8220c0e4082fa26c07b10bfe31f641d2e33ebe1d1bb0b20221b7016bc8b78a3a"

        [build]
        template = "custom"
        script = "make"
        "#,
        )
        .unwrap();
        assert_eq!(
            recipe,
            Recipe {
                source: Some(SourceRecipe::Tar {
                    tar: "http://downloads.xiph.org/releases/ogg/libogg-1.3.3.tar.xz".to_string(),
                    blake3: Some(
                        "8220c0e4082fa26c07b10bfe31f641d2e33ebe1d1bb0b20221b7016bc8b78a3a"
                            .to_string()
                    ),
                    patches: Vec::new(),
                    script: None,
                }),
                build: BuildRecipe::new(BuildKind::Custom {
                    script: "make".to_string()
                }),
                ..Default::default()
            }
        );
    }

    // A recipe with only [package] dependencies defaults to a meta package.
    #[test]
    fn meta_recipe() {
        use crate::recipe::{BuildKind, BuildRecipe, PackageRecipe, Recipe};
        let recipe: Recipe = toml::from_str(
            r#"
        [package]
        dependencies = [
            "gcc13",
        ]
        "#,
        )
        .unwrap();
        assert_eq!(
            recipe,
            Recipe {
                source: None,
                build: BuildRecipe::new(BuildKind::None),
                package: PackageRecipe {
                    dependencies: vec![PackageName::new("gcc13").unwrap()],
                    ..Default::default()
                },
                ..Default::default()
            }
        );
    }
}
+162
View File
@@ -0,0 +1,162 @@
use std::borrow::Cow;
use std::collections::{BTreeMap, BTreeSet, HashMap};
use std::ffi::OsStr;
use std::path::{Path, PathBuf};
use std::sync::LazyLock;
use pkg::{Package, PackageError, PackageName};
// This file contains code that caches recipe paths.

// TODO: This file is previously resides in `pkg` crate,
// and can actually be merged with other logic in this cookbook.

/// Lazily-built index from recipe name to its directory, populated once by
/// walking `recipes/` (following symlinks) for `recipe.toml` files.
/// Duplicate recipe names are reported on stderr; the last one found wins.
static RECIPE_PATHS: LazyLock<HashMap<PackageName, PathBuf>> = LazyLock::new(|| {
    let mut recipe_paths = HashMap::new();
    let mut walker = ignore::WalkBuilder::new("recipes");
    walker.follow_links(true);
    for entry_res in walker.build() {
        let Ok(entry) = entry_res else {
            // unreadable entries are skipped silently
            continue;
        };
        if entry.file_name() == OsStr::new("recipe.toml") {
            let recipe_file = entry.path();
            let Some(recipe_dir) = recipe_file.parent() else {
                continue;
            };
            // the recipe name is the directory containing recipe.toml
            let Some(recipe_name) = recipe_dir
                .file_name()
                .and_then(|x| x.to_str()?.try_into().ok())
            else {
                continue;
            };
            if let Some(other_dir) = recipe_paths.insert(recipe_name, recipe_dir.to_path_buf()) {
                eprintln!(
                    "recipe {:?} has two or more entries: {:?} replaced by {:?}",
                    recipe_dir.file_name(),
                    other_dir,
                    recipe_dir,
                );
            }
        }
    }
    recipe_paths
});
pub fn find(recipe: &str) -> Option<&'static Path> {
RECIPE_PATHS.get(recipe).map(PathBuf::as_path)
}
/// All cached recipe directories, each joined onto `prefix`,
/// sorted and deduplicated by the `BTreeSet`.
pub fn list(prefix: impl AsRef<Path>) -> BTreeSet<PathBuf> {
    let base = prefix.as_ref();
    let mut out = BTreeSet::new();
    for path in RECIPE_PATHS.values() {
        out.insert(base.join(path));
    }
    out
}
/// Load the staged package metadata for `name`, resolving its recipe
/// directory through the cache first.
pub fn new(name: &PackageName) -> Result<Package, PackageError> {
    match find(name.name()) {
        Some(dir) => from_path(dir, name.suffix()),
        None => Err(PackageError::PackageNotFound(name.clone())),
    }
}
/// Parse the staged `stage.toml` (or `stage.<feature>.toml`) for the current
/// redoxer target under the given recipe directory.
pub fn from_path(dir: &Path, feature: Option<&str>) -> Result<Package, PackageError> {
    let stage_name = match feature {
        Some(f) => format!("stage.{f}.toml"),
        None => "stage.toml".to_string(),
    };
    let file = dir.join("target").join(redoxer::target()).join(&stage_name);
    if !file.is_file() {
        return Err(PackageError::FileMissing(file));
    }
    let contents = std::fs::read_to_string(&file)
        .map_err(|err| PackageError::FileError(err.raw_os_error(), file.clone()))?;
    toml::from_str(&contents).map_err(|err| PackageError::Parse(err, Some(file)))
}
/// Resolve `names` plus their transitive dependencies into an ordered
/// package list (dependencies before dependents), up to `recursion` levels.
///
/// With `nonstop`, a partial result is accepted as long as at least one
/// package resolved; otherwise any failure aborts with the first recorded
/// error.
pub fn new_recursive(
    names: &[PackageName],
    nonstop: bool,
    recursion: usize,
) -> Result<Vec<Package>, PackageError> {
    if names.is_empty() {
        return Ok(Vec::new());
    }
    let (list, map) = new_recursive_nonstop(names, recursion);
    if nonstop && !list.is_empty() {
        Ok(list)
    } else if !nonstop && map.len() == list.len() {
        // Every visited package also made it into the ordered list.
        Ok(list)
    } else {
        // Surface the first recorded failure. One must exist: an empty list
        // (or a map larger than the list) implies at least one Err entry.
        let err = map
            .into_iter()
            .find_map(|(_, res)| res.err())
            .expect("resolution failed but no error was recorded");
        Err(err)
    }
}
/// List ordered success packages and map of failed packages.
///
/// Walks `names` depth-first, resolving each package and its dependencies up
/// to `recursion` levels deep. Returns the successfully loaded packages in
/// dependency order (dependencies pushed before their dependents) together
/// with a map recording, for every visited name, whether it resolved cleanly.
///
/// A package can be both in the success list and marked failed in the map if
/// its own metadata loaded but one of its dependencies wasn't satisfied.
pub fn new_recursive_nonstop(
    names: &[PackageName],
    recursion: usize,
) -> (
    Vec<Package>,
    BTreeMap<PackageName, Result<(), PackageError>>,
) {
    let mut packages = Vec::new();
    let mut packages_map = BTreeMap::new();
    for name in names {
        // Already visited, either as a direct request or as a dependency.
        if packages_map.contains_key(name) {
            continue;
        }
        let package = if recursion == 0 {
            // Recursion budget exhausted — treat as a cycle/too-deep error.
            Err(PackageError::Recursion(Default::default()))
        } else {
            new(name)
        };
        match package {
            Ok(package) => {
                let mut has_invalid_dependency = false;
                // Resolve dependencies first so they precede this package in
                // the output ordering.
                let (dependencies, dependencies_map) =
                    new_recursive_nonstop(&package.depends, recursion - 1);
                for dependency in dependencies {
                    if !packages_map.contains_key(&dependency.name) {
                        packages_map.insert(dependency.name.clone(), Ok(()));
                        packages.push(dependency);
                    }
                }
                for (dep_name, result) in dependencies_map {
                    if let Err(mut e) = result {
                        if !packages_map.contains_key(&dep_name) {
                            // Record the failed dependency, extending its
                            // recursion trace with the current package.
                            e.append_recursion(name);
                            packages_map.insert(dep_name, Err(e));
                        }
                        has_invalid_dependency = true;
                    }
                }
                // TODO: this check is redundant
                if !packages_map.contains_key(name) {
                    packages_map.insert(
                        name.clone(),
                        if has_invalid_dependency {
                            // Loaded, but one of its dependencies failed —
                            // still pushed to the list (see doc above).
                            Err(PackageError::DependencyInvalid(name.clone()))
                        } else {
                            Ok(())
                        },
                    );
                    packages.push(package);
                }
            }
            Err(e) => {
                packages_map.insert(name.clone(), Err(e));
            }
        }
    }
    (packages, packages_map)
}
+131
View File
@@ -0,0 +1,131 @@
use std::{
collections::{BTreeMap, BTreeSet, HashMap},
env, fs,
path::{Path, PathBuf},
};
use pkg::{Package, PackageName};
use crate::{
recipe::CookRecipe,
staged_pkg,
web::html::{generate_html_index, generate_html_pkg},
};
pub mod html;
/// Configuration for static web site generation, populated from the
/// `COOKBOOK_WEB*` environment variables by [`CliWebConfig::parse_args`].
#[derive(Clone)]
pub struct CliWebConfig {
    /// Path, relative to the current working directory, where the generated
    /// web files are written.
    out_dir: PathBuf,
    /// Absolute URL to the package repo itself (not the web pages); defaults
    /// to "/repo".
    repo_url: String,
    /// URL of the repository holding the build scripts (this repo).
    this_repo: String,
}
impl CliWebConfig {
    /// Build the web config from environment variables.
    ///
    /// Returns `None` unless `COOKBOOK_WEB=true` or the working directory
    /// cannot be determined. `COOKBOOK_WEB_REPO_URL` and
    /// `COOKBOOK_WEB_OUT_DIR` override the defaults (`/repo` and `web`).
    pub fn parse_args() -> Option<CliWebConfig> {
        if env::var("COOKBOOK_WEB").ok().as_deref() != Some("true") {
            return None;
        }
        let pwd = env::current_dir().ok()?;
        Some(CliWebConfig {
            // `unwrap_or_else` so the default String is only built on demand.
            repo_url: env::var("COOKBOOK_WEB_REPO_URL").unwrap_or_else(|_| "/repo".to_string()),
            out_dir: pwd.join(env::var("COOKBOOK_WEB_OUT_DIR").unwrap_or_else(|_| "web".to_string())),
            // TODO: Hardcoded URL, maybe get this remote-url next time
            this_repo: "https://gitlab.redox-os.org/redox-os/redox".to_string(),
        })
    }
}
/// Stylesheet bundled into the binary and written next to the generated HTML.
const CSS: &str = include_str!("./web/style.css");
/// Generate the static package web site for the current redoxer target.
///
/// For every name in `all_packages` that has a staged build, this writes a
/// per-package HTML page, then an index page grouped by recipe category and
/// the bundled stylesheet, into `<out_dir>/<target>/`. Names that fail to
/// parse or that have no staged package are skipped silently.
pub fn generate_web(all_packages: &[String], config: &CliWebConfig) {
    let repo_path = config.out_dir.join(redoxer::target());
    if !repo_path.is_dir() {
        fs::create_dir_all(&repo_path).unwrap();
    }

    // Collect (package, recipe) pairs that built successfully, plus a reverse
    // dependency map: dependency name -> set of dependent package names.
    let mut valid_packages = Vec::new();
    let mut dependents_map: HashMap<String, BTreeSet<String>> = HashMap::new();
    for package_name in all_packages {
        let Ok(package_name) = PackageName::new(package_name) else {
            continue;
        };
        let Some(recipe_path) = staged_pkg::find(package_name.name()) else {
            continue;
        };
        let Ok(mut package) = staged_pkg::from_path(recipe_path, package_name.suffix()) else {
            // TODO: report failed build
            continue;
        };
        let Ok(mut recipe) = CookRecipe::from_path(recipe_path, true, false) else {
            continue;
        };
        for dep in &package.depends {
            dependents_map
                .entry(dep.to_string())
                .or_default()
                .insert(package.name.to_string());
        }
        // TODO: temporary bug fix in the suffix lost
        package.name = package_name.clone();
        // CookRecipe::from_path always have no suffix
        recipe.name = package_name;
        valid_packages.push((package, recipe));
    }

    // One HTML page per package.
    for (package, recipe) in &valid_packages {
        let dependents = dependents_map
            .get(package.name.as_str())
            .cloned()
            .unwrap_or_default();
        let stage_files_path = recipe.stage_paths().0.with_added_extension("files");
        let stage_files = fs::read_to_string(stage_files_path).ok();
        let html_path = repo_path.join(format!("{}.html", package.name.as_str()));
        generate_html_pkg(
            package,
            recipe,
            &dependents.into_iter().collect(),
            &stage_files,
            &html_path,
            config,
        );
    }

    // Index page: packages grouped by category (the recipe's parent dir).
    let mut grouped_packages: BTreeMap<String, Vec<&(Package, CookRecipe)>> = BTreeMap::new();
    for item in &valid_packages {
        let category = get_category(&item.1.dir);
        grouped_packages.entry(category).or_default().push(item);
    }
    let index_path = repo_path.join("index.html");
    let style_path = repo_path.join("style.css");
    generate_html_index(grouped_packages, &index_path, config);
    fs::write(style_path, CSS).expect("Failed to write CSS file");
}
/// Derive a package's category from its recipe directory: the path of the
/// parent directory with the leading `recipes/` stripped
/// (e.g. `recipes/wip/games/foo` -> `wip/games`).
///
/// Returns `"uncategorized"` when the directory has no parent or the parent
/// is not under `recipes/`. The previous byte-slice implementation panicked
/// on any parent shorter than, or not starting with, `recipes/`.
pub(crate) fn get_category(dir: &Path) -> String {
    let Some(parent) = dir.parent().map(|p| p.display().to_string()) else {
        return "uncategorized".to_string();
    };
    match parent.strip_prefix("recipes/") {
        Some(category) if !category.is_empty() => category.to_string(),
        _ => "uncategorized".to_string(),
    }
}
+329
View File
@@ -0,0 +1,329 @@
use crate::cook::ident;
use crate::recipe::SourceRecipe;
use crate::web::get_category;
use crate::{cook::tree::format_size, recipe::CookRecipe};
use pkg::Package;
use std::collections::BTreeMap;
use std::{fs, path::Path};
/// Render the detail HTML page for one package and write it to `html_path`.
///
/// * `dependents` — reverse-dependency names computed by the caller.
/// * `stage_files` — contents of the recipe's staged `.files` listing, if any.
///
/// Panics if the page cannot be written to disk.
pub fn generate_html_pkg(
    package: &Package,
    recipe: &CookRecipe,
    dependents: &Vec<String>,
    stage_files: &Option<String>,
    html_path: &Path,
    config: &crate::web::CliWebConfig,
) {
    let name = &package.name;
    let version = &package.version;
    let target = &package.target;
    let category = &get_category(&recipe.dir);
    // Plain-text description, with a "-" placeholder when the recipe has none.
    let description = recipe
        .recipe
        .package
        .description
        .as_ref()
        .map(|p| p.as_str())
        .unwrap_or("-");
    // NOTE(review): the page body below renders both `desc_html` and
    // `{description}`, so a described package shows its description twice —
    // confirm the duplication is intended.
    let desc_html = recipe
        .recipe
        .package
        .description
        .as_ref()
        .map(|desc| format!(r#"<p class="description">{}</p>"#, desc))
        .unwrap_or_default();
    let repo_url = &config.repo_url;
    // Dependency list as links to sibling package pages.
    let deps_html = if package.depends.is_empty() {
        String::from("<p>None</p>")
    } else {
        let items: Vec<String> = package
            .depends
            .iter()
            .map(|dep| format!(r#"<li><a href="{dep}.html">{dep}</a></li>"#))
            .collect();
        format!("<ul>\n{}\n</ul>", items.join("\n"))
    };
    // Reverse-dependency list, same link format.
    let dependents_html = if dependents.is_empty() {
        String::from("<p>None</p>")
    } else {
        let items: Vec<String> = dependents
            .iter()
            .map(|dep| format!(r#"<li><a href="{dep}.html">{dep}</a></li>"#))
            .collect();
        format!("<ul>\n{}\n</ul>", items.join("\n"))
    };
    // Source provenance table; mutable because the build-script row is
    // appended below regardless of source kind.
    let mut source_html = match &recipe.recipe.source {
        Some(SourceRecipe::Git { git, .. }) => {
            let host = get_hostname(git);
            let tree_link = get_tree_url(git, host, &package.source_identifier, None);
            let short_commit = get_short_commit(&package.source_identifier);
            format!(
                r#"
<table>
<tr><th>Git:</th><td><a href="{git}" target="_blank">{host}</a></td></tr>
<tr><th>Commit:</th><td><a href="{tree_link}" target="_blank">{short_commit}</a></td></tr>
</table>"#
            )
        }
        Some(SourceRecipe::Tar { tar, .. }) => {
            let host = get_hostname(tar);
            format!(
                r#"<table>
<tr><th>Tarball:</th><td><a href="{tar}" target="_blank">{host}</a></td></tr>
</table>"#
            )
        }
        Some(SourceRecipe::SameAs { same_as }) => {
            // Link to the page of the recipe this one aliases.
            let r = Path::new(same_as).file_name().unwrap().to_string_lossy();
            format!(
                r#"<table>
<tr><th>Same as:</th><td><a href="{r}.html">{r}</a></td></tr>
</table>"#
            )
        }
        _ => String::from(r#"<p>No source specified.</p>"#),
    };
    // File listing from the staged `.files` manifest; count skips directory
    // entries (trailing '/') and blank lines.
    let (files_html, files_count) = if let Some(stage_files) = stage_files {
        let count = stage_files
            .split('\n')
            .filter(|p| !p.ends_with('/') && !p.is_empty())
            .count();
        (format!("<pre>{stage_files}</pre>"), format!("{}", count))
    } else {
        (
            String::from(r#"<p>No package files defined.</p>"#),
            String::from("?"),
        )
    };
    // Append a link to the recipe's build script at the commit this package
    // was built from.
    {
        let host = get_hostname(&config.this_repo);
        let tree_link = get_tree_url(
            &config.this_repo,
            host,
            &package.commit_identifier,
            Some(&format!("recipes/{category}/{}/recipe.toml", name.name())),
        );
        let short_commit = get_short_commit(&package.commit_identifier);
        source_html += &format!(
            r#"
<table>
<tr><th>Build script:</th><td><a href="{tree_link}" target="_blank">{short_commit}</a></td></tr>
</table>
"#
        );
    }
    // Split the target triple: first component is the architecture, third is
    // the OS (e.g. "x86_64-unknown-redox").
    let (arch, os) = {
        let target_split: Vec<&str> = package.target.split('-').collect();
        (
            target_split
                .get(0)
                .map(|s| s.to_string())
                .unwrap_or("-".into()),
            target_split
                .get(2)
                .map(|s| s.to_string())
                .unwrap_or("-".into()),
        )
    };
    // NOTE(review): `published_short` slices time_identifier[0..10], assuming
    // it starts with a YYYY-MM-DD date (>= 10 bytes) — confirm the format,
    // otherwise this panics.
    let html = format!(
        r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{name} - Red Bear OS Package</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<header class="pkg-header">
<div class="container">
<a href="index.html" class="back-link">&larr; Back to packages</a>
<h1>{name} <span class="version">{version}</span></h1>
{desc_html}
<p class="description">{description}</p>
<div class="install-action">
<span class="prompt">$</span>
<code>pkg install {name}</code>
</div>
</div>
</header>
<main class="pkg-content container">
<div class="pkg-main">
<section class="pkg-deps card">
<h2>Dependencies</h2>
{deps_html}
</section>
<section class="pkg-dependents card">
<h2>Dependents</h2>
{dependents_html}
</section>
<section class="pkg-recipe card">
<h2>Package Files</h2>
{files_html}
</section>
</div>
<section class="pkg-meta card">
<table>
<tr><th></th><td><a href="{repo_url}/{target}/{name}.pkgar" target="_blank">Download</a></td></tr>
</table>
<h2>Package Info</h2>
<table>
<tr><th>OS</th><td>{os}</td></tr>
<tr><th>Architecture</th><td>{arch}</td></tr>
<tr><th>Category</th><td><a href="index.html#cat-{category}">{category}</a></td></tr>
<tr><th>Network Size</th><td>{network_size}</td></tr>
<tr><th>Storage Size</th><td>{storage_size}</td></tr>
<tr><th>File count</th><td>{files_count}</td></tr>
<tr><th>Published</th><td title="{published}">{published_short}</td></tr>
<tr><th>Hash</th><td><code class="hash meta-box">{blake3}</code></td></tr>
</table>
<h2>Package Source</h2>
{source_html}
<div style="height:100px"></div>
</section>
</main>
</body>
</html>"#,
        network_size = format_size(package.network_size),
        storage_size = format_size(package.storage_size),
        published_short = &package.time_identifier[0..10],
        published = package.time_identifier,
        blake3 = package.blake3,
    );
    fs::write(html_path, html).expect("Failed to write package HTML file");
}
/// Write the repository index page listing all packages grouped by category.
///
/// Each category becomes a section of package cards (name, version, download
/// size); the footer links the build tree at the commit recorded by the
/// build identity. Panics if the page cannot be written.
pub fn generate_html_index(
    grouped_packages: BTreeMap<String, Vec<&(Package, CookRecipe)>>,
    index_path: &Path,
    config: &crate::web::CliWebConfig,
) {
    let mut categories_html = Vec::new();
    for (category, pkgs) in grouped_packages {
        // One card per package in this category.
        let cards_html: Vec<String> = pkgs
            .iter()
            .map(|(pkg, _recipe)| {
                let name = &pkg.name;
                format!(
                    r#"
<div class="package-card">
<h3 class="pkg-name"><a href="{name}.html">{name}</a></h3>
<div class="pkg-stats">
<span class="pkg-version">{version}</span>
<span class="pkg-size">{size}</span>
</div>
</div>"#,
                    name = name,
                    version = pkg.version,
                    size = format_size(pkg.network_size)
                )
            })
            .collect();
        let category_block = format!(
            r#"
<section class="category-section">
<h2 class="category-title" id="cat-{category}">{category}</h2>
<div class="package-grid">
{cards}
</div>
</section>"#,
            category = category,
            cards = cards_html.join("\n")
        );
        categories_html.push(category_block);
    }
    // Read the build identity once and reuse it for the footer (the original
    // called ident::get_ident() three times).
    let ident = ident::get_ident();
    let html = format!(
        r#"<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Red Bear OS Package Repository</title>
<link rel="stylesheet" href="style.css">
</head>
<body>
<header class="index-header">
<h1>Red Bear OS Package Repository</h1>
<p class="description">Repository for <code>{target}</code></p>
</header>
<main class="index-content container">
{category_sections}
<footer>
<p>Generated at <code>{commit_time}</code> with build tree <a href="{commit_tree}" target="_blank">{commit_hash}</a></p>
<div style="height:100px"></div>
</footer>
</main>
</body>
</html>"#,
        target = redoxer::target(),
        category_sections = categories_html.join("\n\n"),
        commit_time = &ident.time,
        commit_hash = get_short_commit(&ident.commit),
        commit_tree = get_tree_url(
            &config.this_repo,
            get_hostname(&config.this_repo),
            &ident.commit,
            None
        ),
    );
    fs::write(index_path, html).expect("Failed to write index HTML file");
}
/// Extract the bare host name from a URL-ish string: drops the scheme, the
/// path, any `user@` userinfo, and a trailing `:port`/`:path` part.
///
/// The userinfo step also fixes ssh remotes: `git@github.com:user/repo`
/// previously resolved to `git@github.com`, which broke the
/// `host == "github.com"` check in `get_tree_url`.
fn get_hostname(url: &str) -> &str {
    // Drop the scheme ("https://", "git://", ...), if present.
    let rest = url.split("://").nth(1).unwrap_or(url);
    // Keep only the authority part, before the first path separator.
    let authority = rest.split('/').next().unwrap_or(rest);
    // Drop any "user@" prefix (covers ssh "git@host:path" remotes too).
    let host_port = authority.rsplit('@').next().unwrap_or(authority);
    // Drop a ":port" (or ssh ":path") suffix.
    host_port.split(':').next().unwrap_or(host_port)
}
/// Build a browser URL for a commit's source tree (optionally a sub-folder)
/// on the forge hosting `git_url`. Unknown forges get a best-effort
/// `?commit=` link with no folder support.
pub fn get_tree_url(git_url: &str, host: &str, commit: &str, folder: Option<&str>) -> String {
    // Normalize the remote into a browsable https form.
    let trimmed = git_url.trim_end_matches(".git");
    let https_url = match trimmed.strip_prefix("git@") {
        // "git@github.com:user/repo" -> "https://github.com/user/repo"
        Some(ssh_path) => format!("https://{}", ssh_path.replace(':', "/")),
        None if trimmed.starts_with("git://") => {
            // "git://github.com/user/repo" -> "https://github.com/user/repo"
            trimmed.replacen("git://", "https://", 1)
        }
        None => trimmed.to_string(),
    };
    // Forges differ in how a commit's tree is addressed.
    let tree_url = if host == "github.com" {
        format!("{https_url}/tree/{commit}")
    } else if host.contains("gitlab") {
        format!("{https_url}/-/tree/{commit}")
    } else {
        // Unknown forge: the folder cannot be addressed, so return early.
        return format!("{https_url}?commit={commit}");
    };
    match folder {
        Some(f) => format!("{tree_url}/{f}"),
        None => tree_url,
    }
}
/// First seven characters of a commit hash, or "?" when it is too short.
fn get_short_commit(commit: &str) -> &str {
    match commit.get(..7) {
        Some(short) => short,
        None => "?",
    }
}
+292
View File
@@ -0,0 +1,292 @@
/* Stylesheet for the generated Red Bear OS package repository pages
 * (index + per-package pages). Light theme by default with a
 * prefers-color-scheme dark override at the bottom. */

/* --- Reset & base typography --- */
* {
    box-sizing: border-box;
    margin: 0;
    padding: 0;
}
body {
    font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif;
    background-color: #eee;
    color: #222;
    line-height: 1.6;
}
.container {
    max-width: 1280px;
    margin: 0 auto;
    padding: 0 20px;
}

/* --- Index page: category sections and package cards --- */
.category-section {
    margin-bottom: 50px;
}
.category-title {
    font-size: 1.5rem;
    color: #222;
    border-bottom: 2px solid #eee;
    padding-bottom: 10px;
    margin-bottom: 20px;
    font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;
}
.package-grid {
    /* block is the fallback for browsers without flexbox */
    display: block;
    display: flex;
    flex-wrap: wrap;
    margin: -10px;
}
.package-card {
    background-color: #fff;
    border: 1px solid #eee;
    border-radius: 6px;
    padding: 15px;
    margin: 10px;
    /* inline-block + width are the non-flex fallback layout */
    display: inline-block;
    width: 30%;
    vertical-align: top;
    display: flex;
    flex: 0 1 280px;
    flex-direction: column;
    justify-content: space-between;
}
.package-card .pkg-name {
    margin-bottom: 15px;
    font-size: 1.25rem;
}
.package-card .pkg-name a {
    border: none;
}
.package-card .pkg-name a:hover {
    text-decoration: underline;
}
.package-card .pkg-stats {
    /* block fallback, then flex */
    display: block;
    display: flex;
    justify-content: space-between;
    align-items: center;
    color: #6a737d;
    font-size: 0.9rem;
    border-top: 1px solid #eee;
    padding-top: 10px;
}
.package-card .pkg-version {
    font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, monospace;
    background-color: #fff;
    padding: 3px 6px;
    border-radius: 4px;
    color: #222;
}
.package-card .pkg-size {
    font-weight: 500;
}

/* --- Links, headings, inline code --- */
a {
    color: #222;
    text-decoration: none;
    border-bottom: 1px solid #eee;
}
a:hover {
    color: #000000;
    border-bottom: 1px solid #222;
}
h1, h2, h3 {
    font-weight: 600;
    margin: 1rem 0;
}
code {
    font-family: ui-monospace, SFMono-Regular, "SF Mono", Menlo, Consolas, "Liberation Mono", monospace;
    background-color: #fff;
    padding: 0.2em 0.4em;
    border-radius: 3px;
    font-size: 0.9em;
}

/* --- Shared card and page headers --- */
.card {
    background-color: #ffffff;
    border: 1px solid #eee;
    border-radius: 6px;
    padding: 20px;
    margin-bottom: 20px;
}
.pkg-header, .index-header {
    background-color: #ffffff;
    border-bottom: 1px solid #ddd;
    padding: 40px 0;
    margin-bottom: 40px;
    text-align: center;
}
.pkg-header h1 {
    font-size: 2.5rem;
    margin-bottom: 0.5rem;
}
.pkg-header .version {
    color: #6a737d;
    font-size: 1.5rem;
    font-weight: 400;
}
.pkg-header .description {
    font-size: 1.2rem;
    color: #586069;
    max-width: 600px;
    margin: 0 auto 1.5rem auto;
}
.back-link {
    display: inline-block;
    margin-bottom: 20px;
    color: #6a737d;
    border: none;
    font-size: 0.9rem;
}
.back-link:hover {
    color: #222;
    border: none;
}

/* --- "pkg install" call-to-action box --- */
.install-action {
    display: inline-block;
    background-color: #fff;
    border: 1px solid #ddd;
    border-radius: 6px;
    padding: 12px 20px;
    font-family: ui-monospace, SFMono-Regular, monospace;
    font-size: 1.1rem;
    color: #222;
}
.install-action .prompt {
    color: #6a737d;
    margin-right: 12px;
}
.install-action code {
    background-color: transparent;
    padding: 0;
    font-size: 1.1rem;
    /* one-click select of the whole install command */
    user-select: all;
}

/* --- Package page: two-column layout (stacked on narrow screens) --- */
.pkg-content {
    display: flex;
    flex-wrap: wrap;
    justify-content: space-between;
}
.pkg-main, .pkg-meta {
    width: 100%;
}
@media (min-width: 768px) {
    .pkg-main {
        width: 60%;
    }
    .pkg-meta {
        width: 35%;
    }
}
.meta-box {
    /* long hashes scroll horizontally instead of wrapping */
    overflow-x: auto;
    display: block;
    max-width: 150px;
    user-select: all;
    padding: 8px;
    white-space: nowrap;
}

/* --- Tables and dependency lists --- */
table {
    width: 100%;
    border-collapse: collapse;
}
th, td {
    padding: 10px 0;
    text-align: left;
    border-bottom: 1px solid #eee;
}
th {
    color: #6a737d;
    font-weight: 500;
}
.pkg-meta table th {
    width: 40%;
    padding-right: 10px;
}
.pkg-deps ul, .pkg-dependents ul {
    list-style-type: none;
    display: flex;
    flex-wrap: wrap;
}
.pkg-deps li, .pkg-dependents li {
    padding: 8px 0;
    border-bottom: 1px solid #eee;
    width: 50%;
}

/* --- Dark mode overrides --- */
@media (prefers-color-scheme: dark) {
    body {
        background-color: #000;
        color: #ccc;
    }
    .package-card, .card, .pkg-header, .index-header {
        background-color: #111;
        border-color: #333;
    }
    .category-title {
        color: #f0f6fc;
        border-bottom-color: #333;
    }
    .package-card .pkg-stats {
        color: #8b949e;
        border-top-color: #333;
    }
    .package-card .pkg-version, code, .install-action {
        background-color: #222;
        color: #cdd;
        border-color: #333;
    }
    a, .pkg-header h1, .back-link:hover {
        color: #5af;
        border-bottom-color: #333;
    }
    a:hover {
        color: #7cf;
        border-bottom-color: #7cf;
    }
    .pkg-header .version, .pkg-header .description, .back-link, .install-action .prompt, th {
        color: #999;
    }
    th, td, .pkg-deps li, .pkg-dependents li {
        border-bottom-color: #333;
    }
}