feat: add Cub package backend modules

Ultraworked with [Sisyphus](https://github.com/code-yeongyu/oh-my-openagent)

Co-authored-by: Sisyphus <clio-agent@sisyphuslabs.ai>
commit 900fefc46e (parent 1f98fc18a8)
2026-05-07 20:57:11 +01:00
3 changed files with 1941 additions and 0 deletions
@@ -0,0 +1,773 @@
use std::collections::BTreeMap;
use crate::error::CubError;
const DEFAULT_AUR_BASE_URL: &str = "https://aur.archlinux.org";
#[derive(Debug, Clone, PartialEq)]
pub struct AurPackage {
pub name: String,
pub version: String,
pub description: String,
pub url: String,
pub license: Vec<String>,
pub depends: Vec<String>,
pub makedepends: Vec<String>,
pub optdepends: Vec<String>,
pub provides: Vec<String>,
pub conflicts: Vec<String>,
pub num_votes: u64,
pub popularity: f64,
pub last_modified: i64,
pub out_of_date: Option<bool>,
}
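// Client for the AUR RPC v5 interface. The HTTP transport is only compiled
// when the `full` feature is enabled; without it, `search` and `info` return
// an error explaining that the feature is missing.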
#[cfg(feature = "full")]
pub struct AurClient {
pub base_url: String,
client: reqwest::blocking::Client,
}
#[cfg(not(feature = "full"))]
pub struct AurClient {
pub base_url: String,
}
impl AurClient {
pub fn new() -> Self {
#[cfg(feature = "full")]
{
Self {
base_url: DEFAULT_AUR_BASE_URL.to_string(),
client: reqwest::blocking::Client::new(),
}
}
#[cfg(not(feature = "full"))]
{
Self {
base_url: DEFAULT_AUR_BASE_URL.to_string(),
}
}
}
pub fn search(&self, query: &str, by: Option<&str>) -> Result<Vec<AurPackage>, CubError> {
#[cfg(feature = "full")]
{
let trimmed_query = query.trim();
if trimmed_query.is_empty() {
return Err(aur_error("search query cannot be empty"));
}
let mut params = vec![("v", "5"), ("type", "search"), ("arg", trimmed_query)];
if let Some(field) = by.and_then(non_empty_trimmed) {
params.push(("by", field));
}
let url = self.rpc_url(&params)?;
self.fetch(
url,
format!("no results found for search query '{trimmed_query}'"),
)
}
#[cfg(not(feature = "full"))]
{
let _ = (query, by);
Err(feature_not_enabled_error())
}
}
pub fn info(&self, pkgs: &[&str]) -> Result<Vec<AurPackage>, CubError> {
#[cfg(feature = "full")]
{
if pkgs.is_empty() {
return Err(aur_error("info request requires at least one package"));
}
let mut url = self.rpc_url(&[("v", "5"), ("type", "info")])?;
let mut appended = 0usize;
{
let mut pairs = url.query_pairs_mut();
for pkg in pkgs.iter().copied().filter_map(non_empty_trimmed) {
pairs.append_pair("arg[]", pkg);
appended += 1;
}
}
if appended == 0 {
return Err(aur_error("info request requires at least one package"));
}
self.fetch(url, "no packages found for info request".to_string())
}
#[cfg(not(feature = "full"))]
{
let _ = pkgs;
Err(feature_not_enabled_error())
}
}
#[cfg(feature = "full")]
fn rpc_url(&self, params: &[(&str, &str)]) -> Result<reqwest::Url, CubError> {
let mut url = reqwest::Url::parse(&self.base_url)
.map_err(|err| aur_error(format!("invalid base URL '{}': {err}", self.base_url)))?;
url.set_path("rpc");
{
let mut pairs = url.query_pairs_mut();
for (key, value) in params {
pairs.append_pair(key, value);
}
}
Ok(url)
}
#[cfg(feature = "full")]
fn fetch(&self, url: reqwest::Url, empty_message: String) -> Result<Vec<AurPackage>, CubError> {
let response = self
.client
.get(url)
.send()
.map_err(|err| aur_error(format!("request failed: {err}")))?
.error_for_status()
.map_err(|err| aur_error(format!("request failed: {err}")))?;
let body = response
.text()
.map_err(|err| aur_error(format!("failed to read response body: {err}")))?;
parse_rpc_response(&body, &empty_message)
}
}
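// Usage sketch (not part of the original commit; assumes the `full` feature
// and network access to the AUR):
//
//     let client = AurClient::new();
//     let results = client.search("ripgrep", None)?;
//     let details = client.info(&["ripgrep-all"])?;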
fn non_empty_trimmed(value: &str) -> Option<&str> {
let trimmed = value.trim();
if trimmed.is_empty() {
None
} else {
Some(trimmed)
}
}
fn aur_error(message: impl Into<String>) -> CubError {
CubError::Conversion(format!("AUR: {}", message.into()))
}
#[cfg(not(feature = "full"))]
fn feature_not_enabled_error() -> CubError {
CubError::Conversion("reqwest feature not enabled".into())
}
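// Parses an AUR RPC v5 response body: validates the envelope (version, type,
// resultcount) and converts each entry in `results` into an AurPackage.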
fn parse_rpc_response(body: &str, empty_message: &str) -> Result<Vec<AurPackage>, CubError> {
let root = JsonParser::new(body)
.parse()
.map_err(|err| aur_error(format!("failed to parse JSON response: {err}")))?;
let object = match root {
JsonValue::Object(object) => object,
_ => {
return Err(aur_error(
"invalid JSON response: expected top-level object",
))
}
};
let version = require_i64(&object, "version")?;
if version != 5 {
return Err(aur_error(format!("unexpected RPC version: {version}")));
}
let response_type = require_string(&object, "type")?;
if response_type == "error" {
let message =
optional_string(&object, "error")?.unwrap_or_else(|| "unknown AUR error".to_string());
return Err(aur_error(message));
}
if response_type != "search" && response_type != "multiinfo" && response_type != "info" {
return Err(aur_error(format!(
"unexpected RPC response type: {response_type}"
)));
}
let resultcount = require_i64(&object, "resultcount")?;
let results = require_array(&object, "results")?;
if resultcount <= 0 || results.is_empty() {
return Err(aur_error(empty_message.to_string()));
}
let mut packages = Vec::with_capacity(results.len());
for value in results {
packages.push(AurPackage::from_json_value(value)?);
}
if packages.is_empty() {
return Err(aur_error(empty_message.to_string()));
}
Ok(packages)
}
impl AurPackage {
fn from_json_value(value: &JsonValue) -> Result<Self, CubError> {
let object = match value {
JsonValue::Object(object) => object,
_ => return Err(aur_error("invalid package entry: expected object")),
};
Ok(Self {
name: require_string(object, "Name")?,
version: require_string(object, "Version")?,
description: optional_string(object, "Description")?.unwrap_or_default(),
url: optional_string(object, "URL")?.unwrap_or_default(),
license: optional_string_list(object, "License")?,
depends: optional_string_list(object, "Depends")?,
makedepends: optional_string_list(object, "MakeDepends")?,
optdepends: optional_string_list(object, "OptDepends")?,
provides: optional_string_list(object, "Provides")?,
conflicts: optional_string_list(object, "Conflicts")?,
num_votes: optional_u64(object, "NumVotes")?.unwrap_or(0),
popularity: optional_f64(object, "Popularity")?.unwrap_or(0.0),
last_modified: optional_i64(object, "LastModified")?.unwrap_or(0),
out_of_date: optional_out_of_date(object, "OutOfDate")?,
})
}
}
fn require_string(object: &BTreeMap<String, JsonValue>, key: &str) -> Result<String, CubError> {
optional_string(object, key)?.ok_or_else(|| aur_error(format!("missing field '{key}'")))
}
fn optional_string(
object: &BTreeMap<String, JsonValue>,
key: &str,
) -> Result<Option<String>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(None),
Some(JsonValue::String(value)) => Ok(Some(value.clone())),
Some(_) => Err(aur_error(format!("invalid field '{key}': expected string"))),
}
}
fn require_array<'a>(
object: &'a BTreeMap<String, JsonValue>,
key: &str,
) -> Result<&'a [JsonValue], CubError> {
match object.get(key) {
Some(JsonValue::Array(values)) => Ok(values.as_slice()),
Some(_) => Err(aur_error(format!("invalid field '{key}': expected array"))),
None => Err(aur_error(format!("missing field '{key}'"))),
}
}
fn optional_string_list(
object: &BTreeMap<String, JsonValue>,
key: &str,
) -> Result<Vec<String>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(Vec::new()),
Some(JsonValue::String(value)) => Ok(vec![value.clone()]),
Some(JsonValue::Array(values)) => {
let mut items = Vec::with_capacity(values.len());
for value in values {
match value {
JsonValue::String(item) => items.push(item.clone()),
JsonValue::Null => {}
_ => {
return Err(aur_error(format!(
"invalid field '{key}': expected string array"
)))
}
}
}
Ok(items)
}
Some(_) => Err(aur_error(format!(
"invalid field '{key}': expected string array"
))),
}
}
fn require_i64(object: &BTreeMap<String, JsonValue>, key: &str) -> Result<i64, CubError> {
optional_i64(object, key)?.ok_or_else(|| aur_error(format!("missing field '{key}'")))
}
fn optional_i64(object: &BTreeMap<String, JsonValue>, key: &str) -> Result<Option<i64>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(None),
Some(JsonValue::Integer(value)) => Ok(Some(*value)),
Some(_) => Err(aur_error(format!(
"invalid field '{key}': expected integer"
))),
}
}
fn optional_u64(object: &BTreeMap<String, JsonValue>, key: &str) -> Result<Option<u64>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(None),
Some(JsonValue::Integer(value)) if *value >= 0 => Ok(Some(*value as u64)),
Some(_) => Err(aur_error(format!(
"invalid field '{key}': expected unsigned integer"
))),
}
}
fn optional_f64(object: &BTreeMap<String, JsonValue>, key: &str) -> Result<Option<f64>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(None),
Some(JsonValue::Integer(value)) => Ok(Some(*value as f64)),
Some(JsonValue::Float(value)) => Ok(Some(*value)),
Some(_) => Err(aur_error(format!("invalid field '{key}': expected number"))),
}
}
fn optional_out_of_date(
object: &BTreeMap<String, JsonValue>,
key: &str,
) -> Result<Option<bool>, CubError> {
match object.get(key) {
None | Some(JsonValue::Null) => Ok(None),
Some(JsonValue::Bool(value)) => Ok(Some(*value)),
Some(JsonValue::Integer(value)) => Ok(Some(*value != 0)),
Some(JsonValue::Float(value)) => Ok(Some(*value != 0.0)),
Some(_) => Err(aur_error(format!(
"invalid field '{key}': expected bool or number"
))),
}
}
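// Minimal JSON representation and recursive-descent parser covering the subset
// returned by the AUR RPC endpoint: objects, arrays, strings, numbers (split
// into integers and floats), booleans, and null.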
#[derive(Debug, Clone, PartialEq)]
enum JsonValue {
Null,
Bool(bool),
Integer(i64),
Float(f64),
String(String),
Array(Vec<JsonValue>),
Object(BTreeMap<String, JsonValue>),
}
struct JsonParser<'a> {
input: &'a [u8],
position: usize,
}
impl<'a> JsonParser<'a> {
fn new(input: &'a str) -> Self {
Self {
input: input.as_bytes(),
position: 0,
}
}
fn parse(mut self) -> Result<JsonValue, String> {
let value = self.parse_value()?;
self.skip_whitespace();
if self.position != self.input.len() {
return Err(format!(
"unexpected trailing content at byte {}",
self.position
));
}
Ok(value)
}
fn parse_value(&mut self) -> Result<JsonValue, String> {
self.skip_whitespace();
match self.peek_byte() {
Some(b'{') => self.parse_object(),
Some(b'[') => self.parse_array(),
Some(b'"') => self.parse_string().map(JsonValue::String),
Some(b't') => self.parse_true(),
Some(b'f') => self.parse_false(),
Some(b'n') => self.parse_null(),
Some(b'-' | b'0'..=b'9') => self.parse_number(),
Some(byte) => Err(format!(
"unexpected byte '{}' at {}",
byte as char, self.position
)),
None => Err("unexpected end of input".to_string()),
}
}
fn parse_object(&mut self) -> Result<JsonValue, String> {
self.expect_byte(b'{')?;
self.skip_whitespace();
let mut object = BTreeMap::new();
if self.peek_byte() == Some(b'}') {
self.position += 1;
return Ok(JsonValue::Object(object));
}
loop {
self.skip_whitespace();
let key = self.parse_string()?;
self.skip_whitespace();
self.expect_byte(b':')?;
let value = self.parse_value()?;
object.insert(key, value);
self.skip_whitespace();
match self.peek_byte() {
Some(b',') => {
self.position += 1;
}
Some(b'}') => {
self.position += 1;
break;
}
Some(byte) => {
return Err(format!(
"unexpected byte '{}' in object at {}",
byte as char, self.position
))
}
None => return Err("unexpected end of input while parsing object".to_string()),
}
}
Ok(JsonValue::Object(object))
}
fn parse_array(&mut self) -> Result<JsonValue, String> {
self.expect_byte(b'[')?;
self.skip_whitespace();
let mut values = Vec::new();
if self.peek_byte() == Some(b']') {
self.position += 1;
return Ok(JsonValue::Array(values));
}
loop {
values.push(self.parse_value()?);
self.skip_whitespace();
match self.peek_byte() {
Some(b',') => {
self.position += 1;
}
Some(b']') => {
self.position += 1;
break;
}
Some(byte) => {
return Err(format!(
"unexpected byte '{}' in array at {}",
byte as char, self.position
))
}
None => return Err("unexpected end of input while parsing array".to_string()),
}
}
Ok(JsonValue::Array(values))
}
fn parse_string(&mut self) -> Result<String, String> {
self.expect_byte(b'"')?;
let mut value = String::new();
loop {
match self.peek_byte() {
Some(b'"') => {
self.position += 1;
return Ok(value);
}
Some(b'\\') => {
self.position += 1;
value.push(self.parse_escape_sequence()?);
}
Some(0x00..=0x1F) => {
return Err(format!(
"invalid control character in string at {}",
self.position
))
}
Some(_) => value.push_str(&self.parse_raw_string_segment()?),
None => break,
}
}
Err("unexpected end of input while parsing string".to_string())
}
fn parse_raw_string_segment(&mut self) -> Result<String, String> {
let start = self.position;
while let Some(byte) = self.peek_byte() {
if matches!(byte, b'"' | b'\\' | 0x00..=0x1F) {
break;
}
self.position += 1;
}
let segment = std::str::from_utf8(&self.input[start..self.position])
.map_err(|err| format!("invalid UTF-8 in string: {err}"))?;
Ok(segment.to_string())
}
fn parse_escape_sequence(&mut self) -> Result<char, String> {
match self.next_byte() {
Some(b'"') => Ok('"'),
Some(b'\\') => Ok('\\'),
Some(b'/') => Ok('/'),
Some(b'b') => Ok('\u{0008}'),
Some(b'f') => Ok('\u{000C}'),
Some(b'n') => Ok('\n'),
Some(b'r') => Ok('\r'),
Some(b't') => Ok('\t'),
Some(b'u') => self.parse_unicode_escape(),
Some(byte) => Err(format!(
"invalid escape byte '{}' at {}",
byte as char,
self.position.saturating_sub(1)
)),
None => Err("unexpected end of input while parsing escape sequence".to_string()),
}
}
fn parse_unicode_escape(&mut self) -> Result<char, String> {
let start = self.position;
let mut value = 0u32;
for _ in 0..4 {
let byte = self.next_byte().ok_or_else(|| {
"unexpected end of input while parsing unicode escape".to_string()
})?;
value = (value << 4)
| match byte {
b'0'..=b'9' => (byte - b'0') as u32,
b'a'..=b'f' => (byte - b'a' + 10) as u32,
b'A'..=b'F' => (byte - b'A' + 10) as u32,
_ => {
return Err(format!(
"invalid unicode escape at byte {}",
start.saturating_sub(2)
))
}
};
}
char::from_u32(value).ok_or_else(|| format!("invalid unicode scalar value: {value:#X}"))
}
fn parse_true(&mut self) -> Result<JsonValue, String> {
self.expect_keyword(b"true")?;
Ok(JsonValue::Bool(true))
}
fn parse_false(&mut self) -> Result<JsonValue, String> {
self.expect_keyword(b"false")?;
Ok(JsonValue::Bool(false))
}
fn parse_null(&mut self) -> Result<JsonValue, String> {
self.expect_keyword(b"null")?;
Ok(JsonValue::Null)
}
fn parse_number(&mut self) -> Result<JsonValue, String> {
let start = self.position;
if self.peek_byte() == Some(b'-') {
self.position += 1;
}
self.parse_digits()?;
if self.peek_byte() == Some(b'.') {
self.position += 1;
self.parse_digits()?;
}
if matches!(self.peek_byte(), Some(b'e' | b'E')) {
self.position += 1;
if matches!(self.peek_byte(), Some(b'+' | b'-')) {
self.position += 1;
}
self.parse_digits()?;
}
let slice = std::str::from_utf8(&self.input[start..self.position])
.map_err(|err| format!("invalid UTF-8 in number: {err}"))?;
if slice
.as_bytes()
.iter()
.any(|byte| matches!(byte, b'.' | b'e' | b'E'))
{
slice
.parse::<f64>()
.map(JsonValue::Float)
.map_err(|err| format!("invalid number '{slice}': {err}"))
} else {
slice
.parse::<i64>()
.map(JsonValue::Integer)
.map_err(|err| format!("invalid number '{slice}': {err}"))
}
}
fn parse_digits(&mut self) -> Result<(), String> {
let start = self.position;
while matches!(self.peek_byte(), Some(b'0'..=b'9')) {
self.position += 1;
}
if self.position == start {
Err(format!("expected digit at byte {}", self.position))
} else {
Ok(())
}
}
fn expect_keyword(&mut self, keyword: &[u8]) -> Result<(), String> {
for expected in keyword {
match self.next_byte() {
Some(byte) if byte == *expected => {}
Some(byte) => {
return Err(format!(
"unexpected byte '{}' while parsing keyword at {}",
byte as char,
self.position.saturating_sub(1)
))
}
None => return Err("unexpected end of input while parsing keyword".to_string()),
}
}
Ok(())
}
fn skip_whitespace(&mut self) {
while matches!(self.peek_byte(), Some(b' ' | b'\n' | b'\r' | b'\t')) {
self.position += 1;
}
}
fn expect_byte(&mut self, expected: u8) -> Result<(), String> {
match self.next_byte() {
Some(byte) if byte == expected => Ok(()),
Some(byte) => Err(format!(
"expected '{}' but found '{}' at {}",
expected as char,
byte as char,
self.position.saturating_sub(1)
)),
None => Err(format!(
"expected '{}' but reached end of input",
expected as char
)),
}
}
fn peek_byte(&self) -> Option<u8> {
self.input.get(self.position).copied()
}
fn next_byte(&mut self) -> Option<u8> {
let byte = self.peek_byte()?;
self.position += 1;
Some(byte)
}
}
#[cfg(test)]
mod tests {
use super::*;
const SAMPLE_RESPONSE: &str = r#"
{
"version": 5,
"type": "search",
"resultcount": 1,
"results": [
{
"Name": "ripgrep-all",
"Version": "0.10.6-1",
"Description": "ripgrep, but also search in PDFs, E-Books, Office documents, zip, tar.gz, etc.",
"URL": "https://github.com/phiresky/ripgrep-all",
"License": ["AGPL-3.0-only"],
"Depends": ["poppler", "ffmpeg"],
"MakeDepends": ["rust"],
"OptDepends": ["pandoc: search markdown conversions"],
"Provides": ["rga"],
"Conflicts": ["rga-git"],
"NumVotes": 412,
"Popularity": 4.52,
"LastModified": 1718300444,
"OutOfDate": null
}
]
}
"#;
const OUT_OF_DATE_RESPONSE: &str = r#"
{
"version": 5,
"type": "multiinfo",
"resultcount": 1,
"results": [
{
"Name": "demo-pkg",
"Version": "1.2.3-1",
"Description": "demo",
"URL": null,
"License": "MIT",
"Depends": null,
"MakeDepends": [],
"OptDepends": [],
"Provides": [],
"Conflicts": [],
"NumVotes": 3,
"Popularity": 0.75,
"LastModified": 1718300000,
"OutOfDate": 1719000000
}
]
}
"#;
#[test]
fn deserializes_aur_package_from_sample_json() {
let packages =
parse_rpc_response(SAMPLE_RESPONSE, "no results").expect("parse sample AUR JSON");
let package = packages.first().expect("sample package");
assert_eq!(package.name, "ripgrep-all");
assert_eq!(package.version, "0.10.6-1");
assert_eq!(
package.description,
"ripgrep, but also search in PDFs, E-Books, Office documents, zip, tar.gz, etc."
);
assert_eq!(package.url, "https://github.com/phiresky/ripgrep-all");
assert_eq!(package.license, vec!["AGPL-3.0-only"]);
assert_eq!(package.depends, vec!["poppler", "ffmpeg"]);
assert_eq!(package.makedepends, vec!["rust"]);
assert_eq!(
package.optdepends,
vec!["pandoc: search markdown conversions"]
);
assert_eq!(package.provides, vec!["rga"]);
assert_eq!(package.conflicts, vec!["rga-git"]);
assert_eq!(package.num_votes, 412);
assert_eq!(package.popularity, 4.52);
assert_eq!(package.last_modified, 1718300444);
assert_eq!(package.out_of_date, None);
}
#[test]
fn converts_non_null_out_of_date_to_true() {
let packages =
parse_rpc_response(OUT_OF_DATE_RESPONSE, "no results").expect("parse sample AUR JSON");
let package = packages.first().expect("sample package");
assert_eq!(package.url, "");
assert_eq!(package.license, vec!["MIT"]);
assert_eq!(package.depends, Vec::<String>::new());
assert_eq!(package.out_of_date, Some(true));
}
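// Additional sketch test (not in the original commit): a payload with
// "type": "error" should surface the error instead of returning packages.
#[test]
fn surfaces_rpc_error_responses() {
let body = r#"{"version": 5, "type": "error", "resultcount": 0, "results": [], "error": "boom"}"#;
assert!(parse_rpc_response(body, "no results").is_err());
}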
}
@@ -0,0 +1,371 @@
use std::ffi::OsString;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
use crate::error::CubError;
use crate::sandbox::SandboxConfig;
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct CookResult {
pub pkgar_path: PathBuf,
pub toml_path: PathBuf,
pub stage_dir: PathBuf,
}
pub fn cook_recipe(
recipe_dir: &Path,
target: &str,
repo_dir: &Path,
) -> Result<CookResult, CubError> {
let repo_binary = find_repo_binary_with_hint(repo_dir).ok_or_else(|| {
CubError::BuildFailed(
"repo binary not found. Build tools must be installed inside Red Bear OS to cook recipes."
.to_string(),
)
})?;
let mut sandbox = SandboxConfig::new(recipe_dir);
sandbox.target = target.to_string();
sandbox.gnu_target = gnu_target_for(target);
sandbox.setup()?;
let output = Command::new(&repo_binary)
.arg("cook")
.arg(recipe_dir)
.current_dir(repo_dir)
.envs(sandbox.env_vars())
.output()?;
if !output.status.success() {
return Err(CubError::BuildFailed(render_command_failure(
&output.stdout,
&output.stderr,
)));
}
let recipe_name = recipe_name(recipe_dir)?;
let pkgar_path =
find_repo_artifact(repo_dir, target, &recipe_name, "pkgar")?.ok_or_else(|| {
CubError::BuildFailed(format!(
"repo cook succeeded, but no .pkgar artifact was found for recipe '{}' under {}",
recipe_name,
repo_dir.display()
))
})?;
let toml_path = find_repo_artifact(repo_dir, target, &recipe_name, "toml")?
.unwrap_or_else(|| recipe_dir.join("recipe.toml"));
Ok(CookResult {
pkgar_path,
toml_path,
stage_dir: sandbox.stage_dir,
})
}
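// Call sketch (not part of the original commit; the recipe and repo paths are
// illustrative placeholders):
//
//     let result = cook_recipe(
//         Path::new("recipes/demo"),
//         "x86_64-unknown-redox",
//         Path::new("cookbook"),
//     )?;
//     println!("built {}", result.pkgar_path.display());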
pub fn is_repo_available() -> bool {
find_repo_binary().is_some()
}
pub fn find_repo_binary() -> Option<PathBuf> {
let cwd = std::env::current_dir().ok();
find_repo_binary_from(std::env::var_os("PATH"), cwd.as_deref(), None)
}
pub fn cook_available() -> Result<(), CubError> {
if is_repo_available() {
Ok(())
} else {
Err(CubError::BuildFailed(
"repo binary not found. Build tools must be installed inside Red Bear OS to cook recipes."
.to_string(),
))
}
}
fn find_repo_binary_with_hint(repo_dir: &Path) -> Option<PathBuf> {
let cwd = std::env::current_dir().ok();
find_repo_binary_from(std::env::var_os("PATH"), cwd.as_deref(), Some(repo_dir))
}
fn find_repo_binary_from(
path_env: Option<OsString>,
cwd: Option<&Path>,
repo_dir: Option<&Path>,
) -> Option<PathBuf> {
if let Some(path_env) = path_env {
for entry in std::env::split_paths(&path_env) {
let candidate = entry.join("repo");
if is_executable_file(&candidate) {
return Some(candidate);
}
}
}
if let Some(repo_dir) = repo_dir {
let candidate = repo_dir.join("target/release/repo");
if is_executable_file(&candidate) {
return Some(candidate);
}
}
if let Some(cwd) = cwd {
let candidate = cwd.join("target/release/repo");
if is_executable_file(&candidate) {
return Some(candidate);
}
}
None
}
fn find_repo_artifact(
repo_dir: &Path,
target: &str,
recipe_name: &str,
extension: &str,
) -> Result<Option<PathBuf>, CubError> {
for directory in repo_artifact_dirs(repo_dir, target) {
if let Some(path) = find_artifact_in_dir(&directory, recipe_name, extension)? {
return Ok(Some(path));
}
}
Ok(None)
}
fn repo_artifact_dirs(repo_dir: &Path, target: &str) -> Vec<PathBuf> {
let mut dirs = Vec::new();
for dir in [
repo_dir.join("repo").join(target),
repo_dir.join(target),
repo_dir.to_path_buf(),
] {
if !dirs.iter().any(|existing| existing == &dir) {
dirs.push(dir);
}
}
dirs
}
fn find_artifact_in_dir(
dir: &Path,
recipe_name: &str,
extension: &str,
) -> Result<Option<PathBuf>, CubError> {
if !dir.is_dir() {
return Ok(None);
}
let exact = dir.join(format!("{recipe_name}.{extension}"));
if exact.is_file() {
return Ok(Some(exact));
}
let mut latest: Option<(std::time::SystemTime, PathBuf)> = None;
for entry in fs::read_dir(dir)? {
let entry = entry?;
let path = entry.path();
if !path.is_file() || path.extension().and_then(|ext| ext.to_str()) != Some(extension) {
continue;
}
let modified = entry
.metadata()
.and_then(|metadata| metadata.modified())
.unwrap_or(std::time::SystemTime::UNIX_EPOCH);
match &latest {
Some((latest_modified, _)) if modified <= *latest_modified => {}
_ => latest = Some((modified, path)),
}
}
Ok(latest.map(|(_, path)| path))
}
fn is_executable_file(path: &Path) -> bool {
let metadata = match fs::metadata(path) {
Ok(metadata) => metadata,
Err(_) => return false,
};
if !metadata.is_file() {
return false;
}
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
metadata.permissions().mode() & 0o111 != 0
}
#[cfg(not(unix))]
{
true
}
}
fn gnu_target_for(target: &str) -> String {
if let Some(prefix) = target.strip_suffix("-unknown-redox") {
format!("{prefix}-redox")
} else {
target.to_string()
}
}
fn recipe_name(recipe_dir: &Path) -> Result<String, CubError> {
recipe_dir
.file_name()
.and_then(|name| name.to_str())
.map(|name| name.to_string())
.ok_or_else(|| {
CubError::BuildFailed(format!(
"could not determine recipe name from {}",
recipe_dir.display()
))
})
}
fn render_command_failure(stdout: &[u8], stderr: &[u8]) -> String {
let stderr = String::from_utf8_lossy(stderr).trim().to_string();
let stdout = String::from_utf8_lossy(stdout).trim().to_string();
if !stderr.is_empty() {
stderr
} else if !stdout.is_empty() {
stdout
} else {
"repo cook failed without diagnostic output".to_string()
}
}
#[cfg(test)]
mod tests {
use super::{find_repo_binary, is_repo_available};
use std::fs;
use std::path::{Path, PathBuf};
use std::sync::{Mutex, OnceLock};
use tempfile::tempdir;
fn env_lock() -> &'static Mutex<()> {
static LOCK: OnceLock<Mutex<()>> = OnceLock::new();
LOCK.get_or_init(|| Mutex::new(()))
}
struct TestEnvGuard {
previous_path: Option<std::ffi::OsString>,
previous_cwd: PathBuf,
}
impl TestEnvGuard {
fn new(path: Option<&Path>, cwd: &Path) -> Self {
let previous_path = std::env::var_os("PATH");
let previous_cwd = std::env::current_dir().expect("current dir");
if let Some(path) = path {
// SAFETY: tests serialize PATH mutations with a process-wide mutex.
unsafe { std::env::set_var("PATH", path) }
} else {
// SAFETY: tests serialize PATH mutations with a process-wide mutex.
unsafe { std::env::remove_var("PATH") }
}
std::env::set_current_dir(cwd).expect("set current dir");
Self {
previous_path,
previous_cwd,
}
}
}
impl Drop for TestEnvGuard {
fn drop(&mut self) {
if let Some(path) = &self.previous_path {
// SAFETY: tests serialize PATH mutations with a process-wide mutex.
unsafe { std::env::set_var("PATH", path) }
} else {
// SAFETY: tests serialize PATH mutations with a process-wide mutex.
unsafe { std::env::remove_var("PATH") }
}
std::env::set_current_dir(&self.previous_cwd).expect("restore current dir");
}
}
#[test]
fn find_repo_binary_uses_path_entry() {
let _guard = env_lock().lock().expect("lock env");
let temp = tempdir().expect("tempdir");
let bin_dir = temp.path().join("bin");
let cwd = temp.path().join("cwd");
fs::create_dir_all(&bin_dir).expect("create bin dir");
fs::create_dir_all(&cwd).expect("create cwd dir");
let repo_binary = bin_dir.join("repo");
create_executable(&repo_binary);
let _env = TestEnvGuard::new(Some(&bin_dir), &cwd);
assert_eq!(find_repo_binary(), Some(repo_binary));
}
#[test]
fn find_repo_binary_uses_local_release_binary() {
let _guard = env_lock().lock().expect("lock env");
let temp = tempdir().expect("tempdir");
let cwd = temp.path().join("project");
let local_repo = cwd.join("target/release/repo");
fs::create_dir_all(local_repo.parent().expect("local repo parent"))
.expect("create target dir");
create_executable(&local_repo);
let _env = TestEnvGuard::new(None, &cwd);
assert_eq!(find_repo_binary(), Some(local_repo));
}
#[test]
fn is_repo_available_reports_true_when_local_binary_exists() {
let _guard = env_lock().lock().expect("lock env");
let temp = tempdir().expect("tempdir");
let cwd = temp.path().join("project");
let local_repo = cwd.join("target/release/repo");
fs::create_dir_all(local_repo.parent().expect("local repo parent"))
.expect("create target dir");
create_executable(&local_repo);
let _env = TestEnvGuard::new(None, &cwd);
assert!(is_repo_available());
}
#[test]
fn is_repo_available_reports_false_without_repo_binary() {
let _guard = env_lock().lock().expect("lock env");
let temp = tempdir().expect("tempdir");
let cwd = temp.path().join("project");
fs::create_dir_all(&cwd).expect("create cwd dir");
let _env = TestEnvGuard::new(None, &cwd);
assert!(!is_repo_available());
}
fn create_executable(path: &Path) {
fs::write(path, b"#!/bin/sh\nexit 0\n").expect("write repo binary");
#[cfg(unix)]
{
use std::os::unix::fs::PermissionsExt;
let mut permissions = fs::metadata(path).expect("metadata").permissions();
permissions.set_mode(0o755);
fs::set_permissions(path, permissions).expect("set permissions");
}
}
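// Sketch test (not in the original commit): gnu_target_for rewrites the Rust
// triple's "-unknown-redox" suffix into the shorter "-redox" form and leaves
// other targets untouched.
#[test]
fn gnu_target_for_strips_unknown_vendor() {
assert_eq!(
super::gnu_target_for("x86_64-unknown-redox"),
"x86_64-redox"
);
assert_eq!(super::gnu_target_for("aarch64-linux-gnu"), "aarch64-linux-gnu");
}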
}
@@ -0,0 +1,797 @@
pub use crate::converter::{ConversionReport, ConversionResult};
use crate::deps::map_dependency;
use crate::error::CubError;
use crate::rbpkgbuild::{
BuildSection, BuildTemplate, CompatSection, ConversionStatus, DependenciesSection,
InstallSection, PackageSection, PatchesSection, PolicySection, RbPkgBuild, SourceEntry,
SourceSection, SourceType,
};
#[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct AurSrcInfo {
pub pkgbase: String,
pub pkgname: Vec<String>,
pub pkgver: String,
pub pkgrel: String,
pub pkgdesc: String,
pub url: String,
pub arch: Vec<String>,
pub license: Vec<String>,
pub depends: Vec<String>,
pub makedepends: Vec<String>,
pub checkdepends: Vec<String>,
pub optdepends: Vec<(String, Option<String>)>,
pub provides: Vec<String>,
pub conflicts: Vec<String>,
pub source: Vec<String>,
pub sha256sums: Vec<String>,
}
pub fn convert_pkgbuild(content: &str) -> Result<ConversionResult, CubError> {
let pkgnames = extract_array_assignment(content, "pkgname").unwrap_or_default();
let split_packages = extract_split_packages(content);
let pkgname = pkgnames
.first()
.cloned()
.or_else(|| split_packages.first().cloned())
.or_else(|| extract_scalar_assignment(content, "pkgbase"))
.ok_or_else(|| CubError::Conversion("missing pkgname in PKGBUILD".to_string()))?;
let pkgver = extract_scalar_assignment(content, "pkgver")
.ok_or_else(|| CubError::Conversion("missing pkgver in PKGBUILD".to_string()))?;
let pkgrel_raw =
extract_scalar_assignment(content, "pkgrel").unwrap_or_else(|| "1".to_string());
let pkgrel = pkgrel_raw.parse::<u32>().unwrap_or(1);
let pkgdesc = extract_scalar_assignment(content, "pkgdesc").unwrap_or_default();
let url = extract_scalar_assignment(content, "url").unwrap_or_default();
let licenses = extract_array_assignment(content, "license").unwrap_or_default();
let depends = extract_array_assignment(content, "depends").unwrap_or_default();
let makedepends = extract_array_assignment(content, "makedepends").unwrap_or_default();
let checkdepends = extract_array_assignment(content, "checkdepends").unwrap_or_default();
let optdepends_raw = extract_array_assignment(content, "optdepends").unwrap_or_default();
let provides = extract_array_assignment(content, "provides").unwrap_or_default();
let conflicts = extract_array_assignment(content, "conflicts").unwrap_or_default();
let sources = extract_array_assignment(content, "source").unwrap_or_default();
let sha256sums = extract_array_assignment(content, "sha256sums").unwrap_or_default();
let template = detect_build_template(content);
let mut warnings = detect_linuxisms(content);
let mut actions_required = Vec::new();
let mapped_runtime = map_dep_list(&depends, &mut warnings, &mut actions_required);
let mapped_build = map_dep_list(&makedepends, &mut warnings, &mut actions_required);
let mapped_check = map_dep_list(&checkdepends, &mut warnings, &mut actions_required);
let optdepends = optdepends_raw
.iter()
.flat_map(|raw| parse_optdepends(raw))
.map(|(name, _)| name)
.collect::<Vec<_>>();
let mapped_optional = map_dep_list(&optdepends, &mut warnings, &mut actions_required);
if sources.is_empty() {
warnings.push("PKGBUILD does not define any source entries".to_string());
}
if pkgnames.len() > 1 || !split_packages.is_empty() {
warnings.push(
"split package PKGBUILD detected; converting the primary package only".to_string(),
);
actions_required
.push("review split package metadata and package_* install logic manually".to_string());
}
let status = if warnings.is_empty() && actions_required.is_empty() {
ConversionStatus::Full
} else {
ConversionStatus::Partial
};
let rbpkg = RbPkgBuild {
format: 1,
package: PackageSection {
name: sanitize_pkgname(&pkgname),
version: pkgver,
release: pkgrel,
description: pkgdesc,
homepage: url,
license: licenses,
architectures: vec!["x86_64-unknown-redox".to_string()],
maintainers: Vec::new(),
},
source: SourceSection {
sources: sources
.into_iter()
.enumerate()
.map(|(index, source)| {
source_from_arch(source, sha256sums.get(index).map(String::as_str))
})
.collect(),
},
dependencies: DependenciesSection {
build: mapped_build,
runtime: mapped_runtime,
check: mapped_check,
optional: mapped_optional,
provides,
conflicts,
},
build: BuildSection {
template,
..BuildSection::default()
},
install: InstallSection::default(),
patches: PatchesSection::default(),
compat: CompatSection {
imported_from: "aur".to_string(),
original_pkgbuild: content.to_string(),
conversion_status: status.clone(),
target: "x86_64-unknown-redox".to_string(),
},
policy: PolicySection::default(),
};
rbpkg.validate()?;
let _ = rbpkg.to_srcinfo();
Ok(ConversionResult {
rbpkg,
report: ConversionReport {
status,
warnings,
actions_required,
},
})
}
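// Usage sketch (not part of the original commit; the PKGBUILD path is a
// placeholder):
//
//     let pkgbuild = std::fs::read_to_string("PKGBUILD")?;
//     let converted = convert_pkgbuild(&pkgbuild)?;
//     println!("status: {:?}", converted.report.status);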
pub fn parse_optdepends(raw: &str) -> Vec<(String, Option<String>)> {
let binding = strip_unquoted_comment(raw);
let trimmed = binding.trim();
if trimmed.is_empty() {
return Vec::new();
}
let items = if trimmed.starts_with('(') || trimmed.contains('"') || trimmed.contains('\'') {
parse_array(trimmed)
} else if trimmed.contains(':') && trimmed.chars().any(char::is_whitespace) {
vec![trimmed.to_string()]
} else {
shell_split(trimmed)
};
items
.into_iter()
.map(|value| parse_optdepend_entry(&value))
.filter(|(name, _)| !name.is_empty())
.collect()
}
pub fn parse_srcinfo(content: &str) -> Result<AurSrcInfo, CubError> {
let mut info = AurSrcInfo::default();
for raw_line in content.lines() {
let line = raw_line.trim();
if line.is_empty() || line.starts_with('#') {
continue;
}
let Some((key, value)) = line.split_once('=') else {
return Err(CubError::Conversion(format!(
"invalid .SRCINFO line: {raw_line}"
)));
};
let key = key.trim();
let value = parse_scalar(value.trim());
if value.is_empty() {
continue;
}
match key {
"pkgbase" => info.pkgbase = value,
"pkgname" => info.pkgname.push(value),
"pkgver" => info.pkgver = value,
"pkgrel" => info.pkgrel = value,
"pkgdesc" => {
if info.pkgdesc.is_empty() {
info.pkgdesc = value;
}
}
"url" => {
if info.url.is_empty() {
info.url = value;
}
}
"arch" => info.arch.push(value),
"license" => info.license.push(value),
"depends" => info.depends.push(value),
"makedepends" => info.makedepends.push(value),
"checkdepends" => info.checkdepends.push(value),
"optdepends" => {
let optdepend = parse_optdepend_entry(&value);
if !optdepend.0.is_empty() {
info.optdepends.push(optdepend);
}
}
"provides" => info.provides.push(value),
"conflicts" => info.conflicts.push(value),
"source" => info.source.push(value),
"sha256sums" => info.sha256sums.push(value),
_ => {}
}
}
if info.pkgname.is_empty() {
return Err(CubError::Conversion(
"missing pkgname in .SRCINFO".to_string(),
));
}
if info.pkgver.is_empty() {
return Err(CubError::Conversion(
"missing pkgver in .SRCINFO".to_string(),
));
}
if info.pkgrel.is_empty() {
return Err(CubError::Conversion(
"missing pkgrel in .SRCINFO".to_string(),
));
}
Ok(info)
}
pub fn extract_split_packages(content: &str) -> Vec<String> {
let mut packages = Vec::new();
for raw_line in content.lines() {
let binding = strip_unquoted_comment(raw_line);
let mut line = binding.trim_start();
if let Some(rest) = line.strip_prefix("function ") {
line = rest.trim_start();
}
let Some(rest) = line.strip_prefix("package_") else {
continue;
};
let Some((name, tail)) = rest.split_once('(') else {
continue;
};
if name.is_empty() || !tail.trim_start().starts_with(')') {
continue;
}
let name = name.trim().to_string();
if !packages.contains(&name) {
packages.push(name);
}
}
packages
}
fn map_dep_list(
deps: &[String],
warnings: &mut Vec<String>,
actions_required: &mut Vec<String>,
) -> Vec<String> {
let mut mapped = Vec::new();
for dep in deps {
let mapping = map_dependency(dep);
if mapping.mapped.is_empty() {
warnings.push(format!(
"dependency '{}' has no Redox mapping and was omitted",
mapping.original
));
actions_required.push(format!(
"port or replace dependency '{}' manually",
mapping.original
));
continue;
}
if !mapping.is_exact {
warnings.push(format!(
"dependency '{}' mapped to '{}'",
mapping.original, mapping.mapped
));
}
if !mapped.contains(&mapping.mapped) {
mapped.push(mapping.mapped);
}
}
mapped
}
pub fn detect_build_template(content: &str) -> BuildTemplate {
let lowered = content.to_ascii_lowercase();
if lowered.contains("cargo build") || lowered.contains("cargo install") {
BuildTemplate::Cargo
} else if lowered.contains("meson setup") || lowered.contains(" meson ") {
BuildTemplate::Meson
} else if lowered.contains("cmake") {
BuildTemplate::Cmake
} else if lowered.contains("./configure") || lowered.contains(" configure ") {
BuildTemplate::Configure
} else {
BuildTemplate::Custom
}
}
pub fn detect_linuxisms(content: &str) -> Vec<String> {
let lowered = content.to_ascii_lowercase();
let checks = [
(
"systemctl",
"uses systemctl, which is not available on Redox",
),
(
"/usr/lib/systemd",
"references /usr/lib/systemd, which is Linux-specific",
),
(
"systemd",
"references systemd, which is unavailable on Redox",
),
(
"/proc",
"references /proc, which may require Redox-specific adaptation",
),
];
let mut warnings = Vec::new();
for (needle, warning) in checks {
if lowered.contains(needle) {
warnings.push(warning.to_string());
}
}
warnings
}
pub fn sanitize_pkgname(name: &str) -> String {
name.trim_matches('"')
.to_ascii_lowercase()
.replace('_', "-")
}
pub fn source_from_arch(entry: String, sha256: Option<&str>) -> SourceEntry {
let is_git_source = is_git_source_entry(&entry);
let normalized = normalize_source_entry(&entry);
let source_type =
if is_git_source || normalized.starts_with("git://") || normalized.ends_with(".git") {
SourceType::Git
} else {
SourceType::Tar
};
SourceEntry {
sha256: if matches!(source_type, SourceType::Tar) {
sha256.unwrap_or_default().to_string()
} else {
String::new()
},
url: normalized,
source_type,
rev: String::new(),
branch: String::new(),
}
}
fn is_git_source_entry(entry: &str) -> bool {
let stripped = entry
.split_once("::")
.map(|(_, value)| value)
.unwrap_or(entry)
.trim();
stripped.starts_with("git+")
}
fn normalize_source_entry(entry: &str) -> String {
let stripped = entry
.split_once("::")
.map(|(_, value)| value)
.unwrap_or(entry)
.trim();
stripped
.strip_prefix("git+")
.unwrap_or(stripped)
.to_string()
}
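// Illustration of the source entry forms handled above (not in the original
// commit):
//   "demo::git+https://example.com/demo.git"  -> Git source, url "https://example.com/demo.git"
//   "https://example.com/demo-1.2.3.tar.xz"   -> Tar source, sha256 taken from sha256sums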
pub fn extract_scalar_assignment(content: &str, name: &str) -> Option<String> {
extract_assignment(content, name).map(|raw| parse_scalar(&raw))
}
pub fn extract_array_assignment(content: &str, name: &str) -> Option<Vec<String>> {
extract_assignment(content, name).map(|raw| parse_array(&raw))
}
fn extract_assignment(content: &str, name: &str) -> Option<String> {
let prefix = format!("{name}=");
let mut lines = content.lines();
while let Some(line) = lines.next() {
let trimmed = line.trim_start();
if !trimmed.starts_with(&prefix) {
continue;
}
let mut value = trimmed[prefix.len()..].trim().to_string();
if value.starts_with('(') {
let mut depth = paren_balance(&value);
while depth > 0 {
let Some(next) = lines.next() else {
break;
};
value.push('\n');
value.push_str(next.trim());
depth += paren_balance(next);
}
} else {
while value.ends_with('\\') {
value.pop();
let Some(next) = lines.next() else {
break;
};
value.push(' ');
value.push_str(next.trim());
}
}
return Some(value);
}
None
}
fn paren_balance(input: &str) -> i32 {
let opens = input.chars().filter(|ch| *ch == '(').count() as i32;
let closes = input.chars().filter(|ch| *ch == ')').count() as i32;
opens - closes
}
fn parse_scalar(raw: &str) -> String {
let binding = strip_unquoted_comment(raw);
let stripped = binding.trim();
if let Some(unquoted) = unquote(stripped) {
unquoted
} else {
stripped.to_string()
}
}
fn parse_array(raw: &str) -> Vec<String> {
let binding = strip_unquoted_comment(raw);
let trimmed = binding.trim();
let inner = trimmed
.strip_prefix('(')
.and_then(|value| value.strip_suffix(')'))
.unwrap_or(trimmed);
shell_split(inner)
}
fn strip_unquoted_comment(input: &str) -> String {
let mut single = false;
let mut double = false;
let mut result = String::new();
for ch in input.chars() {
match ch {
'\'' if !double => {
single = !single;
result.push(ch);
}
'"' if !single => {
double = !double;
result.push(ch);
}
'#' if !single && !double => break,
_ => result.push(ch),
}
}
result
}
fn parse_optdepend_entry(raw: &str) -> (String, Option<String>) {
let value = parse_scalar(raw);
let Some((name, description)) = value.split_once(':') else {
return (value.trim().to_string(), None);
};
let name = name.trim().to_string();
let description = description.trim();
let description = if description.is_empty() {
None
} else {
Some(description.to_string())
};
(name, description)
}
fn unquote(value: &str) -> Option<String> {
if value.len() >= 2 {
let bytes = value.as_bytes();
let first = bytes[0] as char;
let last = bytes[value.len() - 1] as char;
if (first == '\'' && last == '\'') || (first == '"' && last == '"') {
return Some(value[1..value.len() - 1].to_string());
}
}
None
}
pub fn shell_split(input: &str) -> Vec<String> {
let mut items = Vec::new();
let mut current = String::new();
let mut quote: Option<char> = None;
let mut escape = false;
for ch in input.chars() {
if escape {
current.push(ch);
escape = false;
continue;
}
match ch {
'\\' => escape = true,
'\'' | '"' => {
if quote == Some(ch) {
quote = None;
} else if quote.is_none() {
quote = Some(ch);
} else {
current.push(ch);
}
}
'#' if quote.is_none() => break,
ch if ch.is_whitespace() && quote.is_none() => {
if !current.is_empty() {
items.push(current.clone());
current.clear();
}
}
_ => current.push(ch),
}
}
if !current.is_empty() {
items.push(current);
}
items
}
#[cfg(test)]
mod tests {
use super::*;
const PKGBUILD: &str = r#"
pkgname=demo_pkg
pkgver=1.2.3
pkgrel=4
pkgdesc="Demo application"
url="https://example.com/demo"
license=('MIT')
depends=('glibc' 'openssl>=1.1' 'systemd')
makedepends=('cargo' 'pkg-config')
checkdepends=('python')
source=('https://example.com/demo-1.2.3.tar.xz')
sha256sums=('abc123deadbeef')
build() {
cargo build --release
}
package() {
install -Dm755 target/release/demo "$pkgdir/usr/bin/demo"
systemctl --version >/dev/null
}
"#;
#[test]
fn converts_pkgbuild_to_rbpkgbuild() {
let result = convert_pkgbuild(PKGBUILD).expect("convert PKGBUILD");
assert_eq!(result.rbpkg.package.name, "demo-pkg");
assert_eq!(result.rbpkg.package.version, "1.2.3");
assert_eq!(result.rbpkg.package.release, 4);
assert_eq!(result.rbpkg.build.template, BuildTemplate::Cargo);
assert_eq!(
result.rbpkg.dependencies.runtime,
vec!["relibc", "openssl3"]
);
assert_eq!(result.rbpkg.dependencies.build, vec!["cargo", "pkg-config"]);
assert_eq!(result.rbpkg.dependencies.check, vec!["python"]);
assert_eq!(result.rbpkg.source.sources.len(), 1);
assert_eq!(result.rbpkg.source.sources[0].sha256, "abc123deadbeef");
}
#[test]
fn reports_linuxisms_and_unmapped_deps() {
let result = convert_pkgbuild(PKGBUILD).expect("convert PKGBUILD");
assert!(matches!(result.report.status, ConversionStatus::Partial));
assert!(result
.report
.warnings
.iter()
.any(|w| w.contains("systemctl")));
assert!(result
.report
.actions_required
.iter()
.any(|w| w.contains("systemd")));
}
#[test]
fn parses_multiline_arrays() {
let input = "depends=(\n 'glibc'\n 'zlib'\n)\n";
let parsed = extract_array_assignment(input, "depends").expect("depends array");
assert_eq!(parsed, vec!["glibc", "zlib"]);
}
#[test]
fn detects_meson_template() {
let input = "pkgname=demo\npkgver=1\nmeson setup build\n";
assert_eq!(detect_build_template(input), BuildTemplate::Meson);
}
#[test]
fn parses_optdepends_with_descriptions() {
let parsed = parse_optdepends("'libnotify: desktop notifications' 'bash-completion'");
assert_eq!(
parsed,
vec![
(
"libnotify".to_string(),
Some("desktop notifications".to_string())
),
("bash-completion".to_string(), None),
]
);
}
#[test]
fn parses_srcinfo_metadata() {
let srcinfo = parse_srcinfo(
r#"
pkgbase = demo
pkgver = 1.2.3
pkgrel = 4
pkgdesc = Demo application
url = https://example.com/demo
arch = x86_64
license = MIT
depends = openssl>=1.1
makedepends = cargo
checkdepends = python
optdepends = libnotify: desktop notifications
provides = demo-virtual
conflicts = demo-old
source = https://example.com/demo-1.2.3.tar.xz
sha256sums = abc123deadbeef
pkgname = demo
pkgname = demo-docs
"#,
)
.expect("parse .SRCINFO");
assert_eq!(srcinfo.pkgbase, "demo");
assert_eq!(srcinfo.pkgname, vec!["demo", "demo-docs"]);
assert_eq!(srcinfo.pkgver, "1.2.3");
assert_eq!(srcinfo.pkgrel, "4");
assert_eq!(srcinfo.depends, vec!["openssl>=1.1"]);
assert_eq!(srcinfo.makedepends, vec!["cargo"]);
assert_eq!(srcinfo.checkdepends, vec!["python"]);
assert_eq!(
srcinfo.optdepends,
vec![(
"libnotify".to_string(),
Some("desktop notifications".to_string())
)]
);
assert_eq!(srcinfo.provides, vec!["demo-virtual"]);
assert_eq!(srcinfo.conflicts, vec!["demo-old"]);
}
#[test]
fn extracts_split_package_functions() {
let pkbuild = r#"
pkgname=('demo' 'demo-docs')
package_demo() {
:
}
function package_demo_docs() {
:
}
"#;
assert_eq!(
extract_split_packages(pkgbuild),
vec!["demo".to_string(), "demo_docs".to_string()]
);
}
#[test]
fn warns_when_converting_split_package_pkgbuild() {
let input = r#"
pkgname=('demo' 'demo-docs')
pkgver=1.0.0
pkgrel=1
source=('https://example.com/demo.tar.xz')
sha256sums=('abc')
build() {
cargo build --release
}
package_demo() {
:
}
function package_demo_docs() {
:
}
"#;
let result = convert_pkgbuild(input).expect("convert split-package PKGBUILD");
assert!(result
.report
.warnings
.iter()
.any(|warning| warning.contains("split package PKGBUILD detected")));
assert!(result
.report
.actions_required
.iter()
.any(|action| action.contains("package_* install logic manually")));
}
#[test]
fn treats_git_plus_sources_as_git() {
let source = source_from_arch("git+https://example.com/repo".to_string(), None);
assert_eq!(source.source_type, SourceType::Git);
assert_eq!(source.url, "https://example.com/repo");
assert!(source.sha256.is_empty());
}
#[test]
fn converts_optdepends_into_optional_dependencies() {
let input = r#"
pkgname=demo
pkgver=1.0.0
pkgrel=1
source=('https://example.com/demo.tar.xz')
sha256sums=('abc')
optdepends=('glibc: C runtime' 'bash-completion')
build() {
cargo build --release
}
"#;
let result = convert_pkgbuild(input).expect("convert PKGBUILD with optdepends");
assert_eq!(
result.rbpkg.dependencies.optional,
vec!["relibc", "bash-completion"]
);
}
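// Sketch test (not in the original commit): shell_split should honor quoting
// and drop an unquoted trailing comment.
#[test]
fn shell_split_respects_quotes_and_comments() {
assert_eq!(
shell_split("'glibc' zlib # trailing comment"),
vec!["glibc".to_string(), "zlib".to_string()]
);
}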
}