fix: Improve validation errors
commit 3d0ae32734
parent 6424bf3573

99 changed files with 3773 additions and 425 deletions
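For reviewers, a minimal sketch of how the reworked API is exercised after this change, modeled on the new tests at the bottom of this diff. The helper itself is assumed (written as if inside the schema_validator module) and is not code from this commit:

    // Sketch only: assumed helper inside the schema_validator module, mirroring the new tests.
    use std::sync::Arc;

    use blue_build_process_management::ASYNC_RUNTIME;

    fn check_recipe(path: &str) -> Result<(), SchemaValidateError> {
        // all_errors now lives on the builder instead of being passed to every
        // process_validation call; build() is async, so block on the shared runtime.
        let validator = ASYNC_RUNTIME
            .block_on(
                SchemaValidator::builder()
                    .url(RECIPE_V1_SCHEMA_URL)
                    .all_errors(true)
                    .build(),
            )
            .expect("schema should build");

        let file = Arc::new(std::fs::read_to_string(path).expect("file should be readable"));

        // process_validation now returns Result<(), SchemaValidateError>; the labeled
        // spans and help text ride along inside SchemaValidateError::YamlValidate.
        validator.process_validation(path, file)
    }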
@@ -7,16 +7,17 @@ use std::{
use blue_build_process_management::ASYNC_RUNTIME;
use blue_build_recipe::{FromFileList, ModuleExt, Recipe, StagesExt};
use blue_build_utils::constants::{
    MODULE_STAGE_LIST_V1_SCHEMA_URL, MODULE_V1_SCHEMA_URL, RECIPE_V1_SCHEMA_URL,
    STAGE_V1_SCHEMA_URL,
};
use bon::Builder;
use clap::Args;
use colored::Colorize;
use log::{debug, info, trace};
use miette::{bail, miette, Context, IntoDiagnostic, Report};
use rayon::prelude::*;
use schema_validator::{
    SchemaValidator, MODULE_STAGE_LIST_V1_SCHEMA_URL, MODULE_V1_SCHEMA_URL, RECIPE_V1_SCHEMA_URL,
    STAGE_V1_SCHEMA_URL,
};
use schema_validator::SchemaValidator;
use serde::de::DeserializeOwned;
use serde_json::Value;
@@ -97,11 +98,21 @@ impl BlueBuildCommand for ValidateCommand {
impl ValidateCommand {
    async fn setup_validators(&mut self) -> Result<(), Report> {
        let (rv, sv, mv, mslv) = tokio::try_join!(
            SchemaValidator::builder().url(RECIPE_V1_SCHEMA_URL).build(),
            SchemaValidator::builder().url(STAGE_V1_SCHEMA_URL).build(),
            SchemaValidator::builder().url(MODULE_V1_SCHEMA_URL).build(),
            SchemaValidator::builder()
                .url(RECIPE_V1_SCHEMA_URL)
                .all_errors(self.all_errors)
                .build(),
            SchemaValidator::builder()
                .url(STAGE_V1_SCHEMA_URL)
                .all_errors(self.all_errors)
                .build(),
            SchemaValidator::builder()
                .url(MODULE_V1_SCHEMA_URL)
                .all_errors(self.all_errors)
                .build(),
            SchemaValidator::builder()
                .url(MODULE_STAGE_LIST_V1_SCHEMA_URL)
                .all_errors(self.all_errors)
                .build(),
        )?;
        self.recipe_validator = Some(rv);
@@ -149,15 +160,12 @@ impl ValidateCommand {
            if instance.get(DF::LIST_KEY).is_some() {
                debug!("{path_display} is a list file");
                let err = match self
                let err = self
                    .module_stage_list_validator
                    .as_ref()
                    .unwrap()
                    .process_validation(path, file_str.clone(), self.all_errors)
                {
                    Err(e) => return vec![e],
                    Ok(e) => e,
                };
                    .process_validation(path, file_str.clone())
                    .err();

                err.map_or_else(
                    || {
@@ -195,13 +203,13 @@ impl ValidateCommand {
                        },
                    )
                },
                |err| vec![err],
                |err| vec![err.into()],
            )
        } else {
            debug!("{path_display} is a single file file");
            single_validator
                .process_validation(path, file_str, self.all_errors)
                .map_or_else(|e| vec![e], |e| e.map_or_else(Vec::new, |e| vec![e]))
                .process_validation(path, file_str)
                .map_or_else(|e| vec![e.into()], |()| Vec::new())
        }
    }
    Err(e) => vec![e],
@@ -221,11 +229,11 @@ impl ValidateCommand {
        let schema_validator = self.recipe_validator.as_ref().unwrap();
        let err = schema_validator
            .process_validation(&self.recipe, recipe_str.clone(), self.all_errors)
            .map_err(err_vec)?;
            .process_validation(&self.recipe, recipe_str.clone())
            .err();

        if let Some(err) = err {
            Err(vec![err])
            Err(vec![err.into()])
        } else {
            let recipe: Recipe = serde_yaml::from_str(&recipe_str)
                .into_diagnostic()
@@ -35,6 +35,12 @@ impl From<&JsonLocation> for Location {
    }
}

impl std::fmt::Display for Location {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        self.0.fmt(f)
    }
}

impl TryFrom<&str> for Location {
    type Error = miette::Report;
@@ -1,54 +1,62 @@
use std::{
    borrow::Cow,
    collections::HashSet,
    path::Path,
    sync::{Arc, LazyLock},
};

use blue_build_process_management::ASYNC_RUNTIME;
use blue_build_recipe::ModuleTypeVersion;
use bon::bon;
use cached::proc_macro::cached;
use colored::Colorize;
use indexmap::IndexMap;
use jsonschema::{
    output::Output, BasicOutput, ErrorIterator, Retrieve, Uri, ValidationError, Validator,
};
use log::{debug, trace};
use miette::{bail, miette, Context, IntoDiagnostic, LabeledSpan, NamedSource, Report, Result};
use jsonschema::{BasicOutput, Retrieve, Uri, ValidationError, Validator};
use miette::{Context, IntoDiagnostic, LabeledSpan, NamedSource};
use regex::Regex;
use serde_json::Value;

use super::{location::Location, yaml_span::YamlSpan};

pub const BASE_SCHEMA_URL: &str = "https://schema.blue-build.org";
pub const RECIPE_V1_SCHEMA_URL: &str = "https://schema.blue-build.org/recipe-v1.json";
pub const STAGE_V1_SCHEMA_URL: &str = "https://schema.blue-build.org/stage-v1.json";
pub const MODULE_V1_SCHEMA_URL: &str = "https://schema.blue-build.org/module-v1.json";
pub const MODULE_STAGE_LIST_V1_SCHEMA_URL: &str =
    "https://schema.blue-build.org/module-stage-list-v1.json";
#[cfg(test)]
use std::eprintln as trace;
#[cfg(test)]
use std::eprintln as warn;

#[cfg(not(test))]
use log::{trace, warn};

mod error;

pub use error::*;

#[derive(Debug, Clone)]
pub struct SchemaValidator {
    schema: Arc<Value>,
    validator: Arc<Validator>,
    url: &'static str,
    all_errors: bool,
}

#[bon]
impl SchemaValidator {
    #[builder]
    pub async fn new(url: &'static str) -> Result<Self, Report> {
    pub async fn new(
        /// The URL of the schema to validate against
        url: &'static str,
        /// Produce all errors found
        #[builder(default)]
        all_errors: bool,
    ) -> Result<Self, SchemaValidateBuilderError> {
        tokio::spawn(async move {
            let schema: Arc<Value> = Arc::new(
                reqwest::get(url)
                    .await
                    .into_diagnostic()
                    .with_context(|| format!("Failed to get schema at {url}"))?
                    .json()
                    .await
                    .into_diagnostic()
                    .with_context(|| format!("Failed to get json for schema {url}"))?,
            );
            let schema: Value = {
                #[cfg(not(test))]
                {
                    reqwest::get(url).await?.json().await?
                }
                #[cfg(test)]
                {
                    serde_json::from_slice(std::fs::read_to_string(url)?.as_bytes())?
                }
            };
            let validator = Arc::new(
                tokio::task::spawn_blocking({
                    let schema = schema.clone();
@@ -56,8 +64,6 @@ impl SchemaValidator {
                    jsonschema::options()
                        .with_retriever(ModuleSchemaRetriever)
                        .build(&schema)
                        .into_diagnostic()
                        .with_context(|| format!("Failed to build validator for schema {url}"))
                }
            })
            .await
@@ -65,181 +71,231 @@ impl SchemaValidator {
            );

            Ok(Self {
                schema,
                validator,
                url,
                all_errors,
            })
        })
        .await
        .expect("Should join task")
    }

    pub fn apply<'a, 'b>(&'a self, value: &'b Value) -> Output<'a, 'b> {
        self.validator.apply(value)
    }

    pub fn iter_errors<'a>(&'a self, value: &'a Value) -> ErrorIterator<'a> {
        self.validator.iter_errors(value)
    }

    pub fn schema(&self) -> Arc<Value> {
        self.schema.clone()
    }

    pub const fn url(&self) -> &'static str {
        self.url
    }

    pub fn process_validation(
    pub fn process_validation<P>(
        &self,
        path: &Path,
        path: P,
        file: Arc<String>,
        all_errors: bool,
    ) -> Result<Option<Report>> {
        let recipe_path_display = path.display().to_string().bold().italic();
    ) -> Result<(), SchemaValidateError>
    where
        P: AsRef<Path>,
    {
        let path = path.as_ref();
        let spans = self.get_spans(&file, path)?;

        self.spans_to_report(spans, file, path)
    }

    fn get_spans(
        &self,
        file: &Arc<String>,
        path: &Path,
    ) -> Result<Vec<LabeledSpan>, SchemaValidateError> {
        let recipe_path_display = path.display().to_string().bold().italic();
        let spanner = YamlSpan::builder().file(file.clone()).build()?;
        let instance: Value = serde_yaml::from_str(&file)
            .into_diagnostic()
            .with_context(|| format!("Failed to deserialize recipe {recipe_path_display}"))?;
        let instance: Value = serde_yaml::from_str(file)
            .map_err(|e| SchemaValidateError::SerdeYaml(e, path.to_path_buf()))?;
        trace!("{recipe_path_display}:\n{file}");

        Ok(if all_errors {
            self.process_basic_output(self.apply(&instance).basic(), file, &spanner, path)
        Ok(if self.all_errors {
            process_basic_output(self.validator.apply(&instance).basic(), &spanner)
        } else {
            self.process_err(self.iter_errors(&instance), path, file, &spanner)
            process_err(self.validator.iter_errors(&instance), &spanner)
        })
    }

    fn process_basic_output(
    fn spans_to_report(
        &self,
        out: BasicOutput<'_>,
        labels: Vec<LabeledSpan>,
        file: Arc<String>,
        spanner: &YamlSpan,
        path: &Path,
    ) -> Option<Report> {
        match out {
            BasicOutput::Valid(_) => None,
            BasicOutput::Invalid(errors) => {
                let mut collection: IndexMap<Location, Vec<String>> = IndexMap::new();
                let errors = {
                    let mut e = errors.into_iter().collect::<Vec<_>>();
                    e.sort_by(|e1, e2| {
                        e1.instance_location()
                            .as_str()
                            .cmp(e2.instance_location().as_str())
                    });
                    e
                };
                let errors: Vec<(Location, String)> = {
                    let e = errors
                        .into_iter()
                        .map(|e| {
                            (
                                Location::from(e.instance_location()),
                                remove_json(&e.error_description().to_string()).to_string(),
                            )
                        })
                        .collect::<HashSet<_>>();
                    let mut e = e.into_iter().collect::<Vec<_>>();
                    e.sort_by(|e1, e2| e1.0.as_str().cmp(e2.0.as_str()));
                    e
                };

                for (instance_path, err) in errors {
                    collection
                        .entry(instance_path)
                        .and_modify(|errs| {
                            errs.push(format!("- {}", err.bold().red()));
                        })
                        .or_insert_with(|| vec![format!("- {}", err.bold().red())]);
                }

                let spans = collection
                    .into_iter()
                    .map(|(key, value)| {
                        LabeledSpan::new_with_span(
                            Some(value.join("\n")),
                            spanner.get_span(&key).unwrap(),
                        )
                    })
                    .collect::<Vec<_>>();
                Some(
                    miette!(
                        labels = spans,
                        help = format!(
                            "Try adding these lines to the top of your file:\n{}\n{}",
                            "---".bright_green(),
                            format!("# yaml-language-server: $schema={}", self.url).bright_green(),
                        ),
                        "{} error{} encountered",
                        spans.len().to_string().red(),
                        if spans.len() == 1 { "" } else { "s" }
                    )
                    .with_source_code(
                        NamedSource::new(path.display().to_string(), file).with_language("yaml"),
                    ),
                )
            }
        }
    }

    fn process_err<'a, I>(
        &self,
        errors: I,
        path: &Path,
        file: Arc<String>,
        spanner: &YamlSpan,
    ) -> Option<Report>
    where
        I: Iterator<Item = ValidationError<'a>>,
    {
        let spans = errors
            .map(|err| {
                LabeledSpan::new_primary_with_span(
                    Some(remove_json(&err.to_string()).bold().red().to_string()),
                    spanner
                        .get_span(&Location::from(err.instance_path))
                        .unwrap(),
                )
            })
            .collect::<Vec<_>>();

        if spans.is_empty() {
            None
    ) -> Result<(), SchemaValidateError> {
        if labels.is_empty() {
            Ok(())
        } else {
            Some(
                miette!(
                    labels = spans,
                    help = format!(
                        "Try adding these lines to the top of your file:\n{}\n{}",
                        "---".bright_green(),
                        format!("# yaml-language-server: $schema={}", self.url).bright_green(),
                    ),
                    "{} error{} encountered",
                    spans.len().to_string().red(),
                    if spans.len() == 1 { "" } else { "s" }
                )
                .with_source_code(
                    NamedSource::new(path.display().to_string(), file).with_language("yaml"),
            Err(SchemaValidateError::YamlValidate {
                src: NamedSource::new(path.display().to_string(), file).with_language("yaml"),
                labels,
                help: format!(
                    "Try adding these lines to the top of your file for editor validation highlights:\n{}\n{}",
                    "---".bright_green(),
                    format!("# yaml-language-server: $schema={}", self.url).bright_green(),
                ),
            )
            })
        }
    }
}

fn remove_json(string: &str) -> Cow<'_, str> {
fn process_basic_output(out: BasicOutput<'_>, spanner: &YamlSpan) -> Vec<LabeledSpan> {
    match out {
        BasicOutput::Valid(_) => Vec::new(),
        BasicOutput::Invalid(errors) => {
            let errors = {
                let mut e = errors.into_iter().collect::<Vec<_>>();
                e.sort_by(|e1, e2| {
                    e1.instance_location()
                        .as_str()
                        .cmp(e2.instance_location().as_str())
                });
                e
            };
            let errors: Vec<(Location, String)> = {
                let e = errors
                    .into_iter()
                    .map(|e| {
                        (
                            Location::from(e.instance_location()),
                            remove_json(&e.error_description().to_string()),
                        )
                    })
                    .collect::<HashSet<_>>();
                let mut e = e.into_iter().collect::<Vec<_>>();
                e.sort_by(|e1, e2| e1.0.as_str().cmp(e2.0.as_str()));
                e
            };

            let mut collection: IndexMap<Location, Vec<String>> = IndexMap::new();

            for (instance_path, err) in errors {
                collection
                    .entry(instance_path)
                    .and_modify(|errs| {
                        errs.push(format!("- {}", err.bold().red()));
                    })
                    .or_insert_with(|| vec![format!("- {}", err.bold().red())]);
            }

            collection
                .into_iter()
                .map(|(key, value)| {
                    LabeledSpan::new_with_span(
                        Some(value.into_iter().collect::<Vec<_>>().join("\n")),
                        spanner.get_span(&key).unwrap(),
                    )
                })
                .collect()
        }
    }
}

fn process_err<'a, I>(errors: I, spanner: &YamlSpan) -> Vec<LabeledSpan>
where
    I: Iterator<Item = ValidationError<'a>>,
{
    errors
        .flat_map(|err| process_anyof_error(&err).unwrap_or_else(|| vec![err]))
        .map(|err| {
            let masked_err = err.masked();
            LabeledSpan::new_primary_with_span(
                Some(masked_err.to_string().bold().red().to_string()),
                spanner
                    .get_span(&Location::from(err.instance_path))
                    .unwrap(),
            )
        })
        .collect()
}

fn process_anyof_error(err: &ValidationError<'_>) -> Option<Vec<ValidationError<'static>>> {
    trace!("to_processed_module_err({err:#?})");
    let ValidationError {
        instance,
        kind,
        instance_path,
        schema_path: _,
    } = err;

    let mut path_iter = instance_path.into_iter();
    let uri = match (kind, path_iter.next_back(), path_iter.next_back()) {
        (
            jsonschema::error::ValidationErrorKind::AnyOf,
            Some(jsonschema::paths::LocationSegment::Index(_)),
            Some(jsonschema::paths::LocationSegment::Property("modules")),
        ) => {
            trace!("FOUND MODULE ANYOF ERROR at {instance_path}");
            if instance.get("source").is_some() {
                Uri::parse("json-schema:///module-custom-v1.json".to_string()).ok()?
            } else if instance.get("from-file").is_some() {
                Uri::parse("json-schema:///import-v1.json".to_string()).ok()?
            } else {
                let typ = instance.get("type").and_then(Value::as_str)?;
                let typ = ModuleTypeVersion::from(typ);
                trace!("Module type: {typ}");
                Uri::parse(format!(
                    "json-schema:///modules/{}-{}.json",
                    typ.typ(),
                    typ.version().unwrap_or("latest")
                ))
                .ok()?
            }
        }
        (
            jsonschema::error::ValidationErrorKind::AnyOf,
            Some(jsonschema::paths::LocationSegment::Index(_)),
            Some(jsonschema::paths::LocationSegment::Property("stages")),
        ) => {
            trace!("FOUND STAGE ANYOF ERROR at {instance_path}");

            if instance.get("from-file").is_some() {
                Uri::parse("json-schema:///import-v1.json".to_string()).ok()?
            } else {
                Uri::parse("json-schema:///stage-v1.json".to_string()).ok()?
            }
        }
        _ => return None,
    };

    trace!("Schema URI: {uri}");
    let schema = ASYNC_RUNTIME.block_on(cache_retrieve(&uri.borrow())).ok()?;

    let validator = jsonschema::options()
        .with_retriever(ModuleSchemaRetriever)
        .build(&schema)
        .inspect_err(|e| warn!("{e:#?}"))
        .ok()?;

    Some(
        validator
            .iter_errors(instance)
            .flat_map(|err| process_anyof_error(&err).unwrap_or_else(|| vec![err]))
            .map(|err| {
                let mut err = err.to_owned();
                err.instance_path = instance_path
                    .into_iter()
                    .chain(&err.instance_path)
                    .collect();
                err
            })
            .inspect(|errs| {
                trace!("From error: {err:#?}\nTo error list: {errs:#?}");
            })
            .collect(),
    )
}

fn remove_json<S>(string: &S) -> String
where
    S: ToString,
{
    static REGEX_OBJECT: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^\{.*\}\s(.*)$").unwrap());
    static REGEX_ARRAY: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"^\[.*\]\s(.*)$").unwrap());

    let string = string.trim();
    let string = string.to_string();

    if REGEX_OBJECT.is_match(string) {
        REGEX_OBJECT.replace_all(string, "$1")
    } else if REGEX_ARRAY.is_match(string) {
        REGEX_ARRAY.replace_all(string, "$1")
    if REGEX_OBJECT.is_match(&string) {
        REGEX_OBJECT.replace_all(string.trim(), "$1").into_owned()
    } else if REGEX_ARRAY.is_match(&string) {
        REGEX_ARRAY.replace_all(string.trim(), "$1").into_owned()
    } else {
        Cow::Borrowed(string)
        string
    }
}
@@ -259,26 +315,283 @@ async fn cache_retrieve(uri: &Uri<&str>) -> miette::Result<Value> {
    let scheme = uri.scheme();
    let path = uri.path();

    let uri = match scheme.as_str() {
        "json-schema" => {
            format!("{BASE_SCHEMA_URL}{path}")
        }
        "https" => uri.to_string(),
        scheme => bail!("Unknown scheme {scheme}"),
    };
    #[cfg(not(test))]
    {
        const BASE_SCHEMA_URL: &str = "https://schema.blue-build.org";

        debug!("Retrieving schema from {}", uri.bold().italic());
        tokio::spawn(async move {
            reqwest::get(&uri)
                .await
                .into_diagnostic()
                .with_context(|| format!("Failed to retrieve schema from {uri}"))?
                .json()
                .await
                .into_diagnostic()
                .with_context(|| format!("Failed to parse json from {uri}"))
                .inspect(|value| trace!("{}:\n{value}", uri.bold().italic()))
        })
        .await
        .expect("Should join task")
        let uri = match scheme.as_str() {
            "json-schema" => {
                format!("{BASE_SCHEMA_URL}{path}")
            }
            "https" => uri.to_string(),
            scheme => miette::bail!("Unknown scheme {scheme}"),
        };

        log::debug!("Retrieving schema from {}", uri.bold().italic());
        tokio::spawn(async move {
            reqwest::get(&uri)
                .await
                .into_diagnostic()
                .with_context(|| format!("Failed to retrieve schema from {uri}"))?
                .json()
                .await
                .into_diagnostic()
                .with_context(|| format!("Failed to parse json from {uri}"))
                .inspect(|value| trace!("{}:\n{value}", uri.bold().italic()))
        })
        .await
        .expect("Should join task")
    }

    #[cfg(test)]
    {
        let uri = match scheme.as_str() {
            "json-schema" | "https" => {
                format!("test-files/schema/{path}")
            }
            _ => unreachable!(),
        };

        serde_json::from_slice(
            std::fs::read_to_string(uri)
                .into_diagnostic()
                .context("Failed retrieving sub-schema")?
                .as_bytes(),
        )
        .into_diagnostic()
        .context("Failed deserializing sub-schema")
    }
}

#[cfg(test)]
mod test {
    use blue_build_process_management::ASYNC_RUNTIME;
    use rstest::rstest;

    use super::*;

    #[rstest]
    #[case::recipe(
        "test-files/recipes/recipe-pass.yml",
        "test-files/schema/recipe-v1.json"
    )]
    #[case::stage("test-files/recipes/stage-pass.yml", "test-files/schema/stage-v1.json")]
    #[case::stage_list(
        "test-files/recipes/stage-list-pass.yml",
        "test-files/schema/stage-list-v1.json"
    )]
    #[case::module_list(
        "test-files/recipes/module-list-pass.yml",
        "test-files/schema/module-list-v1.json"
    )]
    #[case::akmods(
        "test-files/recipes/modules/akmods-pass.yml",
        "test-files/schema/modules/akmods-v1.json"
    )]
    #[case::bling(
        "test-files/recipes/modules/bling-pass.yml",
        "test-files/schema/modules/bling-v1.json"
    )]
    #[case::brew(
        "test-files/recipes/modules/brew-pass.yml",
        "test-files/schema/modules/brew-v1.json"
    )]
    #[case::chezmoi(
        "test-files/recipes/modules/chezmoi-pass.yml",
        "test-files/schema/modules/chezmoi-v1.json"
    )]
    #[case::containerfile(
        "test-files/recipes/modules/containerfile-pass.yml",
        "test-files/schema/modules/containerfile-v1.json"
    )]
    #[case::copy(
        "test-files/recipes/modules/copy-pass.yml",
        "test-files/schema/modules/copy-v1.json"
    )]
    #[case::default_flatpaks(
        "test-files/recipes/modules/default-flatpaks-pass.yml",
        "test-files/schema/modules/default-flatpaks-v1.json"
    )]
    #[case::files(
        "test-files/recipes/modules/files-pass.yml",
        "test-files/schema/modules/files-v1.json"
    )]
    #[case::fonts(
        "test-files/recipes/modules/fonts-pass.yml",
        "test-files/schema/modules/fonts-v1.json"
    )]
    #[case::gnome_extensions(
        "test-files/recipes/modules/gnome-extensions-pass.yml",
        "test-files/schema/modules/gnome-extensions-v1.json"
    )]
    #[case::gschema_overrides(
        "test-files/recipes/modules/gschema-overrides-pass.yml",
        "test-files/schema/modules/gschema-overrides-v1.json"
    )]
    #[case::justfiles(
        "test-files/recipes/modules/justfiles-pass.yml",
        "test-files/schema/modules/justfiles-v1.json"
    )]
    #[case::rpm_ostree(
        "test-files/recipes/modules/rpm-ostree-pass.yml",
        "test-files/schema/modules/rpm-ostree-v1.json"
    )]
    #[case::script(
        "test-files/recipes/modules/script-pass.yml",
        "test-files/schema/modules/script-v1.json"
    )]
    #[case::signing(
        "test-files/recipes/modules/signing-pass.yml",
        "test-files/schema/modules/signing-v1.json"
    )]
    #[case::systemd(
        "test-files/recipes/modules/systemd-pass.yml",
        "test-files/schema/modules/systemd-v1.json"
    )]
    #[case::yafti(
        "test-files/recipes/modules/yafti-pass.yml",
        "test-files/schema/modules/yafti-v1.json"
    )]
    fn pass_validation(#[case] file: &str, #[case] schema: &'static str) {
        let validator = ASYNC_RUNTIME
            .block_on(SchemaValidator::builder().url(schema).build())
            .unwrap();

        let file_contents = Arc::new(std::fs::read_to_string(file).unwrap());

        let result = validator.process_validation(file, file_contents);
        dbg!(&result);

        assert!(result.is_ok());
    }

    #[rstest]
    #[case::recipe(
        "test-files/recipes/recipe-fail.yml",
        "test-files/schema/recipe-v1.json",
        6
    )]
    #[case::stage(
        "test-files/recipes/stage-fail.yml",
        "test-files/schema/stage-v1.json",
        2
    )]
    #[case::stage_list(
        "test-files/recipes/stage-list-fail.yml",
        "test-files/schema/stage-list-v1.json",
        2
    )]
    #[case::module_list(
        "test-files/recipes/module-list-fail.yml",
        "test-files/schema/module-list-v1.json",
        35
    )]
    #[case::akmods(
        "test-files/recipes/modules/akmods-fail.yml",
        "test-files/schema/modules/akmods-v1.json",
        1
    )]
    #[case::bling(
        "test-files/recipes/modules/bling-fail.yml",
        "test-files/schema/modules/bling-v1.json",
        1
    )]
    #[case::brew(
        "test-files/recipes/modules/brew-fail.yml",
        "test-files/schema/modules/brew-v1.json",
        3
    )]
    #[case::chezmoi(
        "test-files/recipes/modules/chezmoi-fail.yml",
        "test-files/schema/modules/chezmoi-v1.json",
        3
    )]
    #[case::containerfile(
        "test-files/recipes/modules/containerfile-fail.yml",
        "test-files/schema/modules/containerfile-v1.json",
        2
    )]
    #[case::copy(
        "test-files/recipes/modules/copy-fail.yml",
        "test-files/schema/modules/copy-v1.json",
        2
    )]
    #[case::default_flatpaks(
        "test-files/recipes/modules/default-flatpaks-fail.yml",
        "test-files/schema/modules/default-flatpaks-v1.json",
        4
    )]
    #[case::files(
        "test-files/recipes/modules/files-fail.yml",
        "test-files/schema/modules/files-v1.json",
        1
    )]
    #[case::fonts(
        "test-files/recipes/modules/fonts-fail.yml",
        "test-files/schema/modules/fonts-v1.json",
        2
    )]
    #[case::gnome_extensions(
        "test-files/recipes/modules/gnome-extensions-fail.yml",
        "test-files/schema/modules/gnome-extensions-v1.json",
        2
    )]
    #[case::gschema_overrides(
        "test-files/recipes/modules/gschema-overrides-fail.yml",
        "test-files/schema/modules/gschema-overrides-v1.json",
        1
    )]
    #[case::justfiles(
        "test-files/recipes/modules/justfiles-fail.yml",
        "test-files/schema/modules/justfiles-v1.json",
        2
    )]
    #[case::rpm_ostree(
        "test-files/recipes/modules/rpm-ostree-fail.yml",
        "test-files/schema/modules/rpm-ostree-v1.json",
        3
    )]
    #[case::script(
        "test-files/recipes/modules/script-fail.yml",
        "test-files/schema/modules/script-v1.json",
        2
    )]
    #[case::signing(
        "test-files/recipes/modules/signing-fail.yml",
        "test-files/schema/modules/signing-v1.json",
        1
    )]
    #[case::systemd(
        "test-files/recipes/modules/systemd-fail.yml",
        "test-files/schema/modules/systemd-v1.json",
        4
    )]
    #[case::yafti(
        "test-files/recipes/modules/yafti-fail.yml",
        "test-files/schema/modules/yafti-v1.json",
        1
    )]
    fn fail_validation(#[case] file: &str, #[case] schema: &'static str, #[case] err_count: usize) {
        let validator = ASYNC_RUNTIME
            .block_on(SchemaValidator::builder().url(schema).build())
            .unwrap();

        let file_contents = Arc::new(std::fs::read_to_string(file).unwrap());

        let result = validator.process_validation(file, file_contents);
        dbg!(&result);

        assert!(result.is_err());

        let SchemaValidateError::YamlValidate {
            src: _,
            labels,
            help: _,
        } = result.unwrap_err()
        else {
            panic!("Wrong error");
        };

        assert_eq!(labels.len(), err_count);
    }
}
src/commands/validate/schema_validator/error.rs (new file, 55 lines added)
@@ -0,0 +1,55 @@
use std::{path::PathBuf, sync::Arc};

use colored::Colorize;
use miette::{Diagnostic, LabeledSpan, NamedSource};
use thiserror::Error;

use crate::commands::validate::yaml_span::YamlSpanError;

#[derive(Error, Diagnostic, Debug)]
pub enum SchemaValidateBuilderError {
    #[error(transparent)]
    #[cfg(not(test))]
    #[diagnostic()]
    Reqwest(#[from] reqwest::Error),

    #[error(transparent)]
    SerdeJson(#[from] serde_json::Error),

    #[error(transparent)]
    #[cfg(test)]
    #[diagnostic()]
    Fs(#[from] std::io::Error),

    #[error(transparent)]
    #[diagnostic()]
    JsonSchemaBuild(#[from] jsonschema::ValidationError<'static>),
}

#[derive(Error, Diagnostic, Debug)]
pub enum SchemaValidateError {
    #[error("Failed to deserialize file {}", .1.display().to_string().bold().italic())]
    #[diagnostic()]
    SerdeYaml(serde_yaml::Error, PathBuf),

    #[error(
        "{} error{} encountered",
        .labels.len().to_string().red(),
        if .labels.len() == 1 { "" } else { "s" }
    )]
    #[diagnostic()]
    YamlValidate {
        #[source_code]
        src: NamedSource<Arc<String>>,

        #[label(collection)]
        labels: Vec<LabeledSpan>,

        #[help]
        help: String,
    },

    #[error(transparent)]
    #[diagnostic(transparent)]
    YamlSpan(#[from] YamlSpanError),
}
@@ -2,7 +2,7 @@ use std::sync::Arc;
use bon::bon;
use jsonschema::paths::LocationSegment;
use miette::{bail, Context, IntoDiagnostic, Result, SourceSpan};
use miette::SourceSpan;
use yaml_rust2::{
    parser::{MarkedEventReceiver, Parser},
    scanner::Marker,
@@ -10,12 +10,18 @@ use yaml_rust2::{
};

#[cfg(not(test))]
use log::trace;
use log::{debug, trace};
#[cfg(test)]
use std::eprintln as trace;
#[cfg(test)]
use std::eprintln as debug;

use super::location::Location;

mod error;

pub use error::*;

#[derive(Debug)]
pub struct YamlSpan {
    file: Arc<String>,
@@ -25,7 +31,7 @@ pub struct YamlSpan {
#[bon]
impl YamlSpan {
    #[builder]
    pub fn new(file: Arc<String>) -> Result<Self> {
    pub fn new(file: Arc<String>) -> Result<Self, YamlSpanError> {
        let mut ys = Self {
            file,
            event_markers: Vec::default(),
@@ -34,14 +40,12 @@ impl YamlSpan {
        let file = ys.file.clone();
        let mut parser = Parser::new_from_str(&file);

        parser
            .load(&mut ys, false)
            .into_diagnostic()
            .context("Failed to parse file")?;
        parser.load(&mut ys, false)?;
        Ok(ys)
    }

    pub fn get_span(&self, path: &Location) -> Result<SourceSpan> {
    pub fn get_span(&self, path: &Location) -> Result<SourceSpan, YamlSpanError> {
        debug!("Searching {path}");
        let mut event_iter = self.event_markers.iter();
        let mut path_iter = path.into_iter();
@@ -79,7 +83,7 @@ where
        Self { events, path }
    }

    pub fn get_span(&mut self) -> Result<SourceSpan> {
    pub fn get_span(&mut self) -> Result<SourceSpan, YamlSpanError> {
        let mut stream_start = false;
        let mut document_start = false;
@@ -108,12 +112,12 @@ where
                Event::MappingStart(_, _) if stream_start && document_start => {
                    break self.key(key)?.into();
                }
                event => bail!("Failed to read event: {event:?}"),
                event => return Err(YamlSpanError::UnexpectedEvent(event.to_owned())),
            }
        })
    }

    fn key(&mut self, expected_key: LocationSegment<'_>) -> Result<(usize, usize)> {
    fn key(&mut self, expected_key: LocationSegment<'_>) -> Result<(usize, usize), YamlSpanError> {
        trace!("Looking for location {expected_key:?}");

        loop {
@@ -131,10 +135,20 @@ where
                    if key != expected_key =>
                {
                    trace!("Non-matching key '{key}'");
                    continue;
                    let (event, marker) = self.events.next().unwrap();

                    match event {
                        Event::Scalar(_, _, _, _) => continue,
                        Event::MappingStart(_, _) => self.skip_mapping(marker.index()),
                        Event::SequenceStart(_, _) => self.skip_sequence(marker.index()),
                        _ => unreachable!("{event:?}"),
                    };
                }
                (Event::Scalar(key, _, _, _), LocationSegment::Index(index)) => {
                    bail!("Encountered key {key} when looking for index {index}")
                    return Err(YamlSpanError::ExpectIndexFoundKey {
                        key: key.to_owned(),
                        index,
                    })
                }
                (Event::SequenceStart(_, _), LocationSegment::Index(index)) => {
                    break self.sequence(index, 0);
@@ -146,7 +160,7 @@ where
                    self.skip_mapping(marker.index());
                }
                (Event::MappingEnd, _) => {
                    bail!("Reached end of map an haven't found key {expected_key}")
                    return Err(YamlSpanError::EndOfMapNoKey(expected_key.to_string()))
                }
                event => unreachable!("{event:?}"),
            }
@@ -193,13 +207,17 @@ where
        }
    }

    fn sequence(&mut self, index: usize, curr_index: usize) -> Result<(usize, usize)> {
    fn sequence(
        &mut self,
        index: usize,
        curr_index: usize,
    ) -> Result<(usize, usize), YamlSpanError> {
        let (event, marker) = self.events.next().expect("Need events");
        trace!("{event:?} {marker:?}");
        trace!("index: {index}, curr_index: {curr_index}");

        Ok(match event {
            Event::SequenceEnd => bail!("Reached end of sequence before reaching index {index}"),
            Event::SequenceEnd => return Err(YamlSpanError::EndOfSequenceNoIndex(index)),
            Event::Scalar(_, _, _, _) if index > curr_index => {
                self.sequence(index, curr_index + 1)?
            }
@@ -236,15 +254,19 @@ where
        })
    }

    fn value(&mut self) -> Result<(usize, usize)> {
    fn value(&mut self) -> Result<(usize, usize), YamlSpanError> {
        let (event, marker) = self.events.next().unwrap();
        trace!("{event:?} {marker:?}");
        let key = self.path.next();
        trace!("{key:?}");

        Ok(match (event, key) {
            (Event::Scalar(value, _, _, _), None) => (marker.index(), value.len()),
            (Event::Scalar(value, _, _, _), Some(segment)) => {
                bail!("Encountered scalar value {value} when looking for {segment}")
                return Err(YamlSpanError::UnexpectedScalar {
                    value: value.to_owned(),
                    segment: segment.to_string(),
                })
            }
            (Event::MappingStart(_, _), Some(LocationSegment::Property(key))) => {
                self.key(LocationSegment::Property(key))?
src/commands/validate/yaml_span/error.rs (new file, 30 lines added)
@@ -0,0 +1,30 @@
use miette::Diagnostic;
use thiserror::Error;
use yaml_rust2::{Event, ScanError};

#[derive(Error, Diagnostic, Debug)]
pub enum YamlSpanError {
    #[error("Failed to parse file: {0}")]
    #[diagnostic()]
    ScanError(#[from] ScanError),

    #[error("Failed to read event: {0:?}")]
    #[diagnostic()]
    UnexpectedEvent(Event),

    #[error("Encountered key {key} when looking for index {index}")]
    #[diagnostic()]
    ExpectIndexFoundKey { key: String, index: usize },

    #[error("Reached end of map an haven't found key {0}")]
    #[diagnostic()]
    EndOfMapNoKey(String),

    #[error("Reached end of sequence before reaching index {0}")]
    #[diagnostic()]
    EndOfSequenceNoIndex(usize),

    #[error("Encountered scalar value {value} when looking for {segment}")]
    #[diagnostic()]
    UnexpectedScalar { value: String, segment: String },
}