//
// Return any karbon.{yml,yaml} file if a directory
// is provided. Else return the path as is.
// /!\ Success doesn't mean that the file is valid /!\
//
fn find_resource_file(path: &PathBuf) -> PathBuf {
    let mut path = path
        .canonicalize()
        .expect("Failed to canonicalize the path provided.");

    // If path is a directory try to find a path/karbon.yml file.
    if path.is_dir() {
        path.push("karbon.yml");
    }
    if p.is_file() {
        // NB: `expect` doesn't interpolate, so use panic! to include the path.
        let url = Url::from_file_path(&p)
            .unwrap_or_else(|_| panic!("Unable to convert {p:?} to a URL"));
    // If path/karbon.yml is not a file, try the .yaml extension.
    if !path.is_file() {
        path.set_extension("yaml");
    }
        return vec![url];
    } else if p.is_dir() {
        let entries = fs::read_dir(&p)
            .unwrap_or_else(|_| panic!("Unable to list the content of {p:?}"));
        let mut result = Vec::new();
    // At this point give up and panic.
    if !path.is_file() {
        panic!("Not a valid file path or no karbonfile found.");
    }

    path
}
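// A minimal usage sketch (not in the original source): build a throwaway
// directory containing a karbon.yml and check that it gets resolved. The
// directory name is hypothetical.
#[cfg(test)]
mod find_resource_file_sketch {
    use super::*;
    use std::fs;

    #[test]
    fn resolves_directory_to_karbonfile() {
        let dir = std::env::temp_dir().join("karbon_sketch");
        fs::create_dir_all(&dir).unwrap();
        fs::write(dir.join("karbon.yml"), "kind: Karbonfile").unwrap();

        let resolved = find_resource_file(&dir);
        assert!(resolved.ends_with("karbon.yml"));
    }
}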
    for e in entries {
        let e_path = e.unwrap().path();
        let extension = e_path.extension().and_then(OsStr::to_str).unwrap_or("");
        let stem = e_path.file_stem().and_then(OsStr::to_str).unwrap_or("");
fn get_resource_type(path: &PathBuf) -> ResourceType {
    let file = fs::File::open(path).expect("Failed to open the file.");

    // (Sketched continuation, the original body is truncated here.)
    // Only apiVersion/kind need to be read to identify the resource.
    serde_yaml::from_reader(file).expect("Failed to parse the file.")
}
        // If a Karbonfile is found in the directory, ignore everything else
        // and return it for further processing.
        if stem == "karbonfile" {
            return vec![f_url];
        }
fn is_karbonfile(path: &PathBuf) -> bool {
    get_resource_type(path).kind.as_str() == "Karbonfile"
}
        return result;
    } else {
        panic!("{p:#?} is neither a file nor a directory");
    }
}
fn merge_relative_path(first: &PathBuf, second: &PathBuf) -> PathBuf {
    match (first.is_file(), first.parent(), second.is_relative()) {
        // First is a file path and has a parent directory.
        // Second is a relative path.
        // Append Second to First and canonicalize the result.
        (true, Some(path), true) => path
            .join(second)
            .canonicalize()
            .expect("Failed to canonicalize the merged paths"),
        // First is a file and it doesn't have a parent (not even "/").
        // This shouldn't happen.
        (true, None, _) => {
            panic!("Failed to merge paths. The first path doesn't have any parent");
        }
        // First is a directory and Second is relative.
        (false, _, true) => first
            .join(second)
            .canonicalize()
            .expect("Failed to canonicalize the merged paths"),
        // Second is not a relative path.
        // In this case just return the Second path.
        (_, _, false) => second.clone(),
    }
}
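// A short runnable sketch (not in the source): a relative `second` is
// resolved against the parent of a file `first`; the fixture paths below
// are hypothetical and created on the fly, since the function
// canonicalizes its result and so needs real files.
#[cfg(test)]
mod merge_relative_path_sketch {
    use super::*;
    use std::fs;
    use std::path::PathBuf;

    #[test]
    fn resolves_relative_to_parent() {
        let root = std::env::temp_dir().join("karbon_merge_sketch");
        fs::create_dir_all(root.join("base")).unwrap();
        fs::write(root.join("karbon.yml"), "").unwrap();
        fs::write(root.join("base/deploy.yml"), "").unwrap();

        let merged = merge_relative_path(
            &root.join("karbon.yml"),
            &PathBuf::from("base/deploy.yml"),
        );
        assert!(merged.ends_with("base/deploy.yml"));
    }
}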
fn to_url(path: &PathBuf) -> Url {
    let canonical = path
        .canonicalize()
        .unwrap_or_else(|_| panic!("Failed to canonicalize {path:?}"));
    Url::from_file_path(&canonical)
        .unwrap_or_else(|_| panic!("Failed to convert {canonical:?} into a URL"))
}
    if is_karbonfile(&canonical) {
        let karbonfile = parse_karbonfile(&canonical);
        let resources: Vec<Resource> = karbonfile
            .resources
            .iter()
            .map(|(k, _)| k.clone())
            .collect();
    let maybe_karbonfile = serde_yaml::from_value::<Karbonfile>(doc);
    println!("{:#?}", maybe_karbonfile);

    if let Ok(karbonfile) = maybe_karbonfile {
        let resources = get_resources(&karbonfile);
            // Leaf, since it's a karbonfile but there
            // are no resources in it.
            // Set the path and push the branch to result.
            tset.set_path(canonical.clone());
            result.push(tset);
println!("Doesn't have resources");leaf_is_karbonfile_without_resources(result, &variant, &path);
            // Leaf, since it's not a karbonfile.
            // Set the path and push the branch to result.
            tset.set_original(parse_resource(&canonical));
            tset.set_path(canonical.clone());
            tset.apply_transformations();
            result.push(tset);
leaf_is_not_karbonfile(result, &variant, &path);
    v.original = Some(parse(path));
    v.url = Some(to_url(path));
    v.apply_transformations();
    result.push(v);
}

fn leaf_is_karbonfile_without_resources(result: &mut VariantVec, variant: &Variant, path: &PathBuf) {
    // Leaf, since it's a karbonfile but there
    // are no resources in it.
    // Set the path and push the branch to result.
    let mut v = variant.to_owned();
    v.url = Some(to_url(path));
    result.push(v);
}
#[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")]
#[serde(deny_unknown_fields)]
pub enum Resource {
    Files(PathBuf),
    Karbonfile(PathBuf),
    HttpFile(Url),
    HttpKarbonfile(Url),
}

impl Resource {
    pub fn path(&self) -> PathBuf {
        use Resource::*;

        match self {
            Files(p) => p.clone(),
            Karbonfile(p) => p.clone(),
            // The original match was non-exhaustive; remote resources
            // have no local path, so fail loudly for now.
            HttpFile(u) | HttpKarbonfile(u) => {
                panic!("{u} is a remote resource without a local path")
            }
        }
    }
}

impl PartialEq for Resource {
    fn eq(&self, other: &Self) -> bool {
        self.path() == other.path()
    }
}
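// A minimal deserialization sketch (not in the original source): with
// serde's externally-tagged enum representation and `rename_all =
// "lowercase"`, the YAML key selects the variant. The path below is a
// hypothetical example.
#[cfg(test)]
mod resource_sketch {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn yaml_tag_selects_the_variant() {
        let r: Resource = serde_yaml::from_str("karbonfile: ./app/karbon.yml").unwrap();
        assert_eq!(r.path(), PathBuf::from("./app/karbon.yml"));
    }
}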
pub type Resource = Url;
#[serde(rename_all = "camelCase")]
pub struct ResourceType {
    pub api_version: String,
    pub kind: String,
}
#[serde(deny_unknown_fields)]
pub struct Transformation {
    filter: Option<Filter>,
    add: Option<TransAdd>,
    replace: Option<TransReplace>,
    remove: Option<TransRemove>,
    prefix: Option<TransPrefix>,
    suffix: Option<TransSuffix>,
}
    pub transformed: Option<Value>,
    pub transformations: OrderedMap,
    // Resulting variant after all transformations.
    pub result: Option<Value>,
    // Chain of transformations to apply with the Url of
    // the Karbonfile they originate from.
    pub transformations: Option<Vec<(Url, Vec<Transformation>)>>,
    pub fn set_path(&mut self, p: PathBuf) {
        self.path = Some(p);
    }

    pub fn set_original(&mut self, r: Value) {
        self.original = Some(r);
    }

    fn set_transformed(&mut self, r: Value) {
        self.transformed = Some(r);
    }

    pub fn add_transformation(&mut self, p: PathBuf, t: Vec<Transformation>) {
        self.transformations.push(p, t);
    }
        for k in self.transformations.keys() {
            if let Some(ts) = self.transformations.get(&k) {
                for t in ts {
                    if let Some(f) = &t.filter {
                        if !is_filter_match(&result, f) {
                            // If the filter is provided but doesn't match,
                            // stop processing this transformation and
                            // move to the next one.
                            continue;
                        }
                    }

                    // Everything below runs unless the filter is provided
                    // and doesn't match.
                    if let Some(labels) = &t.labels {
                        //for path in LABEL_FIELDS {
                        //    for (k, v) in labels.iter() {
                        //        *pointer_mut(&mut result, &format!("{path}/{k}")).unwrap() = Value::String(v.into());
                        //    }
                        //}
                    }

                    if let Some(add) = &t.add {
                        for (ptr, v) in add.iter() {
                            let mut p = ptr.as_str().unwrap();

                            if p.ends_with('-') {
                                // When the pointer ends with '-', append the value to the end
                                // of the sequence. If the sequence doesn't exist, create it.
                                p = p.trim_end_matches("/-");

                                let updated = json_pointer::insert(&mut result, p).unwrap();

                                if let Some(s) = updated.as_sequence_mut() {
                                    s.push(v.to_owned());
                                } else {
                                    let mut value = Sequence::new();
                                    value.push(v.to_owned());
                                    *updated = serde_yaml::Value::Sequence(value);
                                }
                            } else {
                                let updated = json_pointer::insert(&mut result, p).unwrap();
                                *updated = v.to_owned();
                            }
                        }
                    }

                    if let Some(replace) = &t.replace {
                        for (ptr, v) in replace.iter() {
                            let updated = json_pointer::update(&mut result, ptr.as_str().unwrap()).unwrap();
                            *updated = v.to_owned();
                        }
                    }

                    if let Some(prefix) = &t.prefix {
                        for (ptr, value) in prefix.iter() {
                            let p: &str = ptr.as_str().unwrap();
                            let v: &str = value.as_str().unwrap();

                            if let Some(Value::String(current)) = json_pointer::get(&result, p) {
                                let new = format!("{v}{current}");
                                *json_pointer::update(&mut result, p).unwrap() = Value::String(new);
                            }
                        }
                    }

                    if let Some(suffix) = &t.suffix {
                        for (ptr, value) in suffix.iter() {
                            let p: &str = ptr.as_str().unwrap();
                            let v: &str = value.as_str().unwrap();

                            if let Some(Value::String(current)) = json_pointer::get(&result, p) {
                                let new = format!("{current}{v}");
                                *json_pointer::update(&mut result, p).unwrap() = Value::String(new);
                            }
                        }
                    }
                }
            }
        }

        self.set_transformed(result);
        self.transformations
            .iter()
            .flatten()
            // Ignore the Karbonfile path.
            .map(|(_, ts)| ts)
            // Flatten all the transformations of all the
            // Karbonfiles into a single Vec.
            .flatten()
            .for_each(|t| {
                if matches_filter(&result, t.filter.as_ref()) {
                    t_add(&mut result, t.add.as_ref());
                    t_replace(&mut result, t.replace.as_ref());
                    t_prefix(&mut result, t.prefix.as_ref());
                    t_suffix(&mut result, t.suffix.as_ref());
                }
            });

        self.result = Some(result);
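// Illustrative sketch (not from the source): with two Karbonfiles in the
// chain, the pipeline above walks their transformations in order, e.g.
//
//   transformations = Some(vec![
//       (base_url,    vec![t_base]),     // applied first
//       (overlay_url, vec![t_overlay]),  // applied second
//   ])
//
// so an overlay can, say, prefix a name that the base already replaced.
// `base_url`, `overlay_url`, `t_base` and `t_overlay` are hypothetical.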
fn is_filter_match(doc: &Value, filter: &Mapping) -> bool {
    for (field, value) in filter.iter() {
        if let Value::String(f) = field {
            if json_pointer::get(doc, f).unwrap() != value {
                return false;
            }
        }
    }

    true
}
fn matches_filter(doc: &Value, filter: Option<&Filter>) -> bool {
    if let Some(f) = filter {
        // Check each field from the filter and return
        // false as soon as one doesn't match.
        for (field, value) in f.iter() {
            if json_pointer::get(doc, field) != Some(value) {
                return false;
            }
        }
    }

    // If all fields match, or if no filter was provided,
    // keep this transformation in the list to apply.
    true
}
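// Sketch of the intended semantics (not from the source; pointers and
// values are hypothetical): every entry of the filter must resolve to the
// given value, so for a document `kind: Deployment`:
//
//   filter {"/kind": "Deployment"}   -> true
//   filter {"/kind": "Service"}      -> false
//   no filter at all                 -> true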
}

#[derive(Clone, Debug)]
pub struct OrderedMap {
    index: Vec<PathBuf>,
    map: HashMap<PathBuf, Vec<Transformation>>,
}

impl OrderedMap {
    pub fn new() -> OrderedMap {
        OrderedMap {
            index: Vec::new(),
            map: HashMap::new(),
        }
    }

    pub fn push(&mut self, k: PathBuf, v: Vec<Transformation>) {
        self.map.entry(k.clone()).or_insert(v);
        self.index.push(k)
    }

    fn to_vec(&self) -> Vec<(PathBuf, Vec<Transformation>)> {
        self.index
            .iter()
            .map(|k| self.map.get_key_value(k).unwrap())
            .map(|(k, v)| (k.to_owned(), v.to_owned()))
            .collect()
    }

    fn keys(&self) -> Vec<PathBuf> {
        self.index.clone()
    }

    fn get(&self, k: &PathBuf) -> Option<Vec<Transformation>> {
        self.map.get(k).map(|v| v.to_owned())
    }
}
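// A short runnable sketch (not in the source): OrderedMap preserves
// insertion order, which is what keeps transformations applied in the
// order the Karbonfiles were discovered. The paths are hypothetical.
#[cfg(test)]
mod ordered_map_sketch {
    use super::*;
    use std::path::PathBuf;

    #[test]
    fn keys_come_back_in_insertion_order() {
        let mut m = OrderedMap::new();
        m.push(PathBuf::from("/base/karbon.yml"), Vec::new());
        m.push(PathBuf::from("/overlay/karbon.yml"), Vec::new());

        assert_eq!(
            m.keys(),
            vec![
                PathBuf::from("/base/karbon.yml"),
                PathBuf::from("/overlay/karbon.yml"),
            ]
        );
    }
}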
#[derive(Deserialize, Clone, Debug)]
#[serde(deny_unknown_fields)]
pub struct Transformation {
    #[serde(default)]
    filter: Option<Mapping>,
    #[serde(default)]
    labels: Option<HashMap<String, String>>,
    #[serde(default)]
    add: Option<Mapping>,
    #[serde(default)]
    replace: Option<Mapping>,
    #[serde(default)]
    remove: Option<Mapping>,
    #[serde(default)]
    prefix: Option<Mapping>,
    #[serde(default)]
    suffix: Option<Mapping>,
}

const LABEL_FIELDS: [&'static str; 30] = [
    "/metadata/labels",
    "/spec/selector",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/template/spec/affinity/podAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/topologySpreadConstraints/labelSelector/matchLabels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/template/spec/affinity/podAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/topologySpreadConstraints/labelSelector/matchLabels",
    //"/spec/volumeClaimTemplates[]/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/jobTemplate/spec/selector/matchLabels",
    "/spec/jobTemplate/metadata/labels",
    "/spec/jobTemplate/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/podSelector/matchLabels",
    "/spec/ingress/from/podSelector/matchLabels",
    "/spec/egress/to/podSelector/matchLabels",
];
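// Sketch (not from the source) of the YAML shape this struct accepts;
// the pointers and values below are hypothetical:
//
//   transformations:
//     - filter:
//         /kind: Deployment
//       add:
//         /metadata/labels/env: prod
//       prefix:
//         /metadata/name: dev-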
// Transformation primitives: stubs, still to be implemented.
fn t_add(result: &mut Value, t: Option<&TransAdd>) {}

fn t_replace(result: &mut Value, t: Option<&TransReplace>) {}

fn t_prefix(result: &mut Value, t: Option<&TransPrefix>) {}

fn t_suffix(result: &mut Value, t: Option<&TransSuffix>) {}
use std::path::{Path, PathBuf};
use std::collections::HashMap;
use crate::json_pointer;
use url::Url;
use yaml_rust::Yaml;
use yaml_rust::yaml::Hash;
use linked_hash_map::LinkedHashMap;

pub struct Karbonfile {
    api_version: String,
    kind: String,
    resources: LinkedHashMap<Yaml, Yaml>,
    generators: LinkedHashMap<Yaml, Yaml>,
    transformations: Vec<Transformation>,
}

impl Karbonfile {
    pub fn from_yaml(doc: Yaml) -> Karbonfile {
        let r = optional(&doc, "resources", Yaml::as_hash);
        let t = optional(&doc, "transformations", Yaml::as_vec)
            .into_iter()
            // Flatten Option<Vec<Yaml>> into the Yaml items themselves.
            .flatten()
            .map(Transformation::from_yaml)
            .collect();

        Karbonfile {
            api_version: mandatory(&doc, "apiVersion"),
            kind: mandatory(&doc, "kind"),
            // Fall back to an empty map when no resources are listed.
            resources: r.unwrap_or_default(),
            generators: LinkedHashMap::new(),
            transformations: t,
        }
    }
}

fn mandatory(doc: &Yaml, field: &str) -> String {
    doc[field]
        .as_str()
        .unwrap_or_else(|| panic!("The field '{field}' is missing or its value is not valid"))
        .to_string()
}

pub struct Variant {
    // Original resource file path.
    pub path: Option<PathBuf>,
    // Parsed content of the original resource file.
    pub original: Option<Yaml>,
    // Resulting variant after all transformations.
    pub transformed: Option<Yaml>,
    // Chain of transformations to apply with the path of
    // the Karbonfile they originate from.
    pub transformations: Option<Vec<(PathBuf, Vec<Transformation>)>>,
}

impl Variant {
    pub fn new() -> Variant {
        Variant {
            path: None,
            original: None,
            transformed: None,
            transformations: None,
        }
    }

    pub fn set_path(&mut self, p: PathBuf) {
        self.path = Some(p);
    }

    pub fn set_original(&mut self, r: Yaml) {
        self.original = Some(r);
    }

    pub fn push_transformation(&mut self, p: PathBuf, t: Vec<Transformation>) {
        // Match on a mutable reference; the original matched by value and
        // moved the Vec out of self.
        match &mut self.transformations {
            None => self.transformations = Some(vec![(p, t)]),
            Some(ts) => ts.push((p, t)),
        }
    }

    pub fn apply_transformations(&mut self) {
        if let Some(mut result) = self.original.to_owned() {
            self.transformations
                .iter()
                .flatten()
                // Ignore the Karbonfile path.
                .map(|(_, ts)| ts)
                // Flatten all the transformations of all the
                // Karbonfiles into a single Vec.
                .flatten()
                // Drop Transformations which don't apply to
                // this resource.
                .filter(|t| matches_filter(&result, t.filter.as_ref()))
                // Apply each type of transformation.
                .for_each(|t| {
                    t_add(&result, t.add.as_ref());
                    t_replace(&result, t.replace.as_ref());
                    t_prefix(&result, t.prefix.as_ref());
                    t_suffix(&result, t.suffix.as_ref());
                });

            self.transformed = Some(result);
        }
    }
}

type Filter = LinkedHashMap<String, Yaml>;

fn matches_filter(doc: &Yaml, filter: Option<&Filter>) -> bool {
    if let Some(f) = filter {
        // Check each field from the filter and return
        // false as soon as one doesn't match.
        for (field, value) in f.iter() {
            if json_pointer::get(doc, field) != Some(value) {
                return false;
            }
        }
    }

    // If all fields match or if no fields were provided
    // keep this transformation in the list to apply.
    true
}

// Transformation primitives: stubs, still to be implemented.
fn t_add(doc: &Yaml, add: Option<&TransAdd>) {}
fn t_replace(doc: &Yaml, replace: Option<&TransReplace>) {}
fn t_prefix(doc: &Yaml, prefix: Option<&TransPrefix>) {}
fn t_suffix(doc: &Yaml, suffix: Option<&TransSuffix>) {}

type TransAdd = LinkedHashMap<String, Yaml>;
type TransReplace = LinkedHashMap<String, Yaml>;
type TransRemove = LinkedHashMap<String, ()>;
type TransPrefix = LinkedHashMap<String, String>;
type TransSuffix = LinkedHashMap<String, String>;

pub struct Transformation {
    filter: Option<Filter>,
    add: Option<TransAdd>,
    replace: Option<TransReplace>,
    remove: Option<TransRemove>,
    prefix: Option<TransPrefix>,
    suffix: Option<TransSuffix>,
}

impl Transformation {
    fn from_yaml(doc: Yaml) -> Transformation {
        Transformation {
            filter: optional(&doc, "filter", Yaml::as_hash),
            add: optional(&doc, "add", Yaml::as_hash),
            replace: optional(&doc, "replace", Yaml::as_hash),
            remove: optional(&doc, "remove", Yaml::as_hash),
            prefix: optional(&doc, "prefix", Yaml::as_hash),
            suffix: optional(&doc, "suffix", Yaml::as_hash),
        }
    }
}

fn optional<T, F>(doc: &Yaml, field: &str, func: F) -> Option<T>
where
    T: Clone,
    F: FnOnce(&Yaml) -> Option<&T>,
{
    // The original wrote `doc[field].func()`, which isn't valid method
    // syntax for a closure parameter; call the accessor directly instead.
    func(&doc[field]).map(|x| x.to_owned())
}

const LABEL_FIELDS: [&'static str; 30] = [
    "/metadata/labels",
    "/spec/selector",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/template/spec/affinity/podAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/topologySpreadConstraints/labelSelector/matchLabels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/template/spec/affinity/podAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/preferredDuringSchedulingIgnoredDuringExecution/podAffinityTerm/labelSelector/matchLabels",
    "/spec/template/spec/affinity/podAntiAffinity/requiredDuringSchedulingIgnoredDuringExecution/labelSelector/matchLabels",
    "/spec/template/spec/topologySpreadConstraints/labelSelector/matchLabels",
    //"/spec/volumeClaimTemplates[]/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/template/metadata/labels",
    "/spec/jobTemplate/spec/selector/matchLabels",
    "/spec/jobTemplate/metadata/labels",
    "/spec/jobTemplate/spec/template/metadata/labels",
    "/spec/selector/matchLabels",
    "/spec/podSelector/matchLabels",
    "/spec/ingress/from/podSelector/matchLabels",
    "/spec/egress/to/podSelector/matchLabels",
];
pub fn insert<'a>(doc: &'a mut Value, pointer: &'a str) -> Option<&'a mut Value> {// Taken from serde JSONif pointer.is_empty() {return Some(doc);}if !pointer.starts_with('/') {return None;}pointer.split('/').skip(1).map(|x| x.replace("~1", "/").replace("~0", "~")).try_fold(doc, |target, token| {match target {Value::Mapping(map) => {let t: Value = serde_yaml::from_str(&token).unwrap();if !map.contains_key(&t) {let next = Mapping::new();map.insert(t.to_owned(), Value::Mapping(next));}map.get_mut(&t)}Value::Sequence(seq) => {let t = token.parse::<usize>().unwrap();seq.get_mut(t)}_ => None,}})}pub fn update<'a>(doc: &'a mut Value, pointer: &'a str) -> Option<&'a mut Value> {// Taken from serde JSONif pointer.is_empty() {return Some(doc);}if !pointer.starts_with('/') {return None;}pointer.split('/').skip(1).map(|x| x.replace("~1", "/").replace("~0", "~")).try_fold(doc, |target, token| {match target {Value::Mapping(map) => {let t: Value = serde_yaml::from_str(&token).unwrap();map.get_mut(&t)}Value::Sequence(seq) => {let t: usize = token.parse().unwrap();seq.get_mut(t)}_ => None,}})}pub fn upsert<'a>(doc: &'a mut Value, pointer: &'a str) -> Option<&'a mut Value> {// Taken from serde JSONif pointer.is_empty() {return Some(doc);}if !pointer.starts_with('/') {return None;}pointer.split('/').skip(1).map(|x| x.replace("~1", "/").replace("~0", "~")).try_fold(doc, |target, token| {match target {Value::Mapping(map) => {let t: Value = serde_yaml::from_str(&token).unwrap();if !map.contains_key(&t) {let next = Mapping::new();map.insert(t.to_owned(), Value::Mapping(next));}map.get_mut(&t)}Value::Sequence(seq) => {let t: usize = token.parse().unwrap();seq.get_mut(t)}_ => None,}})}
//pub fn insert<'a>(doc: &'a mut Yaml, pointer: &'a str) -> Option<&'a mut Yaml> {
//    // Taken from serde JSON
//
//    if pointer.is_empty() {
//        return Some(doc);
//    }
//    if !pointer.starts_with('/') {
//        return None;
//    }
//
//    pointer
//        .split('/')
//        .skip(1)
//        .map(|x| x.replace("~1", "/").replace("~0", "~"))
//        .try_fold(doc, |target, token| {
//            match target {
//                Yaml::Hash(map) => {
//                    let t: Yaml = Yaml::from_str(&token);
//
//                    if !map.contains_key(&t) {
//                        let next = Yaml::Hash(HashMap::new());
//
//                        map.insert(t.to_owned(), Yaml::Hash(next));
//                    }
//
//                    map.get_mut(&t)
//                }
//                Yaml::Sequence(seq) => {
//                    let t = token.parse::<usize>().unwrap();
//
//                    seq.get_mut(t)
//                }
//                _ => None,
//            }
//        })
//}
//
//
//pub fn update<'a>(doc: &'a mut Yaml, pointer: &'a str) -> Option<&'a mut Yaml> {
//    // Taken from serde JSON
//    if pointer.is_empty() {
//        return Some(doc);
//    }
//    if !pointer.starts_with('/') {
//        return None;
//    }
//    pointer
//        .split('/')
//        .skip(1)
//        .map(|x| x.replace("~1", "/").replace("~0", "~"))
//        .try_fold(doc, |target, token| {
//            match target {
//                Yaml::Hash(map) => {
//                    let t: Yaml = Yaml::from_str(&token).unwrap();
//                    map.get_mut(&t)
//                }
//                Yaml::Sequence(seq) => {
//                    let t: usize = token.parse().unwrap();
//                    seq.get_mut(t)
//                }
//                _ => None,
//            }
//        })
//}
//
//
//pub fn upsert<'a>(doc: &'a mut Yaml, pointer: &'a str) -> Option<&'a mut Yaml> {
//    // Taken from serde JSON
//    if pointer.is_empty() {
//        return Some(doc);
//    }
//    if !pointer.starts_with('/') {
//        return None;
//    }
//    pointer
//        .split('/')
//        .skip(1)
//        .map(|x| x.replace("~1", "/").replace("~0", "~"))
//        .try_fold(doc, |target, token| {
//            match target {
//                Yaml::Hash(map) => {
//                    let t: Yaml = Yaml::from_str(&token).unwrap();
//
//                    if !map.contains_key(&t) {
//                        let next = Yaml::Hash();
//
//                        map.insert(t.to_owned(), Yaml::Hash(next));
//                    }
//
//                    map.get_mut(&t)
//                }
//                Yaml::Sequence(seq) => {
//                    let t: usize = token.parse().unwrap();
//                    seq.get_mut(t)
//                }
//                _ => None,
//            }
//        })
//}
# This template contains all of the possible sections and their default values
# Note that all fields that take a lint level have these possible values:
# * deny - An error will be produced and the check will fail
# * warn - A warning will be produced, but the check will not fail
# * allow - No warning or error will be produced, though in some cases a note
# will be
# The values provided in this template are the default values that will be used
# when any section or field is not specified in your own configuration

# If 1 or more target triples (and optionally, target_features) are specified,
# only the specified targets will be checked when running `cargo deny check`.
# This means, if a particular package is only ever used as a target specific
# dependency, such as, for example, the `nix` crate only being used via the
# `target_family = "unix"` configuration, that only having windows targets in
# this list would mean the nix crate, as well as any of its exclusive
# dependencies not shared by any other crates, would be ignored, as the target
# list here is effectively saying which targets you are building for.
targets = [
    # The triple can be any string, but only the target triples built in to
    # rustc (as of 1.40) can be checked against actual config expressions
    #{ triple = "x86_64-unknown-linux-musl" },
    # You can also specify which target_features you promise are enabled for a
    # particular target. target_features are currently not validated against
    # the actual valid features supported by the target architecture.
    #{ triple = "wasm32-unknown-unknown", features = ["atomics"] },
]

# This section is considered when running `cargo deny check advisories`
# More documentation for the advisories section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/advisories/cfg.html
[advisories]
# The path where the advisory database is cloned/fetched into
db-path = "~/.cargo/advisory-db"
# The url(s) of the advisory databases to use
db-urls = ["https://github.com/rustsec/advisory-db"]
# The lint level for security vulnerabilities
vulnerability = "deny"
# The lint level for unmaintained crates
unmaintained = "warn"
# The lint level for crates that have been yanked from their source registry
yanked = "warn"
# The lint level for crates with security notices. Note that as of
# 2019-12-17 there are no security notice advisories in
# https://github.com/rustsec/advisory-db
notice = "warn"
# A list of advisory IDs to ignore. Note that ignored advisories will still
# output a note when they are encountered.
ignore = [
    #"RUSTSEC-0000-0000",
]
# Threshold for security vulnerabilities, any vulnerability with a CVSS score
# lower than the range specified will be ignored. Note that ignored advisories
# will still output a note when they are encountered.
# * None - CVSS Score 0.0
# * Low - CVSS Score 0.1 - 3.9
# * Medium - CVSS Score 4.0 - 6.9
# * High - CVSS Score 7.0 - 8.9
# * Critical - CVSS Score 9.0 - 10.0
#severity-threshold =

# This section is considered when running `cargo deny check licenses`
# More documentation for the licenses section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/licenses/cfg.html
[licenses]
# The lint level for crates which do not have a detectable license
unlicensed = "deny"
# List of explicitly allowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
allow = [
    #"MIT",
    #"Apache-2.0",
    #"Apache-2.0 WITH LLVM-exception",
]
# List of explicitly disallowed licenses
# See https://spdx.org/licenses/ for list of possible licenses
# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
deny = [
    #"Nokia",
]
# Lint level for licenses considered copyleft
copyleft = "warn"
# Blanket approval or denial for OSI-approved or FSF Free/Libre licenses
# * both - The license will be approved if it is both OSI-approved *AND* FSF
# * either - The license will be approved if it is either OSI-approved *OR* FSF
# * osi-only - The license will be approved if it is OSI-approved *AND NOT* FSF
# * fsf-only - The license will be approved if it is FSF *AND NOT* OSI-approved
# * neither - This predicate is ignored and the default lint level is used
allow-osi-fsf-free = "neither"
# Lint level used when no other predicates are matched
# 1. License isn't in the allow or deny lists
# 2. License isn't copyleft
# 3. License isn't OSI/FSF, or allow-osi-fsf-free = "neither"
default = "deny"
# The confidence threshold for detecting a license from license text.
# The higher the value, the more closely the license text must be to the
# canonical license text of a valid SPDX license file.
# [possible values: any between 0.0 and 1.0].
confidence-threshold = 0.8
# Allow 1 or more licenses on a per-crate basis, so that particular licenses
# aren't accepted for every possible crate as with the normal allow list
exceptions = [
    # Each entry is the crate and version constraint, and its specific allow
    # list
    #{ allow = ["Zlib"], name = "adler32", version = "*" },
]

# Some crates don't have (easily) machine readable licensing information,
# adding a clarification entry for it allows you to manually specify the
# licensing information
#[[licenses.clarify]]
# The name of the crate the clarification applies to
#name = "ring"
# The optional version constraint for the crate
#version = "*"
# The SPDX expression for the license requirements of the crate
#expression = "MIT AND ISC AND OpenSSL"
# One or more files in the crate's source used as the "source of truth" for
# the license expression. If the contents match, the clarification will be used
# when running the license check, otherwise the clarification will be ignored
# and the crate will be checked normally, which may produce warnings or errors
# depending on the rest of your configuration
#license-files = [
    # Each entry is a crate relative path, and the (opaque) hash of its contents
    #{ path = "LICENSE", hash = 0xbd0eed23 }
#]

[licenses.private]
# If true, ignores workspace crates that aren't published, or are only
# published to private registries.
# To see how to mark a crate as unpublished (to the official registry),
# visit https://doc.rust-lang.org/cargo/reference/manifest.html#the-publish-field.
ignore = false
# One or more private registries that you might publish crates to, if a crate
# is only published to private registries, and ignore is true, the crate will
# not have its license(s) checked
registries = [
    #"https://sekretz.com/registry"
]

# This section is considered when running `cargo deny check bans`.
# More documentation about the 'bans' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/bans/cfg.html
[bans]
# Lint level for when multiple versions of the same crate are detected
multiple-versions = "warn"
# Lint level for when a crate version requirement is `*`
wildcards = "allow"
# The graph highlighting used when creating dotgraphs for crates
# with multiple versions
# * lowest-version - The path to the lowest versioned duplicate is highlighted
# * simplest-path - The path to the version with the fewest edges is highlighted
# * all - Both lowest-version and simplest-path are used
highlight = "all"
# List of crates that are allowed. Use with care!
allow = [
    #{ name = "ansi_term", version = "=0.11.0" },
]
# List of crates to deny
deny = [
    # Each entry the name of a crate and a version range. If version is
    # not specified, all versions will be matched.
    #{ name = "ansi_term", version = "=0.11.0" },
    #
    # Wrapper crates can optionally be specified to allow the crate when it
    # is a direct dependency of the otherwise banned crate
    #{ name = "ansi_term", version = "=0.11.0", wrappers = [] },
]
# Certain crates/versions that will be skipped when doing duplicate detection.
skip = [
    #{ name = "ansi_term", version = "=0.11.0" },
]
# Similarly to `skip` allows you to skip certain crates during duplicate
# detection. Unlike skip, it also includes the entire tree of transitive
# dependencies starting at the specified crate, up to a certain depth, which is
# by default infinite
skip-tree = [
    #{ name = "ansi_term", version = "=0.11.0", depth = 20 },
]

# This section is considered when running `cargo deny check sources`.
# More documentation about the 'sources' section can be found here:
# https://embarkstudios.github.io/cargo-deny/checks/sources/cfg.html
[sources]
# Lint level for what to happen when a crate from a crate registry that is not
# in the allow list is encountered
unknown-registry = "warn"
# Lint level for what to happen when a crate from a git repository that is not
# in the allow list is encountered
unknown-git = "warn"
# List of URLs for allowed crate registries. Defaults to the crates.io index
# if not specified. If it is specified but empty, no registries are allowed.
allow-registry = ["https://github.com/rust-lang/crates.io-index"]
# List of URLs for allowed Git repositories
allow-git = []

[sources.allow-org]
# 1 or more github.com organizations to allow git sources for
github = [""]
# 1 or more gitlab.com organizations to allow git sources for
gitlab = [""]
# 1 or more bitbucket.org organizations to allow git sources for
bitbucket = [""]
serde = { version = "1.0", features = ["derive"] }
serde_yaml = "0.8"
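# Assumed companion dependencies (not in the original manifest): the modules
# above also reference these crates; the versions are illustrative guesses,
# left commented out.
#url = "2"
#yaml-rust = "0.4"
#linked-hash-map = "0.5"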