Mirror of https://forge.katzen.cafe/katzen-cafe/iowo.git (synced 2024-11-22 05:34:41 +01:00)

Commit a896e66cca (parent 8c52d3668e): run cargo clippy --fix
@@ -1,4 +1,4 @@
-use std::cell::RefCell;
 
 use codespan_reporting::{
     diagnostic::{Diagnostic, Label},
@@ -49,35 +49,35 @@ impl Errors {
     pub fn into_diag(
         &self,
         file_id: usize,
-        file_db: &SimpleFiles<&str, String>,
+        _file_db: &SimpleFiles<&str, String>,
     ) -> Diagnostic<usize> {
         let Errors { kind, locs } = self;
 
         match kind {
-            ErrorKind::InvalidToken => simple_diag(locs.to_vec(), file_id, "invalid tokens"),
+            ErrorKind::InvalidToken => simple_diag(locs.clone(), file_id, "invalid tokens"),
             ErrorKind::SyntaxError(syntax_error) => match syntax_error {
                 SyntaxErrorKind::MissingStreamer => simple_diag(
-                    locs.to_vec(),
+                    locs.clone(),
                     file_id,
                     "pipeline is missing an input provider",
                 ),
                 SyntaxErrorKind::MissingSink => {
-                    simple_diag(locs.to_vec(), file_id, "pipeline is missing a sink")
+                    simple_diag(locs.clone(), file_id, "pipeline is missing a sink")
                 }
                 SyntaxErrorKind::MissingFilter => {
-                    simple_diag(locs.to_vec(), file_id, "missing filters in pipeline")
+                    simple_diag(locs.clone(), file_id, "missing filters in pipeline")
                 }
                 SyntaxErrorKind::LiteralAsSink => {
-                    simple_diag(locs.to_vec(), file_id, "pipelines can't end in a literal")
+                    simple_diag(locs.clone(), file_id, "pipelines can't end in a literal")
                 }
                 SyntaxErrorKind::LiteralAsFilter => {
-                    simple_diag(locs.to_vec(), file_id, "literals can't filter data")
+                    simple_diag(locs.clone(), file_id, "literals can't filter data")
                 }
                 SyntaxErrorKind::LiteralWithArgs => {
-                    simple_diag(locs.to_vec(), file_id, "literals can't take arguments")
+                    simple_diag(locs.clone(), file_id, "literals can't take arguments")
                 }
             },
-            ErrorKind::CommandNotFound => simple_diag(locs.to_vec(), file_id, "command not found"),
+            ErrorKind::CommandNotFound => simple_diag(locs.clone(), file_id, "command not found"),
         }
     }
 }
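Two kinds of fix show up in this hunk: the unused `file_db` parameter gets an underscore prefix (rustc's unused-variable suggestion, which `cargo clippy --fix` also applies), and `locs.to_vec()` becomes `locs.clone()`, most likely clippy's `implicit_clone` lint, since calling `to_vec()` on something that is already a `Vec` is just a clone in disguise. A minimal sketch of that second pattern, with a made-up `Span` alias and struct fields rather than the crate's real definitions:

// Illustrative only: `Span`, `Errors`, and `spans` are assumptions, not the crate's API.
type Span = std::ops::Range<usize>;

struct Errors {
    locs: Vec<Span>,
}

impl Errors {
    fn spans(&self) -> Vec<Span> {
        // `self.locs.to_vec()` compiles too, but on a Vec it is just a clone,
        // so clippy prefers the direct call:
        self.locs.clone()
    }
}

fn main() {
    let errs = Errors { locs: vec![0..1, 4..7] };
    assert_eq!(errs.spans().len(), 2);
}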
@@ -94,8 +94,8 @@ impl GlobalNamespace {
         } else {
             self.commands.borrow_mut().push(InternalCommand {
                 name: name.to_owned(),
-                input: input.map(|def| def.into()),
-                output: output.map(|def| def.into()),
+                input: input.map(std::convert::Into::into),
+                output: output.map(std::convert::Into::into),
             });
             let id = self.traits.borrow().len() - 1;
             let _ = self
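Replacing `|def| def.into()` with the path `std::convert::Into::into` is the shape of clippy's redundant-closure suggestion (likely `redundant_closure_for_method_calls`): a closure that only forwards its argument to a conversion can be passed as the function itself. A standalone illustration with a made-up `Meters` type, not anything from the repository:

struct Meters(f64);

impl From<f64> for Meters {
    fn from(v: f64) -> Self {
        Meters(v)
    }
}

fn main() {
    let raw = Some(1.5_f64);
    // The closure only forwards its argument to .into(), so it can be
    // replaced by the function path itself; both lines are equivalent:
    let _a: Option<Meters> = raw.map(|v| v.into());
    let _b: Option<Meters> = raw.map(std::convert::Into::into);
}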
@@ -30,12 +30,12 @@ impl<'a> TypeDef<'a> {
                 ),
             },
             InternalTypeDef::List(list) => TypeDef::List(
-                list.into_iter()
+                list.iter()
                     .map(|def| Self::from_internal(ns, def))
                     .collect(),
             ),
             InternalTypeDef::Record(rec) => TypeDef::Record(
-                rec.into_iter()
+                rec.iter()
                     .map(|(name, def)| (name.clone(), Self::from_internal(ns, def)))
                     .collect(),
             ),
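`list` and `rec` are presumably borrowed here (`from_internal` builds a new `TypeDef` from an internal definition it does not own), and on a reference `.into_iter()` iterates by reference anyway, so clippy's `into_iter_on_ref` lint rewrites it to the clearer `.iter()`. A tiny example of the same rule, unrelated to the crate:

// Illustrative helper; the slice of numbers stands in for the borrowed list above.
fn sum_refs(v: &[i32]) -> i32 {
    // On a borrowed slice, .into_iter() already yields references, so .iter()
    // says the same thing more clearly:
    v.iter().copied().sum()
}

fn main() {
    assert_eq!(sum_refs(&[1, 2, 3]), 6);
}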
@@ -112,7 +112,7 @@ impl From<TypeDef<'_>> for InternalTypeDef {
         match value {
             TypeDef::Type(val) => Self::Single(TypeNamespaceId::Types(val.id)),
             TypeDef::Trait(val) => Self::Single(TypeNamespaceId::Traits(val.id)),
-            TypeDef::List(list) => Self::List(list.into_iter().map(|def| def.into()).collect()),
+            TypeDef::List(list) => Self::List(list.into_iter().map(std::convert::Into::into).collect()),
             TypeDef::Record(rec) => Self::Record(
                 rec.into_iter()
                     .map(|(name, typ)| (name, typ.into()))
@@ -88,7 +88,7 @@ fn check_missing_filters(syntax: &[PipelineElement]) -> Result<(), Vec<logos::Span
                 }),
             ) = (syntax.get(i), syntax.get(i + 1))
             {
-                missing_filter_locs.push(span.start..span1.end)
+                missing_filter_locs.push(span.start..span1.end);
             }
         }
 
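The trailing semicolon added here, and the ones added throughout the test hunks below, match clippy's `semicolon_if_nothing_returned` lint: when a block's final expression evaluates to `()`, clippy prefers it written as an explicit statement. A before/after sketch with a made-up helper:

// Illustrative only; push_twice is not part of the repository.
fn push_twice(v: &mut Vec<i32>, x: i32) {
    v.push(x);
    // Writing `v.push(x)` without a semicolon also compiles (push returns ()),
    // but the lint asks for the explicit statement form:
    v.push(x);
}

fn main() {
    let mut v = Vec::new();
    push_twice(&mut v, 7);
    assert_eq!(v, vec![7, 7]);
}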
@@ -11,7 +11,7 @@ fn test_check_missing_streamer() {
     let test_data = "| invert | save \"./image_processed.jpg\"";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_missing_streamer(&syntax), Err(0..1))
+    assert_eq!(check_missing_streamer(&syntax), Err(0..1));
 }
 
 #[test]

@@ -19,7 +19,7 @@ fn test_check_missing_filters() {
     let test_data = "meow | | test | awa | | nya";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_missing_filters(&syntax), Err(vec![5..8, 20..25]))
+    assert_eq!(check_missing_filters(&syntax), Err(vec![5..8, 20..25]));
 }
 
 #[test]

@@ -27,7 +27,7 @@ fn test_check_missing_sink() {
     let test_data = "meow | invert | ";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_missing_sink(&syntax), Err(14..15))
+    assert_eq!(check_missing_sink(&syntax), Err(14..15));
 }
 
 #[test]

@@ -35,7 +35,7 @@ fn test_check_literal_as_sink() {
     let test_data = "meow | test | 3";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_literal_as_sink(&syntax), Err(14..15))
+    assert_eq!(check_literal_as_sink(&syntax), Err(14..15));
 }
 
 #[test]

@@ -46,7 +46,7 @@ fn test_check_literal_as_filter() {
     assert_eq!(
         check_literal_as_filter(&syntax),
         Err(vec![7..12, 15..17, 20..24])
-    )
+    );
 }
 
 #[test]

@@ -54,7 +54,7 @@ fn test_check_literal_as_filter_positive_on_sink() {
     let test_data = "meow | 42";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_literal_as_filter(&syntax), Ok(()))
+    assert_eq!(check_literal_as_filter(&syntax), Ok(()));
 }
 
 #[test]

@@ -62,5 +62,5 @@ fn test_check_literal_with_args() {
     let test_data = "14 12 | sink";
     let syntax = parse_syntax(test_data).unwrap();
 
-    assert_eq!(check_literal_with_args(&syntax), Err(0..5))
+    assert_eq!(check_literal_with_args(&syntax), Err(0..5));
 }
@@ -1,4 +1,4 @@
-use std::mem;
 
 use logos::Logos;
 use logos::Span;
@@ -40,7 +40,7 @@ pub fn parse_syntax(input: &str) -> Result<Vec<PipelineElement>, Vec<logos::Span
     let mut r = Vec::new();
 
     let mut partial_command: Vec<CommandPart> = Vec::new();
-    for (tok, span) in lexer.spanned().into_iter() {
+    for (tok, span) in lexer.spanned() {
         if let Ok(tok) = tok {
             match tok {
                 Token::Pipe => {
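Dropping `.into_iter()` from the loop header looks like clippy's `explicit_into_iter_loop` lint: a `for` loop already runs its expression through `IntoIterator`, so spelling the call out adds nothing. A minimal example, unrelated to the lexer in the diff:

fn main() {
    let words = vec!["invert", "save"];
    // `for w in words.into_iter()` would behave identically; the loop
    // desugars through IntoIterator either way:
    for w in words {
        println!("{w}");
    }
}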
@@ -52,10 +52,7 @@ pub fn parse_syntax(input: &str) -> Result<Vec<PipelineElement>, Vec<logos::Span
                     let span = partial_command.first().unwrap().span.start
                         ..partial_command.last().unwrap().span.end;
                     r.push(PipelineElement {
-                        kind: PipelineElementKind::Command(mem::replace(
-                            &mut partial_command,
-                            Vec::new(),
-                        )),
+                        kind: PipelineElementKind::Command(std::mem::take(&mut partial_command)),
                         span,
                     });
                 }
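Collapsing `mem::replace(&mut partial_command, Vec::new())` into `std::mem::take(&mut partial_command)` is clippy's `mem_replace_with_default` suggestion: when the replacement value is just the type's `Default`, `take` expresses the same move-out-and-reset in one call, which is also why the earlier `@@ -1,4 +1,4 @@` hunk can drop `use std::mem;`. A small sketch of the pattern with an illustrative helper:

use std::mem;

// Illustrative only; drain_batch is not part of the repository.
fn drain_batch(buf: &mut Vec<u32>) -> Vec<u32> {
    // mem::replace(buf, Vec::new()) and mem::take(buf) do the same thing,
    // because Vec implements Default; take states the intent directly.
    mem::take(buf)
}

fn main() {
    let mut buf = vec![1, 2, 3];
    let batch = drain_batch(&mut buf);
    assert_eq!(batch, vec![1, 2, 3]);
    assert!(buf.is_empty());
}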
@@ -83,7 +80,7 @@ pub fn parse_syntax(input: &str) -> Result<Vec<PipelineElement>, Vec<logos::Span
                 _ => {}
             }
         } else {
-            errs.push(span)
+            errs.push(span);
         }
     }
 
@@ -110,5 +107,5 @@ pub fn parse_syntax(input: &str) -> Result<Vec<PipelineElement>, Vec<logos::Span
 #[test]
 fn test_invalid_toks() {
     let test_data = "meow | gay $ error!";
-    assert_eq!(parse_syntax(test_data), Err(vec![11..12, 18..19]))
+    assert_eq!(parse_syntax(test_data), Err(vec![11..12, 18..19]));
 }
@@ -81,7 +81,7 @@ pub fn into_typed_repr(
                     }
                 },
                 span,
-            })
+            });
         } else {
             let Some(CommandPart {
                 kind: CommandPartKind::Word(name),
@@ -119,7 +119,7 @@ pub fn into_typed_repr(
                         .collect(),
                 },
                 span: span.clone(),
-            })
+            });
             }
         }
         PipelineElementKind::Pipe => {}