diff --git a/procedural-masquerade/lib.rs b/procedural-masquerade/lib.rs
index 4b51e149..4d29b20b 100644
--- a/procedural-masquerade/lib.rs
+++ b/procedural-masquerade/lib.rs
@@ -253,33 +253,33 @@ macro_rules! define_invoke_proc_macro {
         #[doc(hidden)]
         #[macro_export]
         macro_rules! $macro_name {
-            ($proc_macro_name: ident ! $paren: tt) => {
-                #[derive($proc_macro_name)]
-                #[allow(unused)]
-                enum ProceduralMasqueradeDummyType {
-                    // The magic happens here.
-                    //
-                    // We use an `enum` with an explicit discriminant
-                    // because that is the only case where a type definition
-                    // can contain a (const) expression.
-                    //
-                    // `(0, "foo").0` evalutes to 0, with the `"foo"` part ignored.
-                    //
-                    // By the time the `#[proc_macro_derive]` function
-                    // implementing `#[derive($proc_macro_name)]` is called,
-                    // `$paren` has already been replaced with the input of this inner macro,
-                    // but `stringify!` has not been expanded yet.
-                    //
-                    // This how arbitrary tokens can be inserted
-                    // in the input to the `#[proc_macro_derive]` function.
-                    //
-                    // Later, `stringify!(...)` is expanded into a string literal
-                    // which is then ignored.
-                    // Using `stringify!` enables passing arbitrary tokens
-                    // rather than only what can be parsed as a const expression.
-                    Input = (0, stringify! $paren ).0,
-                }
-            }
+            ($proc_macro_name: ident ! $paren: tt) => {
+                #[derive($proc_macro_name)]
+                #[allow(unused)]
+                enum ProceduralMasqueradeDummyType {
+                    // The magic happens here.
+                    //
+                    // We use an `enum` with an explicit discriminant
+                    // because that is the only case where a type definition
+                    // can contain a (const) expression.
+                    //
+                    // `(0, "foo").0` evalutes to 0, with the `"foo"` part ignored.
+                    //
+                    // By the time the `#[proc_macro_derive]` function
+                    // implementing `#[derive($proc_macro_name)]` is called,
+                    // `$paren` has already been replaced with the input of this inner macro,
+                    // but `stringify!` has not been expanded yet.
+                    //
+                    // This how arbitrary tokens can be inserted
+                    // in the input to the `#[proc_macro_derive]` function.
+                    //
+                    // Later, `stringify!(...)` is expanded into a string literal
+                    // which is then ignored.
+                    // Using `stringify!` enables passing arbitrary tokens
+                    // rather than only what can be parsed as a const expression.
+                    Input = (0, stringify! $paren ).0,
+                }
+            }
         }
     };
 }
diff --git a/src/color.rs b/src/color.rs
index a7d72057..8d0d521f 100644
--- a/src/color.rs
+++ b/src/color.rs
@@ -291,7 +291,7 @@ impl Color {
                 let name = name.clone();
                 return input.parse_nested_block(|arguments| {
                     parse_color_function(component_parser, &*name, arguments)
-                })
+                });
             }
             _ => Err(()),
         }
diff --git a/src/nth.rs b/src/nth.rs
index 063875f7..a0c0a007 100644
--- a/src/nth.rs
+++ b/src/nth.rs
@@ -73,12 +73,12 @@ pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), Basic
             ref token => {
                 let token = token.clone();
                 Err(input.new_basic_unexpected_token_error(token))
-            },
+            }
         },
         ref token => {
             let token = token.clone();
             Err(input.new_basic_unexpected_token_error(token))
-        },
+        }
     }
 }

diff --git a/src/parser.rs b/src/parser.rs
index 21beb6b2..8b2a231b 100644
--- a/src/parser.rs
+++ b/src/parser.rs
@@ -933,17 +933,15 @@ impl<'i: 't, 't> Parser<'i, 't> {
             Ok(&Token::Function(_))
             | Ok(&Token::ParenthesisBlock)
             | Ok(&Token::SquareBracketBlock)
-            | Ok(&Token::CurlyBracketBlock) => {
-                self.parse_nested_block(|input| {
-                    input.expect_no_error_token().map_err(Into::into)
-                }).map_err(ParseError::<()>::basic)?
-            }
+            | Ok(&Token::CurlyBracketBlock) => self
+                .parse_nested_block(|input| input.expect_no_error_token().map_err(Into::into))
+                .map_err(ParseError::<()>::basic)?,
             Ok(t) => {
                 // FIXME: maybe these should be separate variants of
                 // BasicParseError instead?
                 if t.is_parse_error() {
                     let token = t.clone();
-                    return Err(self.new_basic_unexpected_token_error(token))
+                    return Err(self.new_basic_unexpected_token_error(token));
                 }
             }
             Err(_) => return Ok(()),
diff --git a/src/rules_and_declarations.rs b/src/rules_and_declarations.rs
index 6034452f..7d68f431 100644
--- a/src/rules_and_declarations.rs
+++ b/src/rules_and_declarations.rs
@@ -267,7 +267,9 @@ where
         loop {
             let start = self.input.state();
             match self.input.next_including_whitespace_and_comments() {
-                Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) | Ok(&Token::Semicolon) => continue,
+                Ok(&Token::WhiteSpace(_)) | Ok(&Token::Comment(_)) | Ok(&Token::Semicolon) => {
+                    continue
+                }
                 Ok(&Token::Ident(ref name)) => {
                     let name = name.clone();
                     let result = {
@@ -288,9 +290,7 @@ where
                 Ok(token) => {
                     let token = token.clone();
                     let result = self.input.parse_until_after(Delimiter::Semicolon, |_| {
-                        Err(start
-                            .source_location()
-                            .new_unexpected_token_error(token))
+                        Err(start.source_location().new_unexpected_token_error(token))
                     });
                     return Some(result.map_err(|e| (e, self.input.slice_from(start.position()))));
                 }
@@ -370,15 +370,13 @@ where

         let start = self.input.state();
         let at_keyword = match self.input.next_byte()? {
-            b'@' => {
-                match self.input.next_including_whitespace_and_comments() {
-                    Ok(&Token::AtKeyword(ref name)) => Some(name.clone()),
-                    _ => {
-                        self.input.reset(&start);
-                        None
-                    },
+            b'@' => match self.input.next_including_whitespace_and_comments() {
+                Ok(&Token::AtKeyword(ref name)) => Some(name.clone()),
+                _ => {
+                    self.input.reset(&start);
+                    None
                 }
-            }
+            },
             _ => None,
         };

diff --git a/src/tests.rs b/src/tests.rs
index e1114eb1..0c93652d 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -469,11 +469,10 @@ fn serializer(preserve_comments: bool) {
                 _ => None,
             };
             if let Some(closing_token) = closing_token {
-                let result: Result<_, ParseError<()>> =
-                    input.parse_nested_block(|input| {
-                        write_to(previous_token, input, string, preserve_comments);
-                        Ok(())
-                    });
+                let result: Result<_, ParseError<()>> = input.parse_nested_block(|input| {
+                    write_to(previous_token, input, string, preserve_comments);
+                    Ok(())
+                });
                 result.unwrap();
                 closing_token.to_css(string).unwrap();
             }
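
The comment block in the procedural-masquerade hunk above describes the dummy-enum trick that crate relies on. As a rough standalone illustration only (the macro below is made up for this note, is not the crate's `define_invoke_proc_macro!`, and omits the `#[derive($proc_macro_name)]` attribute that does the real work), the pattern it explains looks like this:

// Minimal sketch of the dummy-enum trick: an explicit enum discriminant is
// the one place in a type definition that can hold a const expression, and
// `(0, stringify!(...)).0` evaluates to 0 while carrying arbitrary tokens.
macro_rules! invoke_with_dummy_type {
    ($paren: tt) => {
        #[allow(unused)]
        enum ProceduralMasqueradeDummyType {
            // In procedural-masquerade a `#[derive(...)]` attribute sits on
            // this enum, so the derive macro sees the tokens in `$paren`
            // before `stringify!` has been expanded.
            Input = (0, stringify! $paren).0,
        }
    };
}

// The parenthesized group only has to be balanced tokens, not valid Rust.
invoke_with_dummy_type!((width: 42 px ! important));

fn main() {
    // The discriminant really evaluates to 0; the stringified tokens are ignored.
    assert_eq!(ProceduralMasqueradeDummyType::Input as isize, 0);
}

Because `stringify!` is still unexpanded when the derive function runs, the derive sees the raw smuggled tokens and can emit whatever items it likes, while the compiled program only ever sees a discriminant of 0.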