From 5a01b547078e45cc1a96a062334d8571f129ddc2 Mon Sep 17 00:00:00 2001 From: Jon Gjengset Date: Fri, 24 May 2019 13:52:06 -0400 Subject: [PATCH 01/10] Add std::mem::take as suggested in #61129 The name `swap_default` was suggested but rejected. @SimonSapin observed that this operation isn't really a `swap` in the same sense as `mem::swap`; it is a `replace`. Since `replace_default` is a bit misleading, the "correct" name would be `replace_with_default`, which is quite verbose. @czipperz observed that we have precedent for using `take` to refer to methods that replace with `Default` in `Cell::take` and `Option::take`, so this reverts commit 99c00591c29b472c8a87c4a9342d0e0c508647a3 to return to the original `take` method name. The name `replace_with_default` was suggested, but was deemed too verbose, especially given that we use `take` for methods that replace with `Default` elsewhere. --- src/libcore/mem/mod.rs | 55 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) diff --git a/src/libcore/mem/mod.rs b/src/libcore/mem/mod.rs index 91449f09936aa..c4d8e40a5e682 100644 --- a/src/libcore/mem/mod.rs +++ b/src/libcore/mem/mod.rs @@ -503,6 +503,61 @@ pub fn swap<T>(x: &mut T, y: &mut T) { } } +/// Replace `dest` with the default value of `T`, and return the previous `dest` value. +/// +/// # Examples +/// +/// A simple example: +/// +/// ``` +/// use std::mem; +/// +/// let mut v: Vec<i32> = vec![1, 2]; +/// +/// let old_v = mem::take(&mut v); +/// assert_eq!(vec![1, 2], old_v); +/// assert!(v.is_empty()); +/// ``` +/// +/// `take` allows taking ownership of a struct field by replacing it with an "empty" value. 
+/// Without `take` you can run into issues like these: +/// +/// ```compile_fail,E0507 +/// struct Buffer<T> { buf: Vec<T> } +/// +/// impl<T> Buffer<T> { +/// fn get_and_reset(&mut self) -> Vec<T> { +/// // error: cannot move out of dereference of `&mut`-pointer +/// let buf = self.buf; +/// self.buf = Vec::new(); +/// buf +/// } +/// } +/// ``` +/// +/// Note that `T` does not necessarily implement [`Clone`], so it can't even clone and reset +/// `self.buf`. But `take` can be used to disassociate the original value of `self.buf` from +/// `self`, allowing it to be returned: +/// +/// ``` +/// # #![allow(dead_code)] +/// use std::mem; +/// +/// # struct Buffer<T> { buf: Vec<T> } +/// impl<T> Buffer<T> { +/// fn get_and_reset(&mut self) -> Vec<T> { +/// mem::take(&mut self.buf) +/// } +/// } +/// ``` +/// +/// [`Clone`]: ../../std/clone/trait.Clone.html +#[inline] +#[unstable(feature = "mem_take", issue = "61129")] +pub fn take<T: Default>(dest: &mut T) -> T { + replace(dest, T::default()) +} /// Moves `src` into the referenced `dest`, returning the previous `dest` value. /// /// Neither value is dropped. 
From 41078f34f70259fdf516518e57f45d6b1c048c9f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Thu, 6 Jun 2019 22:17:22 +0300 Subject: [PATCH 02/10] Bump dirs, rand and redox_users --- Cargo.lock | 61 +++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 49 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 719321574435a..7b657b5fc73fc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -755,11 +755,11 @@ dependencies = [ [[package]] name = "dirs" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", - "redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -960,6 +960,11 @@ dependencies = [ "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "fuchsia-cprng" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "fuchsia-zircon" version = "0.3.3" @@ -1931,7 +1936,7 @@ version = "0.7.22" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "phf_shared 0.7.22 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2087,11 +2092,13 @@ dependencies = [ [[package]] name = "rand" -version = "0.4.3" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "fuchsia-zircon 0.3.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -2127,6 +2134,11 @@ name = "rand_core" version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "rand_core" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "rand_hc" version = "0.1.0" @@ -2143,6 +2155,19 @@ dependencies = [ "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rand_os" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", + "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand_pcg" version = "0.1.1" @@ -2178,7 +2203,15 @@ dependencies = [ "lazy_static 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.3 
(registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "rdrand" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -2196,12 +2229,12 @@ dependencies = [ [[package]] name = "redox_users" -version = "0.2.0" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", "redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -3130,7 +3163,7 @@ dependencies = [ "cargo_metadata 0.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "derive-new 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)", "diff 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", - "dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)", + "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "env_logger 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)", "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.19 (registry+https://github.com/rust-lang/crates.io-index)", @@ -4173,7 +4206,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum difference 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"524cbf6897b527295dff137cec09ecf3a05f4fddffd7dfcd1585403449e74198" "checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90" "checksum directories 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "72d337a64190607d4fcca2cb78982c5dd57f4916e19696b48a575fa746b6cb0f" -"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a" +"checksum dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3fd78930633bd1c6e35c4b42b1df7b0cbc6bc191146e512bb3bedf243fcc3901" "checksum dlmalloc 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "f283302e035e61c23f2b86b3093e8c6273a4c3125742d6087e96ade001ca5e63" "checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0" "checksum elasticlunr-rs 2.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "a99a310cd1f9770e7bf8e48810c7bcbb0e078c8fb23a8c7bcf0da4c2bf61a455" @@ -4196,6 +4229,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum fs2 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "9564fc758e15025b46aa6643b1b77d047d1a56a1aea6e01002ac0c7026876213" "checksum fs_extra 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5f2a4a2034423744d2cc7ca2068453168dcdb82c438419e639a26bd87839c674" "checksum fst 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d94485a00b1827b861dd9d1a2cc9764f9044d4c535514c0760a5a2012ef3399f" +"checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" "checksum fuchsia-zircon 0.3.3 
(registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum futf 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "7c9c1ce3fa9336301af935ab852c437817d14cd33690446569392e65170aac3b" @@ -4311,19 +4345,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" "checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a" "checksum quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "53fa22a1994bd0f9372d7a816207d8a2677ad0325b073f5c5332760f0fb62b5c" "checksum racer 2.1.22 (registry+https://github.com/rust-lang/crates.io-index)" = "1e4323343f25bc372dc9293ac6b5cd3034b32784af1e7de9366b4db71466d8c7" -"checksum rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8356f47b32624fef5b3301c1be97e5944ecdd595409cc5da11d05f211db6cfbd" +"checksum rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" "checksum rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ae9d223d52ae411a33cf7e54ec6034ec165df296ccd23533d671a28252b6f66a" "checksum rand_chacha 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "771b009e3a508cb67e8823dda454aaa5368c7bc1c16829fb77d3e980440dd34a" "checksum rand_core 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0905b6b7079ec73b314d4c748701f6931eb79fd97c668caa3f1899b22b32c6db" +"checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" +"checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" "checksum rand_pcg 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "086bd09a33c7044e56bb44d5bdde5a60e7f119a9e95b0775f545de759a32fe05" "checksum rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "effa3fcaa47e18db002bdde6060944b6d2f9cfd8db471c30e873448ad9187be3" "checksum rayon 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "80e811e76f1dbf68abf87a759083d34600017fc4e10b6bd5ad84a700f9dba4b1" "checksum rayon-core 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9d24ad214285a7729b174ed6d3bcfcb80177807f959d95fafd5bfc5c4f201ac8" +"checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" "checksum redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)" = "679da7508e9a6390aeaf7fbd02a800fdc64b73fe2204dd2c8ae66d22d9d5ad5d" "checksum redox_termios 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7e891cfe48e9100a70a3b6eb652fef28920c117d366339687bd5576160db0f76" -"checksum redox_users 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "214a97e49be64fd2c86f568dd0cb2c757d2cc53de95b273b6ad0a1c908482f26" +"checksum redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = 
"3fe5204c3a17e97dde73f285d49be585df59ed84b50a872baf416e73b62c3828" "checksum regex 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "9329abc99e39129fcceabd24cf5d85b4671ef7c29c50e972bc5afe32438ec384" "checksum regex 1.1.6 (registry+https://github.com/rust-lang/crates.io-index)" = "8f0a0bcab2fd7d1d7c54fa9eae6f43eddeb9ce2e7352f8518a814a4f65d60c58" "checksum regex-syntax 0.5.6 (registry+https://github.com/rust-lang/crates.io-index)" = "7d707a4fa2637f2dca2ef9fd02225ec7661fe01a53623c1e6515b6916511f7a7" From 3da094319c705e123e1176553d9c5b5955ff0642 Mon Sep 17 00:00:00 2001 From: Vadim Petrochenkov Date: Fri, 7 Jun 2019 13:31:13 +0300 Subject: [PATCH 03/10] parser: `self.span` -> `self.token.span` --- src/libsyntax/attr/mod.rs | 4 +- src/libsyntax/config.rs | 2 +- src/libsyntax/ext/expand.rs | 2 +- src/libsyntax/ext/source_util.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 14 +- src/libsyntax/ext/tt/macro_rules.rs | 4 +- src/libsyntax/parse/attr.rs | 12 +- src/libsyntax/parse/diagnostics.rs | 70 ++--- src/libsyntax/parse/literal.rs | 4 +- src/libsyntax/parse/mod.rs | 4 +- src/libsyntax/parse/parser.rs | 371 ++++++++++++++------------- src/libsyntax_ext/assert.rs | 4 +- src/libsyntax_ext/format.rs | 4 +- 13 files changed, 255 insertions(+), 242 deletions(-) diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs index edfe097c72f61..b5d9b761773b4 100644 --- a/src/libsyntax/attr/mod.rs +++ b/src/libsyntax/attr/mod.rs @@ -735,9 +735,9 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) - raw_attr.clone(), ); - let start_span = parser.span; + let start_span = parser.token.span; let (path, tokens) = panictry!(parser.parse_meta_item_unrestricted()); - let end_span = parser.span; + let end_span = parser.token.span; if parser.token != token::Eof { parse_sess.span_diagnostic .span_err(start_span.to(end_span), "invalid crate attribute"); diff --git a/src/libsyntax/config.rs 
b/src/libsyntax/config.rs index fc413caa428dd..6123e95ccf821 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -121,7 +121,7 @@ impl<'a> StripUnconfigured<'a> { let mut expanded_attrs = Vec::with_capacity(1); while !parser.check(&token::CloseDelim(token::Paren)) { - let lo = parser.span.lo(); + let lo = parser.token.span.lo(); let (path, tokens) = parser.parse_meta_item_unrestricted()?; expanded_attrs.push((path, tokens, parser.prev_span.with_lo(lo))); parser.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Paren)])?; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 7cd847eac4690..9960539555332 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1041,7 +1041,7 @@ impl<'a> Parser<'a> { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); // Avoid emitting backtrace info twice. - let def_site_span = self.span.with_ctxt(SyntaxContext::empty()); + let def_site_span = self.token.span.with_ctxt(SyntaxContext::empty()); let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); err.span_label(span, "caused by the macro expansion here"); let msg = format!( diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index e1cb90d9e71d6..4e2aab46542d2 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstrea while self.p.token != token::Eof { match panictry!(self.p.parse_item()) { Some(item) => ret.push(item), - None => self.p.diagnostic().span_fatal(self.p.span, + None => self.p.diagnostic().span_fatal(self.p.token.span, &format!("expected item, found `{}`", self.p.this_token_to_string())) .raise() diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 82cc9e8ac2280..f98e1433356c2 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ 
b/src/libsyntax/ext/tt/macro_parser.rs @@ -675,7 +675,7 @@ pub fn parse( // // This MatcherPos instance is allocated on the stack. All others -- and // there are frequently *no* others! -- are allocated on the heap. - let mut initial = initial_matcher_pos(ms, parser.span); + let mut initial = initial_matcher_pos(ms, parser.token.span); let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)]; let mut next_items = Vec::new(); @@ -721,15 +721,15 @@ pub fn parse( return nameize(sess, ms, matches); } else if eof_items.len() > 1 { return Error( - parser.span, + parser.token.span, "ambiguity: multiple successful parses".to_string(), ); } else { return Failure( - Token::new(token::Eof, if parser.span.is_dummy() { - parser.span + Token::new(token::Eof, if parser.token.span.is_dummy() { + parser.token.span } else { - sess.source_map().next_point(parser.span) + sess.source_map().next_point(parser.token.span) }), "missing tokens in macro arguments", ); @@ -753,7 +753,7 @@ pub fn parse( .join(" or "); return Error( - parser.span, + parser.token.span, format!( "local ambiguity: multiple parsing options: {}", match next_items.len() { @@ -927,7 +927,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: Symbol) -> Nonterminal { sym::ty => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one sym::ident => if let Some((name, is_raw)) = get_macro_name(&p.token) { - let span = p.span; + let span = p.token.span; p.bump(); token::NtIdent(Ident::new(name, span), is_raw) } else { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 7ab51c1eb20c9..4998129fdee51 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -47,7 +47,7 @@ impl<'a> ParserAnyMacro<'a> { let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| { if parser.token == token::Eof && e.message().ends_with(", found ``") { if !e.span.is_dummy() { // early end of macro arm 
(#52866) - e.replace_span_with(parser.sess.source_map().next_point(parser.span)); + e.replace_span_with(parser.sess.source_map().next_point(parser.token.span)); } let msg = &e.message[0]; e.message[0] = ( @@ -63,7 +63,7 @@ impl<'a> ParserAnyMacro<'a> { if parser.sess.source_map().span_to_filename(arm_span).is_real() { e.span_label(arm_span, "in this macro arm"); } - } else if !parser.sess.source_map().span_to_filename(parser.span).is_real() { + } else if !parser.sess.source_map().span_to_filename(parser.token.span).is_real() { e.span_label(site_span, "in this macro invocation"); } e diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index d83b76f4d2366..77a87e26e60d5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -39,7 +39,7 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span); if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ @@ -83,7 +83,7 @@ impl<'a> Parser<'a> { self.token); let (span, path, tokens, style) = match self.token.kind { token::Pound => { - let lo = self.span; + let lo = self.token.span; self.bump(); if let InnerAttributeParsePolicy::Permitted = inner_parse_policy { @@ -93,7 +93,7 @@ impl<'a> Parser<'a> { self.bump(); if let InnerAttributeParsePolicy::NotPermitted { reason } = inner_parse_policy { - let span = self.span; + let span = self.token.span; self.diagnostic() .struct_span_err(span, reason) .note("inner attributes, like `#![no_std]`, annotate the item \ @@ -201,7 +201,7 @@ impl<'a> Parser<'a> { } token::DocComment(s) => { // we need to get the position of this token before we bump. 
- let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.span); + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, self.token.span); if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); @@ -249,7 +249,7 @@ impl<'a> Parser<'a> { return Ok(meta); } - let lo = self.span; + let lo = self.token.span; let path = self.parse_path(PathStyle::Mod)?; let node = self.parse_meta_item_kind()?; let span = lo.to(self.prev_span); @@ -284,7 +284,7 @@ impl<'a> Parser<'a> { let found = self.this_token_to_string(); let msg = format!("expected unsuffixed literal or identifier, found `{}`", found); - Err(self.diagnostic().struct_span_err(self.span, &msg)) + Err(self.diagnostic().struct_span_err(self.token.span, &msg)) } /// matches meta_seq = ( COMMASEP(meta_item_inner) ) diff --git a/src/libsyntax/parse/diagnostics.rs b/src/libsyntax/parse/diagnostics.rs index 7f0bf4a90508b..c4db9a9df45a9 100644 --- a/src/libsyntax/parse/diagnostics.rs +++ b/src/libsyntax/parse/diagnostics.rs @@ -162,7 +162,7 @@ impl RecoverQPath for Expr { impl<'a> Parser<'a> { pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { - self.span_fatal(self.span, m) + self.span_fatal(self.token.span, m) } pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, m: &str) -> DiagnosticBuilder<'a> { @@ -174,7 +174,7 @@ } pub fn bug(&self, m: &str) -> ! 
{ - self.sess.span_diagnostic.span_bug(self.span, m) + self.sess.span_diagnostic.span_bug(self.token.span, m) } pub fn span_err<S: Into<MultiSpan>>(&self, sp: S, m: &str) { @@ -199,13 +199,13 @@ impl<'a> Parser<'a> { crate fn expected_ident_found(&self) -> DiagnosticBuilder<'a> { let mut err = self.struct_span_err( - self.span, + self.token.span, &format!("expected identifier, found {}", self.this_token_descr()), ); if let token::Ident(name, false) = self.token.kind { - if Ident::new(name, self.span).is_raw_guess() { + if Ident::new(name, self.token.span).is_raw_guess() { err.span_suggestion( - self.span, + self.token.span, "you can escape reserved keywords to use them as identifiers", format!("r#{}", name), Applicability::MaybeIncorrect, @@ -213,12 +213,12 @@ impl<'a> Parser<'a> { } } if let Some(token_descr) = self.token_descr() { - err.span_label(self.span, format!("expected identifier, found {}", token_descr)); + err.span_label(self.token.span, format!("expected identifier, found {}", token_descr)); } else { - err.span_label(self.span, "expected identifier"); + err.span_label(self.token.span, "expected identifier"); if self.token == token::Comma && self.look_ahead(1, |t| t.is_ident()) { err.span_suggestion( - self.span, + self.token.span, "remove this comma", String::new(), Applicability::MachineApplicable, @@ -277,11 +277,11 @@ impl<'a> Parser<'a> { (self.sess.source_map().next_point(self.prev_span), format!("expected {} here", expect))) }; - self.last_unexpected_token_span = Some(self.span); + self.last_unexpected_token_span = Some(self.token.span); let mut err = self.fatal(&msg_exp); if self.token.is_ident_named(sym::and) { err.span_suggestion_short( - self.span, + self.token.span, "use `&&` instead of `and` for the boolean operator", "&&".to_string(), Applicability::MaybeIncorrect, @@ -289,7 +289,7 @@ impl<'a> Parser<'a> { } if self.token.is_ident_named(sym::or) { err.span_suggestion_short( - self.span, + self.token.span, "use `||` instead of `or` for the boolean operator", 
"||".to_string(), Applicability::MaybeIncorrect, @@ -326,7 +326,7 @@ impl<'a> Parser<'a> { self.token.is_keyword(kw::While) ); let cm = self.sess.source_map(); - match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) { + match (cm.lookup_line(self.token.span.lo()), cm.lookup_line(sp.lo())) { (Ok(ref a), Ok(ref b)) if a.line != b.line && is_semi_suggestable => { // The spans are in different lines, expected `;` and found `let` or `return`. // High likelihood that it is only a missing `;`. @@ -352,16 +352,16 @@ impl<'a> Parser<'a> { // | -^^^^^ unexpected token // | | // | expected one of 8 possible tokens here - err.span_label(self.span, label_exp); + err.span_label(self.token.span, label_exp); } _ if self.prev_span == syntax_pos::DUMMY_SP => { // Account for macro context where the previous span might not be // available to avoid incorrect output (#54841). - err.span_label(self.span, "unexpected token"); + err.span_label(self.token.span, "unexpected token"); } _ => { err.span_label(sp, label_exp); - err.span_label(self.span, "unexpected token"); + err.span_label(self.token.span, "unexpected token"); } } Err(err) @@ -429,7 +429,7 @@ impl<'a> Parser<'a> { // Keep the span at the start so we can highlight the sequence of `>` characters to be // removed. - let lo = self.span; + let lo = self.token.span; // We need to look-ahead to see if we have `>` characters without moving the cursor forward // (since we might have the field access case and the characters we're eating are @@ -474,7 +474,7 @@ impl<'a> Parser<'a> { // Eat from where we started until the end token so that parsing can continue // as if we didn't have those extra angle brackets. 
self.eat_to_tokens(&[&end]); - let span = lo.until(self.span); + let span = lo.until(self.token.span); let plural = number_of_gt > 1 || number_of_shr >= 1; self.diagnostic() @@ -502,7 +502,7 @@ impl<'a> Parser<'a> { match lhs.node { ExprKind::Binary(op, _, _) if op.node.is_comparison() => { // respan to include both operators - let op_span = op.span.to(self.span); + let op_span = op.span.to(self.token.span); let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node == BinOpKind::Lt && @@ -734,15 +734,15 @@ impl<'a> Parser<'a> { let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { // Point at the end of the macro call when reaching end of macro arguments. (token::Eof, Some(_)) => { - let sp = self.sess.source_map().next_point(self.span); + let sp = self.sess.source_map().next_point(self.token.span); (sp, sp) } // We don't want to point at the following span after DUMMY_SP. // This happens when the parser finds an empty TokenStream. - _ if self.prev_span == DUMMY_SP => (self.span, self.span), + _ if self.prev_span == DUMMY_SP => (self.token.span, self.token.span), // EOF, don't want to point at the following char, but rather the last token. - (token::Eof, None) => (self.prev_span, self.span), - _ => (self.sess.source_map().next_point(self.prev_span), self.span), + (token::Eof, None) => (self.prev_span, self.token.span), + _ => (self.sess.source_map().next_point(self.prev_span), self.token.span), }; let msg = format!( "expected `{}`, found {}", @@ -789,7 +789,7 @@ impl<'a> Parser<'a> { // interpreting `await { }?` as `?.await`. 
self.parse_block_expr( None, - self.span, + self.token.span, BlockCheckMode::Default, ThinVec::new(), ) @@ -819,9 +819,9 @@ impl<'a> Parser<'a> { self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren)) { // future.await() - let lo = self.span; + let lo = self.token.span; self.bump(); // ( - let sp = lo.to(self.span); + let sp = lo.to(self.token.span); self.bump(); // ) self.struct_span_err(sp, "incorrect use of `await`") .span_suggestion( @@ -854,7 +854,7 @@ impl<'a> Parser<'a> { next_sp: Span, maybe_path: bool, ) { - err.span_label(self.span, "expecting a type here because of type ascription"); + err.span_label(self.token.span, "expecting a type here because of type ascription"); let cm = self.sess.source_map(); let next_pos = cm.lookup_char_pos(next_sp.lo()); let op_pos = cm.lookup_char_pos(cur_op_span.hi()); @@ -911,7 +911,7 @@ impl<'a> Parser<'a> { // we want to use the last closing delim that would apply for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() { if tokens.contains(&token::CloseDelim(unmatched.expected_delim)) - && Some(self.span) > unmatched.unclosed_span + && Some(self.token.span) > unmatched.unclosed_span { pos = Some(i); } @@ -1070,28 +1070,28 @@ impl<'a> Parser<'a> { crate fn expected_semi_or_open_brace(&mut self) -> PResult<'a, ast::TraitItem> { let token_str = self.this_token_descr(); let mut err = self.fatal(&format!("expected `;` or `{{`, found {}", token_str)); - err.span_label(self.span, "expected `;` or `{`"); + err.span_label(self.token.span, "expected `;` or `{`"); Err(err) } crate fn eat_incorrect_doc_comment(&mut self, applied_to: &str) { if let token::DocComment(_) = self.token.kind { let mut err = self.diagnostic().struct_span_err( - self.span, + self.token.span, &format!("documentation comments cannot be applied to {}", applied_to), ); - err.span_label(self.span, "doc comments are not allowed here"); + err.span_label(self.token.span, "doc comments are not allowed here"); err.emit(); self.bump(); } else if 
self.token == token::Pound && self.look_ahead(1, |t| { *t == token::OpenDelim(token::Bracket) }) { - let lo = self.span; + let lo = self.token.span; // Skip every token until next possible arg. while self.token != token::CloseDelim(token::Bracket) { self.bump(); } - let sp = lo.to(self.span); + let sp = lo.to(self.token.span); self.bump(); let mut err = self.diagnostic().struct_span_err( sp, @@ -1217,16 +1217,16 @@ impl<'a> Parser<'a> { crate fn expected_expression_found(&self) -> DiagnosticBuilder<'a> { let (span, msg) = match (&self.token.kind, self.subparser_name) { (&token::Eof, Some(origin)) => { - let sp = self.sess.source_map().next_point(self.span); + let sp = self.sess.source_map().next_point(self.token.span); (sp, format!("expected expression, found end of {}", origin)) } - _ => (self.span, format!( + _ => (self.token.span, format!( "expected expression, found {}", self.this_token_descr(), )), }; let mut err = self.struct_span_err(span, &msg); - let sp = self.sess.source_map().start_point(self.span); + let sp = self.sess.source_map().start_point(self.token.span); if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow().get(&sp) { self.sess.expr_parentheses_needed(&mut err, *sp, None); } diff --git a/src/libsyntax/parse/literal.rs b/src/libsyntax/parse/literal.rs index 7d5356ffe4d8d..be8d11c45ff6d 100644 --- a/src/libsyntax/parse/literal.rs +++ b/src/libsyntax/parse/literal.rs @@ -275,10 +275,10 @@ impl<'a> Parser<'a> { if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = t.kind { let next_span = self.look_ahead_span(1); - if self.span.hi() == next_span.lo() { + if self.token.span.hi() == next_span.lo() { let s = String::from("0.") + &symbol.as_str(); let kind = TokenKind::lit(token::Float, Symbol::intern(&s), suffix); - return Some(Token::new(kind, self.span.to(next_span))); + return Some(Token::new(kind, self.token.span.to(next_span))); } } None diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 
063823bbf4d11..1d708d39a1379 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -239,8 +239,8 @@ fn maybe_source_file_to_parser( let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; let mut parser = stream_to_parser(sess, stream, None); parser.unclosed_delims = unclosed_delims; - if parser.token == token::Eof && parser.span.is_dummy() { - parser.token.span = Span::new(end_pos, end_pos, parser.span.ctxt()); + if parser.token == token::Eof && parser.token.span.is_dummy() { + parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); } Ok(parser) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 43e7c9330e418..f36fb0731300c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -34,6 +34,7 @@ use crate::ast::{BinOpKind, UnOp}; use crate::ast::{RangeEnd, RangeSyntax}; use crate::{ast, attr}; use crate::ext::base::DummyResult; +use crate::ext::hygiene::SyntaxContext; use crate::source_map::{self, SourceMap, Spanned, respan}; use crate::parse::{SeqSep, classify, literal, token}; use crate::parse::lexer::UnmatchedBrace; @@ -132,12 +133,16 @@ macro_rules! 
maybe_whole_expr { token::NtPath(path) => { let path = path.clone(); $p.bump(); - return Ok($p.mk_expr($p.span, ExprKind::Path(None, path), ThinVec::new())); + return Ok($p.mk_expr( + $p.token.span, ExprKind::Path(None, path), ThinVec::new() + )); } token::NtBlock(block) => { let block = block.clone(); $p.bump(); - return Ok($p.mk_expr($p.span, ExprKind::Block(block, None), ThinVec::new())); + return Ok($p.mk_expr( + $p.token.span, ExprKind::Block(block, None), ThinVec::new() + )); } _ => {}, }; @@ -514,8 +519,9 @@ impl<'a> Parser<'a> { if let Some(directory) = directory { parser.directory = directory; - } else if !parser.span.is_dummy() { - if let FileName::Real(mut path) = sess.source_map().span_to_unmapped_path(parser.span) { + } else if !parser.token.span.is_dummy() { + if let FileName::Real(mut path) = + sess.source_map().span_to_unmapped_path(parser.token.span) { path.pop(); parser.directory.path = Cow::from(path); } @@ -596,7 +602,7 @@ impl<'a> Parser<'a> { } else if inedible.contains(&self.token) { // leave it in the input Ok(false) - } else if self.last_unexpected_token_span == Some(self.span) { + } else if self.last_unexpected_token_span == Some(self.token.span) { FatalError.raise(); } else { self.expected_one_of_not_found(edible, inedible) @@ -632,7 +638,7 @@ impl<'a> Parser<'a> { return Err(err); } } - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -748,7 +754,7 @@ impl<'a> Parser<'a> { true } token::BinOpEq(token::Plus) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::Eq, span); true } @@ -779,7 +785,7 @@ impl<'a> Parser<'a> { Ok(()) } token::AndAnd => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::And), span)) } _ => self.unexpected() @@ -796,7 +802,7 @@ impl<'a> 
Parser<'a> { Ok(()) } token::OrOr => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Ok(self.bump_with(token::BinOp(token::Or), span)) } _ => self.unexpected() @@ -821,12 +827,12 @@ impl<'a> Parser<'a> { true } token::BinOp(token::Shl) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::Lt, span); true } token::LArrow => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); self.bump_with(token::BinOp(token::Minus), span); true } @@ -861,15 +867,15 @@ impl<'a> Parser<'a> { Some(()) } token::BinOp(token::Shr) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Gt, span)) } token::BinOpEq(token::Shr) => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Ge, span)) } token::Ge => { - let span = self.span.with_lo(self.span.lo() + BytePos(1)); + let span = self.token.span.with_lo(self.token.span.lo() + BytePos(1)); Some(self.bump_with(token::Eq, span)) } _ => None, @@ -1018,7 +1024,7 @@ impl<'a> Parser<'a> { self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } - self.prev_span = self.meta_var_span.take().unwrap_or(self.span); + self.prev_span = self.meta_var_span.take().unwrap_or(self.token.span); // Record last token kind for possible error recovery. self.prev_token_kind = match self.token.kind { @@ -1041,7 +1047,7 @@ impl<'a> Parser<'a> { /// Advance the parser using provided token as a next one. Use this when /// consuming a part of a token. For example a single `<` from `<<`. 
fn bump_with(&mut self, next: TokenKind, span: Span) { - self.prev_span = self.span.with_hi(span.lo()); + self.prev_span = self.token.span.with_hi(span.lo()); // It would be incorrect to record the kind of the current token, but // fortunately for tokens currently using `bump_with`, the // prev_token_kind will be of no use anyway. @@ -1070,7 +1076,7 @@ impl<'a> Parser<'a> { crate fn look_ahead_span(&self, dist: usize) -> Span { if dist == 0 { - return self.span + return self.token.span } match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) { @@ -1171,7 +1177,7 @@ impl<'a> Parser<'a> { fn parse_trait_item_(&mut self, at_end: &mut bool, mut attrs: Vec) -> PResult<'a, TraitItem> { - let lo = self.span; + let lo = self.token.span; self.eat_bad_pub(); let (name, node, generics) = if self.eat_keyword(kw::Type) { self.parse_trait_item_assoc_ty()? @@ -1204,7 +1210,7 @@ impl<'a> Parser<'a> { // definition... // We don't allow argument names to be left off in edition 2018. - p.parse_arg_general(p.span.rust_2018(), true, false) + p.parse_arg_general(p.token.span.rust_2018(), true, false) })?; generics.where_clause = self.parse_where_clause()?; @@ -1268,7 +1274,7 @@ impl<'a> Parser<'a> { if self.eat(&token::RArrow) { Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true, false)?)) } else { - Ok(FunctionRetTy::Default(self.span.shrink_to_lo())) + Ok(FunctionRetTy::Default(self.token.span.shrink_to_lo())) } } @@ -1292,7 +1298,7 @@ impl<'a> Parser<'a> { maybe_recover_from_interpolated_ty_qpath!(self, allow_qpath_recovery); maybe_whole!(self, NtTy, |x| x); - let lo = self.span; + let lo = self.token.span; let mut impl_dyn_multi = false; let node = if self.eat(&token::OpenDelim(token::Paren)) { // `(TYPE)` is a parenthesized type. @@ -1376,7 +1382,7 @@ impl<'a> Parser<'a> { // Function pointer type or bound list (trait object type) starting with a poly-trait. 
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T` // `for<'lt> Trait1<'lt> + Trait2 + 'a` - let lo = self.span; + let lo = self.token.span; let lifetime_defs = self.parse_late_bound_lifetime_defs()?; if self.token_is_bare_fn_keyword() { self.parse_ty_bare_fn(lifetime_defs)? @@ -1391,7 +1397,7 @@ impl<'a> Parser<'a> { impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus; TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds) } else if self.check_keyword(kw::Dyn) && - (self.span.rust_2018() || + (self.token.span.rust_2018() || self.look_ahead(1, |t| t.can_begin_bound() && !can_continue_type_after_non_fn_ident(t))) { self.bump(); // `dyn` @@ -1604,9 +1610,9 @@ impl<'a> Parser<'a> { crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P> { maybe_whole_expr!(self); - let minus_lo = self.span; + let minus_lo = self.token.span; let minus_present = self.eat(&token::BinOp(token::Minus)); - let lo = self.span; + let lo = self.token.span; let literal = self.parse_lit()?; let hi = self.prev_span; let expr = self.mk_expr(lo.to(hi), ExprKind::Lit(literal), ThinVec::new()); @@ -1623,7 +1629,7 @@ impl<'a> Parser<'a> { fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { token::Ident(name, _) if name.is_path_segment_keyword() => { - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -1634,7 +1640,7 @@ impl<'a> Parser<'a> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> { match self.token.kind { token::Ident(name, false) if name == kw::Underscore => { - let span = self.span; + let span = self.token.span; self.bump(); Ok(Ident::new(name, span)) } @@ -1662,11 +1668,11 @@ impl<'a> Parser<'a> { // span in the case of something like `::Bar`. 
let (mut path, path_span); if self.eat_keyword(kw::As) { - let path_lo = self.span; + let path_lo = self.token.span; path = self.parse_path(PathStyle::Type)?; path_span = path_lo.to(self.prev_span); } else { - path_span = self.span.to(self.span); + path_span = self.token.span.to(self.token.span); path = ast::Path { segments: Vec::new(), span: path_span }; } @@ -1704,9 +1710,9 @@ impl<'a> Parser<'a> { path }); - let lo = self.meta_var_span.unwrap_or(self.span); + let lo = self.meta_var_span.unwrap_or(self.token.span); let mut segments = Vec::new(); - let mod_sep_ctxt = self.span.ctxt(); + let mod_sep_ctxt = self.token.span.ctxt(); if self.eat(&token::ModSep) { segments.push(PathSegment::path_root(lo.shrink_to_lo().with_ctxt(mod_sep_ctxt))); } @@ -1797,7 +1803,7 @@ impl<'a> Parser<'a> { // Generic arguments are found - `<`, `(`, `::<` or `::(`. self.eat(&token::ModSep); - let lo = self.span; + let lo = self.token.span; let args = if self.eat_lt() { // `<'a, T, A = U>` let (args, constraints) = @@ -1840,17 +1846,17 @@ impl<'a> Parser<'a> { /// Parses a single lifetime `'a` or panics. crate fn expect_lifetime(&mut self) -> Lifetime { if let Some(ident) = self.token.lifetime() { - let span = self.span; + let span = self.token.span; self.bump(); Lifetime { ident: Ident::new(ident.name, span), id: ast::DUMMY_NODE_ID } } else { - self.span_bug(self.span, "not a lifetime") + self.span_bug(self.token.span, "not a lifetime") } } fn eat_label(&mut self) -> Option