diff --git a/mk/crates.mk b/mk/crates.mk index f830e1a9d9585..c0fd04401bcda 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -70,7 +70,7 @@ DEPS_graphviz := std DEPS_green := std native:context_switch DEPS_rustuv := std native:uv native:uv_support DEPS_native := std -DEPS_syntax := std term serialize log fmt_macros debug +DEPS_syntax := std term serialize log fmt_macros debug arena DEPS_rustc := syntax flate arena serialize getopts rbml \ time log graphviz debug rustc_llvm rustc_back DEPS_rustc_llvm := native:rustllvm libc std diff --git a/src/libfourcc/lib.rs b/src/libfourcc/lib.rs index 9e46da56a8e5d..3aa4005879232 100644 --- a/src/libfourcc/lib.rs +++ b/src/libfourcc/lib.rs @@ -63,10 +63,9 @@ use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::parse::token::InternedString; +use syntax::ptr::P; use rustc::plugin::Registry; -use std::gc::Gc; - #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("fourcc", expand_syntax_ext); @@ -135,7 +134,7 @@ struct Ident { } fn parse_tts(cx: &ExtCtxt, - tts: &[ast::TokenTree]) -> (Gc, Option) { + tts: &[ast::TokenTree]) -> (P, Option) { let p = &mut cx.new_parser_from_tts(tts); let ex = p.parse_expr(); let id = if p.token == token::EOF { @@ -156,7 +155,7 @@ fn parse_tts(cx: &ExtCtxt, fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool { let meta = cx.meta_name_value(sp, InternedString::new("target_endian"), ast::LitStr(InternedString::new("little"), ast::CookedStr)); - contains(cx.cfg().as_slice(), meta) + contains(cx.cfg().as_slice(), &*meta) } // FIXME (10872): This is required to prevent an LLVM assert on Windows diff --git a/src/libhexfloat/lib.rs b/src/libhexfloat/lib.rs index 03bd96fc260bc..ae7a3e66dfdf8 100644 --- a/src/libhexfloat/lib.rs +++ b/src/libhexfloat/lib.rs @@ -57,10 +57,9 @@ use syntax::ext::base; use syntax::ext::base::{ExtCtxt, MacExpr}; use syntax::ext::build::AstBuilder; use syntax::parse::token; +use syntax::ptr::P; use rustc::plugin::Registry; -use std::gc::Gc; - #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("hexfloat", expand_syntax_ext); @@ -122,7 +121,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let s = match expr.node { // expression is a literal - ast::ExprLit(lit) => match lit.node { + ast::ExprLit(ref lit) => match lit.node { // string literal ast::LitStr(ref s, _) => { s.clone() @@ -167,7 +166,7 @@ struct Ident { } fn parse_tts(cx: &ExtCtxt, - tts: &[ast::TokenTree]) -> (Gc, Option) { + tts: &[ast::TokenTree]) -> (P, Option) { let p = &mut cx.new_parser_from_tts(tts); let ex = p.parse_expr(); let id = if p.token == token::EOF { diff --git a/src/libregex_macros/lib.rs b/src/libregex_macros/lib.rs index 8aa9a2fc8fb94..cd00e459a7998 100644 --- a/src/libregex_macros/lib.rs +++ b/src/libregex_macros/lib.rs @@ -26,7 +26,6 @@ extern crate syntax; extern crate rustc; use std::rc::Rc; -use std::gc::{Gc, GC}; use syntax::ast; use syntax::codemap; @@ -35,6 +34,7 @@ use syntax::ext::base::{ExtCtxt, MacResult, MacExpr, DummyResult}; use syntax::parse::token; use syntax::print::pprust; use syntax::fold::Folder; +use syntax::ptr::P; use rustc::plugin::Registry; @@ -111,7 +111,7 @@ struct NfaGen<'a> { } impl<'a> NfaGen<'a> { - fn code(&mut self) -> Gc { + fn code(&mut self) -> P { // Most or all of the following things are used in the quasiquoted // expression returned. 
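A quick illustrative sketch of the pointer change being threaded through these plugin crates, not code from the patch: `std::gc::Gc` values were built with `box(GC) ...` and could be copied freely, while `syntax::ptr::P` is an owned pointer built with `P(...)`; the `parse_tts` and `code` helpers above now hand back an owned expression pointer, presumably `P<ast::Expr>`. A minimal construction example in the style this patch uses elsewhere:

    // Sketch only; assumes: use syntax::ast; use syntax::codemap::Span; use syntax::ptr::P;
    // Builds an owned wildcard pattern; before this patch the same node would
    // have been created as `box(GC) ast::Pat { .. }` yielding a Gc<ast::Pat>.
    fn wild_pat(sp: Span) -> P<ast::Pat> {
        P(ast::Pat {
            id: ast::DUMMY_NODE_ID,
            node: ast::PatWild(ast::PatWildSingle),
            span: sp,
        })
    }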
let num_cap_locs = 2 * self.prog.num_captures(); @@ -332,7 +332,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // Generates code for the `add` method, which is responsible for adding // zero-width states to the next queue of states to visit. - fn add_insts(&self) -> Gc { + fn add_insts(&self) -> P { let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| { let nextpc = pc + 1; let body = match *inst { @@ -433,7 +433,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // Generates the code for the `step` method, which processes all states // in the current queue that consume a single character. - fn step_insts(&self) -> Gc { + fn step_insts(&self) -> P { let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| { let nextpc = pc + 1; let body = match *inst { @@ -524,9 +524,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // Translates a character class into a match expression. // This avoids a binary search (and is hopefully replaced by a jump // table). - fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> Gc { - let expr_true = quote_expr!(self.cx, true); - + fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> P { let mut arms = ranges.iter().map(|&(mut start, mut end)| { if casei { start = start.to_uppercase(); @@ -534,7 +532,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, } let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start), quote_expr!(self.cx, $end))); - self.cx.arm(self.sp, vec!(pat), expr_true) + self.cx.arm(self.sp, vec!(pat), quote_expr!(self.cx, true)) }).collect::>(); arms.push(self.wild_arm_expr(quote_expr!(self.cx, false))); @@ -546,7 +544,7 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // Generates code for checking a literal prefix of the search string. // The code is only generated if the regex *has* a literal prefix. // Otherwise, a no-op is returned. - fn check_prefix(&self) -> Gc { + fn check_prefix(&self) -> P { if self.prog.prefix.len() == 0 { self.empty_block() } else { @@ -570,32 +568,32 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // A wild-card arm is automatically added that executes a no-op. It will // never be used, but is added to satisfy the compiler complaining about // non-exhaustive patterns. - fn match_insts(&self, mut arms: Vec) -> Gc { + fn match_insts(&self, mut arms: Vec) -> P { arms.push(self.wild_arm_expr(self.empty_block())); self.cx.expr_match(self.sp, quote_expr!(self.cx, pc), arms) } - fn empty_block(&self) -> Gc { + fn empty_block(&self) -> P { quote_expr!(self.cx, {}) } // Creates a match arm for the instruction at `pc` with the expression // `body`. - fn arm_inst(&self, pc: uint, body: Gc) -> ast::Arm { + fn arm_inst(&self, pc: uint, body: P) -> ast::Arm { let pc_pat = self.cx.pat_lit(self.sp, quote_expr!(self.cx, $pc)); self.cx.arm(self.sp, vec!(pc_pat), body) } // Creates a wild-card match arm with the expression `body`. - fn wild_arm_expr(&self, body: Gc) -> ast::Arm { + fn wild_arm_expr(&self, body: P) -> ast::Arm { ast::Arm { attrs: vec!(), - pats: vec!(box(GC) ast::Pat{ + pats: vec!(P(ast::Pat{ id: ast::DUMMY_NODE_ID, span: self.sp, node: ast::PatWild(ast::PatWildSingle), - }), + })), guard: None, body: body, } @@ -605,8 +603,8 @@ fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str, // Converts `xs` to a `[x1, x2, .., xN]` expression by calling `to_expr` // on each element in `xs`. 
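A side note on the `match_class` hunk above, offered as interpretation rather than anything stated in the patch: `Gc<ast::Expr>` was a copyable pointer, so a single `expr_true` could be handed to every arm, whereas an owned `P` expression cannot be duplicated implicitly, which is presumably why the quasiquote now runs once per arm:

    // before (copyable Gc): build once, reuse by copy
    //     let expr_true = quote_expr!(self.cx, true);
    //     self.cx.arm(self.sp, vec!(pat), expr_true)
    // after (owned P): build a fresh expression for each arm
    //     self.cx.arm(self.sp, vec!(pat), quote_expr!(self.cx, true))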
fn vec_expr>(&self, xs: It, - to_expr: |&ExtCtxt, T| -> Gc) - -> Gc { + to_expr: |&ExtCtxt, T| -> P) + -> P { let exprs = xs.map(|x| to_expr(self.cx, x)).collect(); self.cx.expr_vec(self.sp, exprs) } @@ -618,13 +616,13 @@ fn parse(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> Option { let mut parser = cx.new_parser_from_tts(tts); let entry = cx.expander().fold_expr(parser.parse_expr()); let regex = match entry.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => s.to_string(), _ => { cx.span_err(entry.span, format!( "expected string literal but got `{}`", - pprust::lit_to_string(&*lit)).as_slice()); + pprust::lit_to_string(&**lit)).as_slice()); return None } } diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs index 018bfecd369a7..b80d53922f8a5 100644 --- a/src/librustc/driver/driver.rs +++ b/src/librustc/driver/driver.rs @@ -35,6 +35,7 @@ use std::io::fs; use std::os; use arena::TypedArena; use syntax::ast; +use syntax::ast_map; use syntax::attr; use syntax::attr::{AttrMetaMethods}; use syntax::diagnostics; @@ -65,7 +66,7 @@ pub fn compile_input(sess: Session, // large chunks of memory alive and we want to free them as soon as // possible to keep the peak memory usage low let (outputs, trans, sess) = { - let (outputs, expanded_crate, ast_map, id) = { + let (outputs, expanded_crate, id) = { let krate = phase_1_parse_input(&sess, cfg, input); if stop_after_phase_1(&sess) { return; } let outputs = build_output_filenames(input, @@ -75,25 +76,28 @@ pub fn compile_input(sess: Session, &sess); let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), input); - let (expanded_crate, ast_map) + let expanded_crate = match phase_2_configure_and_expand(&sess, krate, id.as_slice(), addl_plugins) { None => return, - Some(p) => p, + Some(k) => k }; - (outputs, expanded_crate, ast_map, id) + (outputs, expanded_crate, id) }; + + let mut forest = ast_map::Forest::new(expanded_crate); + let ast_map = assign_node_ids_and_map(&sess, &mut forest); + write_out_deps(&sess, input, &outputs, id.as_slice()); if stop_after_phase_2(&sess) { return; } let type_arena = TypedArena::new(); - let analysis = phase_3_run_analysis_passes(sess, &expanded_crate, - ast_map, &type_arena, id); - phase_save_analysis(&analysis.ty_cx.sess, &expanded_crate, &analysis, outdir); + let analysis = phase_3_run_analysis_passes(sess, ast_map, &type_arena, id); + phase_save_analysis(&analysis.ty_cx.sess, analysis.ty_cx.map.krate(), &analysis, outdir); if stop_after_phase_3(&analysis.ty_cx.sess) { return; } - let (tcx, trans) = phase_4_translate_to_llvm(expanded_crate, analysis); + let (tcx, trans) = phase_4_translate_to_llvm(analysis); // Discard interned strings as they are no longer required. 
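Taken together, the compile_input changes above re-thread AST ownership: the expanded crate moves into an `ast_map::Forest`, the map borrows it, and the later phases reach the AST through the map instead of taking `krate` as a separate argument. A condensed restatement of that flow, simplified from the hunks above (the real code matches on the `Option` and returns early rather than calling `expect`):

    let expanded_crate = phase_2_configure_and_expand(&sess, krate, id.as_slice(), addl_plugins)
        .expect("expansion failed");                        // simplification of the early return
    let mut forest = ast_map::Forest::new(expanded_crate);  // forest owns the expanded AST
    let ast_map = assign_node_ids_and_map(&sess, &mut forest); // map borrows it for 'ast
    let type_arena = TypedArena::new();
    let analysis = phase_3_run_analysis_passes(sess, ast_map, &type_arena, id);
    let (tcx, trans) = phase_4_translate_to_llvm(analysis);  // krate now lives in analysis.ty_cx.map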
token::get_ident_interner().clear(); @@ -182,7 +186,7 @@ pub fn phase_2_configure_and_expand(sess: &Session, mut krate: ast::Crate, crate_name: &str, addl_plugins: Option) - -> Option<(ast::Crate, syntax::ast_map::Map)> { + -> Option { let time_passes = sess.time_passes(); *sess.crate_types.borrow_mut() = @@ -294,20 +298,37 @@ pub fn phase_2_configure_and_expand(sess: &Session, krate = time(time_passes, "prelude injection", krate, |krate| front::std_inject::maybe_inject_prelude(sess, krate)); - let (krate, map) = time(time_passes, "assigning node ids and indexing ast", krate, |krate| - front::assign_node_ids_and_map::assign_node_ids_and_map(sess, krate)); + time(time_passes, "checking that all macro invocations are gone", &krate, |krate| + syntax::ext::expand::check_for_macros(&sess.parse_sess, krate)); + + Some(krate) +} + +pub fn assign_node_ids_and_map<'ast>(sess: &Session, + forest: &'ast mut ast_map::Forest) + -> ast_map::Map<'ast> { + struct NodeIdAssigner<'a> { + sess: &'a Session + } + + impl<'a> ast_map::FoldOps for NodeIdAssigner<'a> { + fn new_id(&self, old_id: ast::NodeId) -> ast::NodeId { + assert_eq!(old_id, ast::DUMMY_NODE_ID); + self.sess.next_node_id() + } + } + + let map = time(sess.time_passes(), "assigning node ids and indexing ast", forest, |forest| + ast_map::map_crate(forest, NodeIdAssigner { sess: sess })); if sess.opts.debugging_opts & config::AST_JSON != 0 { let mut stdout = io::BufferedWriter::new(io::stdout()); let mut json = json::PrettyEncoder::new(&mut stdout); // unwrapping so IoError isn't ignored - krate.encode(&mut json).unwrap(); + map.krate().encode(&mut json).unwrap(); } - time(time_passes, "checking that all macro invocations are gone", &krate, |krate| - syntax::ext::expand::check_for_macros(&sess.parse_sess, krate)); - - Some((krate, map)) + map } pub struct CrateAnalysis<'tcx> { @@ -324,11 +345,11 @@ pub struct CrateAnalysis<'tcx> { /// miscellaneous analysis passes on the crate. Return various /// structures carrying the results of the analysis. 
pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, - krate: &ast::Crate, - ast_map: syntax::ast_map::Map, + ast_map: ast_map::Map<'tcx>, type_arena: &'tcx TypedArena, name: String) -> CrateAnalysis<'tcx> { let time_passes = sess.time_passes(); + let krate = ast_map.krate(); time(time_passes, "external crate/lib resolution", (), |_| creader::read_crates(&sess, krate)); @@ -353,7 +374,7 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, |_| middle::resolve_lifetime::krate(&sess, krate)); time(time_passes, "looking for entry point", (), - |_| middle::entry::find_entry_point(&sess, krate, &ast_map)); + |_| middle::entry::find_entry_point(&sess, &ast_map)); sess.plugin_registrar_fn.set( time(time_passes, "looking for plugin registrar", (), |_| @@ -385,43 +406,43 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, stability_index); // passes are timed inside typeck - typeck::check_crate(&ty_cx, trait_map, krate); + typeck::check_crate(&ty_cx, trait_map); time(time_passes, "check static items", (), |_| - middle::check_static::check_crate(&ty_cx, krate)); + middle::check_static::check_crate(&ty_cx)); // These next two const passes can probably be merged time(time_passes, "const marking", (), |_| - middle::const_eval::process_crate(krate, &ty_cx)); + middle::const_eval::process_crate(&ty_cx)); time(time_passes, "const checking", (), |_| - middle::check_const::check_crate(krate, &ty_cx)); + middle::check_const::check_crate(&ty_cx)); let maps = (external_exports, last_private_map); let (exported_items, public_items) = time(time_passes, "privacy checking", maps, |(a, b)| - middle::privacy::check_crate(&ty_cx, &exp_map2, a, b, krate)); + middle::privacy::check_crate(&ty_cx, &exp_map2, a, b)); time(time_passes, "intrinsic checking", (), |_| - middle::intrinsicck::check_crate(&ty_cx, krate)); + middle::intrinsicck::check_crate(&ty_cx)); time(time_passes, "effect checking", (), |_| - middle::effect::check_crate(&ty_cx, krate)); + middle::effect::check_crate(&ty_cx)); time(time_passes, "match checking", (), |_| - middle::check_match::check_crate(&ty_cx, krate)); + middle::check_match::check_crate(&ty_cx)); time(time_passes, "liveness checking", (), |_| - middle::liveness::check_crate(&ty_cx, krate)); + middle::liveness::check_crate(&ty_cx)); time(time_passes, "borrow checking", (), |_| - middle::borrowck::check_crate(&ty_cx, krate)); + middle::borrowck::check_crate(&ty_cx)); time(time_passes, "rvalue checking", (), |_| middle::check_rvalues::check_crate(&ty_cx, krate)); time(time_passes, "kind checking", (), |_| - kind::check_crate(&ty_cx, krate)); + kind::check_crate(&ty_cx)); let reachable_map = time(time_passes, "reachability checking", (), |_| @@ -430,12 +451,11 @@ pub fn phase_3_run_analysis_passes<'tcx>(sess: Session, time(time_passes, "death checking", (), |_| { middle::dead::check_crate(&ty_cx, &exported_items, - &reachable_map, - krate) + &reachable_map) }); time(time_passes, "lint checking", (), |_| - lint::check_crate(&ty_cx, krate, &exported_items)); + lint::check_crate(&ty_cx, &exported_items)); CrateAnalysis { exp_map2: exp_map2, @@ -475,16 +495,16 @@ pub struct CrateTranslation { /// Run the translation phase to LLVM, after which the AST and analysis can /// be discarded. 
-pub fn phase_4_translate_to_llvm(krate: ast::Crate, - analysis: CrateAnalysis) -> (ty::ctxt, CrateTranslation) { +pub fn phase_4_translate_to_llvm<'tcx>(analysis: CrateAnalysis<'tcx>) + -> (ty::ctxt<'tcx>, CrateTranslation) { let time_passes = analysis.ty_cx.sess.time_passes(); time(time_passes, "resolving dependency formats", (), |_| dependency_format::calculate(&analysis.ty_cx)); // Option dance to work around the lack of stack once closures. - time(time_passes, "translation", (krate, analysis), |(krate, analysis)| - trans::base::trans_crate(krate, analysis)) + time(time_passes, "translation", analysis, |analysis| + trans::base::trans_crate(analysis)) } /// Run LLVM itself, producing a bitcode file, assembly file or object file diff --git a/src/librustc/driver/pretty.rs b/src/librustc/driver/pretty.rs index a3227e4dbf1d4..7c27f23d4857a 100644 --- a/src/librustc/driver/pretty.rs +++ b/src/librustc/driver/pretty.rs @@ -91,13 +91,13 @@ pub fn parse_pretty(sess: &Session, name: &str) -> (PpMode, Option(&self, - sess: Session, - krate: &ast::Crate, - ast_map: Option, - id: String, - payload: B, - f: |&PrinterSupport, B| -> A) -> A { + fn call_with_pp_support<'tcx, A, B>(&self, + sess: Session, + ast_map: Option>, + type_arena: &'tcx TypedArena, + id: String, + payload: B, + f: |&PrinterSupport, B| -> A) -> A { match *self { PpmNormal | PpmExpanded => { let annotation = NoAnn { sess: sess, ast_map: ast_map }; @@ -114,9 +114,8 @@ impl PpSourceMode { } PpmTyped => { let ast_map = ast_map.expect("--pretty=typed missing ast_map"); - let type_arena = TypedArena::new(); - let analysis = driver::phase_3_run_analysis_passes(sess, krate, ast_map, - &type_arena, id); + let analysis = driver::phase_3_run_analysis_passes(sess, ast_map, + type_arena, id); let annotation = TypedAnnotation { analysis: analysis }; f(&annotation, payload) } @@ -124,69 +123,51 @@ impl PpSourceMode { } } -trait SessionCarrier { +trait PrinterSupport<'ast>: pprust::PpAnn { /// Provides a uniform interface for re-extracting a reference to a /// `Session` from a value that now owns it. fn sess<'a>(&'a self) -> &'a Session; -} -trait AstMapCarrier { /// Provides a uniform interface for re-extracting a reference to an /// `ast_map::Map` from a value that now owns it. - fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map>; -} + fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'ast>>; -trait PrinterSupport : SessionCarrier + AstMapCarrier { /// Produces the pretty-print annotation object. /// - /// Usually implemented via `self as &pprust::PpAnn`. - /// /// (Rust does not yet support upcasting from a trait object to /// an object for one of its super-traits.) 
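The old `SessionCarrier`/`AstMapCarrier`/`PrinterSupport` split collapses into one lifetime-parameterized trait in the hunks around this point; reassembled here for readability (a best-effort reading of the surrounding additions, not text from the patch), the resulting trait looks roughly like:

    trait PrinterSupport<'ast>: pprust::PpAnn {
        // re-extract a reference to the Session this value now owns
        fn sess<'a>(&'a self) -> &'a Session;

        // re-extract the ast_map::Map, if one was built
        fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'ast>>;

        // default body; upcasting a trait object to one of its super-traits
        // is not yet supported, hence the explicit cast
        fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn }
    }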
- fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn; + fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn } } -struct NoAnn { +struct NoAnn<'ast> { sess: Session, - ast_map: Option, -} - -impl PrinterSupport for NoAnn { - fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn } + ast_map: Option> } -impl SessionCarrier for NoAnn { +impl<'ast> PrinterSupport<'ast> for NoAnn<'ast> { fn sess<'a>(&'a self) -> &'a Session { &self.sess } -} -impl AstMapCarrier for NoAnn { - fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map> { + fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'ast>> { self.ast_map.as_ref() } } -impl pprust::PpAnn for NoAnn {} +impl<'ast> pprust::PpAnn for NoAnn<'ast> {} -struct IdentifiedAnnotation { +struct IdentifiedAnnotation<'ast> { sess: Session, - ast_map: Option, -} - -impl PrinterSupport for IdentifiedAnnotation { - fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn } + ast_map: Option>, } -impl SessionCarrier for IdentifiedAnnotation { +impl<'ast> PrinterSupport<'ast> for IdentifiedAnnotation<'ast> { fn sess<'a>(&'a self) -> &'a Session { &self.sess } -} -impl AstMapCarrier for IdentifiedAnnotation { - fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map> { + fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'ast>> { self.ast_map.as_ref() } } -impl pprust::PpAnn for IdentifiedAnnotation { +impl<'ast> pprust::PpAnn for IdentifiedAnnotation<'ast> { fn pre(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::IoResult<()> { @@ -222,26 +203,20 @@ impl pprust::PpAnn for IdentifiedAnnotation { } } -struct HygieneAnnotation { +struct HygieneAnnotation<'ast> { sess: Session, - ast_map: Option, -} - -impl PrinterSupport for HygieneAnnotation { - fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn } + ast_map: Option>, } -impl SessionCarrier for HygieneAnnotation { +impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> { fn sess<'a>(&'a self) -> &'a Session { &self.sess } -} -impl AstMapCarrier for HygieneAnnotation { - fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map> { + fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'ast>> { self.ast_map.as_ref() } } -impl pprust::PpAnn for HygieneAnnotation { +impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> { fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::IoResult<()> { @@ -266,16 +241,10 @@ struct TypedAnnotation<'tcx> { analysis: CrateAnalysis<'tcx>, } -impl<'tcx> PrinterSupport for TypedAnnotation<'tcx> { - fn pp_ann<'a>(&'a self) -> &'a pprust::PpAnn { self as &pprust::PpAnn } -} - -impl<'tcx> SessionCarrier for TypedAnnotation<'tcx> { +impl<'tcx> PrinterSupport<'tcx> for TypedAnnotation<'tcx> { fn sess<'a>(&'a self) -> &'a Session { &self.analysis.ty_cx.sess } -} -impl<'tcx> AstMapCarrier for TypedAnnotation<'tcx> { - fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map> { + fn ast_map<'a>(&'a self) -> Option<&'a ast_map::Map<'tcx>> { Some(&self.analysis.ty_cx.map) } } @@ -347,12 +316,12 @@ impl FromStr for UserIdentifiedItem { } } -enum NodesMatchingUII<'a> { +enum NodesMatchingUII<'a, 'ast: 'a> { NodesMatchingDirect(option::Item), - NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, String>), + NodesMatchingSuffix(ast_map::NodesMatchingSuffix<'a, 'ast, String>), } -impl<'a> Iterator for NodesMatchingUII<'a> { +impl<'a, 'ast> Iterator for NodesMatchingUII<'a, 'ast> { fn next(&mut self) -> Option { match self { &NodesMatchingDirect(ref mut iter) => iter.next(), @@ -369,7 +338,8 @@ impl UserIdentifiedItem { } } - fn 
all_matching_node_ids<'a>(&'a self, map: &'a ast_map::Map) -> NodesMatchingUII<'a> { + fn all_matching_node_ids<'a, 'ast>(&'a self, map: &'a ast_map::Map<'ast>) + -> NodesMatchingUII<'a, 'ast> { match *self { ItemViaNode(node_id) => NodesMatchingDirect(Some(node_id).move_iter()), @@ -443,15 +413,24 @@ pub fn pretty_print_input(sess: Session, let id = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), input); let is_expanded = needs_expansion(&ppm); - let (krate, ast_map) = if needs_ast_map(&ppm, &opt_uii) { - let k = driver::phase_2_configure_and_expand(&sess, krate, id.as_slice(), None); - let (krate, ast_map) = match k { + let compute_ast_map = needs_ast_map(&ppm, &opt_uii); + let krate = if compute_ast_map { + match driver::phase_2_configure_and_expand(&sess, krate, id.as_slice(), None) { None => return, - Some(p) => p, - }; - (krate, Some(ast_map)) + Some(k) => k + } + } else { + krate + }; + + let mut forest = ast_map::Forest::new(krate); + let type_arena = TypedArena::new(); + + let (krate, ast_map) = if compute_ast_map { + let map = driver::assign_node_ids_and_map(&sess, &mut forest); + (map.krate(), Some(map)) } else { - (krate, None) + (forest.krate(), None) }; let src_name = driver::source_name(input); @@ -476,12 +455,12 @@ pub fn pretty_print_input(sess: Session, match (ppm, opt_uii) { (PpmSource(s), None) => s.call_with_pp_support( - sess, &krate, ast_map, id, out, |annotation, out| { + sess, ast_map, &type_arena, id, out, |annotation, out| { debug!("pretty printing source code {}", s); let sess = annotation.sess(); pprust::print_crate(sess.codemap(), sess.diagnostic(), - &krate, + krate, src_name.to_string(), &mut rdr, out, @@ -491,7 +470,7 @@ pub fn pretty_print_input(sess: Session, (PpmSource(s), Some(uii)) => s.call_with_pp_support( - sess, &krate, ast_map, id, (out,uii), |annotation, (out,uii)| { + sess, ast_map, &type_arena, id, (out,uii), |annotation, (out,uii)| { debug!("pretty printing source code {}", s); let sess = annotation.sess(); let ast_map = annotation.ast_map() @@ -533,9 +512,8 @@ pub fn pretty_print_input(sess: Session, match code { Some(code) => { let variants = gather_flowgraph_variants(&sess); - let type_arena = TypedArena::new(); - let analysis = driver::phase_3_run_analysis_passes(sess, &krate, - ast_map, &type_arena, id); + let analysis = driver::phase_3_run_analysis_passes(sess, ast_map, + &type_arena, id); print_flowgraph(variants, analysis, code, out) } None => { diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 135e21e4e0184..6f020184b336d 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -263,7 +263,6 @@ pub fn build_session_(sopts: config::Options, } // Seems out of place, but it uses session, so I'm putting it here -pub fn expect(sess: &Session, opt: Option, msg: || -> String) - -> T { +pub fn expect(sess: &Session, opt: Option, msg: || -> String) -> T { diagnostic::expect(sess.diagnostic(), opt, msg) } diff --git a/src/librustc/front/assign_node_ids_and_map.rs b/src/librustc/front/assign_node_ids_and_map.rs deleted file mode 100644 index f7c919131a809..0000000000000 --- a/src/librustc/front/assign_node_ids_and_map.rs +++ /dev/null @@ -1,29 +0,0 @@ -// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -use driver::session::Session; - -use syntax::ast; -use syntax::ast_map; - -struct NodeIdAssigner<'a> { - sess: &'a Session -} - -impl<'a> ast_map::FoldOps for NodeIdAssigner<'a> { - fn new_id(&self, old_id: ast::NodeId) -> ast::NodeId { - assert_eq!(old_id, ast::DUMMY_NODE_ID); - self.sess.next_node_id() - } -} - -pub fn assign_node_ids_and_map(sess: &Session, krate: ast::Crate) -> (ast::Crate, ast_map::Map) { - ast_map::map_crate(krate, NodeIdAssigner { sess: sess }) -} diff --git a/src/librustc/front/config.rs b/src/librustc/front/config.rs index 93320caf5f281..f028f5c97b7ef 100644 --- a/src/librustc/front/config.rs +++ b/src/librustc/front/config.rs @@ -10,9 +10,8 @@ use syntax::fold::Folder; use syntax::{ast, fold, attr}; -use syntax::codemap; - -use std::gc::{Gc, GC}; +use syntax::codemap::Spanned; +use syntax::ptr::P; /// A folder that strips out items that do not belong in the current /// configuration. @@ -28,22 +27,22 @@ pub fn strip_unconfigured_items(krate: ast::Crate) -> ast::Crate { } impl<'a> fold::Folder for Context<'a> { - fn fold_mod(&mut self, module: &ast::Mod) -> ast::Mod { + fn fold_mod(&mut self, module: ast::Mod) -> ast::Mod { fold_mod(self, module) } - fn fold_block(&mut self, block: ast::P) -> ast::P { + fn fold_block(&mut self, block: P) -> P { fold_block(self, block) } - fn fold_foreign_mod(&mut self, foreign_mod: &ast::ForeignMod) -> ast::ForeignMod { + fn fold_foreign_mod(&mut self, foreign_mod: ast::ForeignMod) -> ast::ForeignMod { fold_foreign_mod(self, foreign_mod) } - fn fold_item_underscore(&mut self, item: &ast::Item_) -> ast::Item_ { + fn fold_item_underscore(&mut self, item: ast::Item_) -> ast::Item_ { fold_item_underscore(self, item) } - fn fold_expr(&mut self, expr: Gc) -> Gc { + fn fold_expr(&mut self, expr: P) -> P { fold_expr(self, expr) } - fn fold_mac(&mut self, mac: &ast::Mac) -> ast::Mac { + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fold::noop_fold_mac(mac, self) } } @@ -57,34 +56,32 @@ pub fn strip_items(krate: ast::Crate, ctxt.fold_crate(krate) } -fn filter_view_item<'r>(cx: &mut Context, view_item: &'r ast::ViewItem) - -> Option<&'r ast::ViewItem> { - if view_item_in_cfg(cx, view_item) { +fn filter_view_item(cx: &mut Context, view_item: ast::ViewItem) -> Option { + if view_item_in_cfg(cx, &view_item) { Some(view_item) } else { None } } -fn fold_mod(cx: &mut Context, m: &ast::Mod) -> ast::Mod { - let filtered_items: Vec<&Gc> = m.items.iter() - .filter(|a| item_in_cfg(cx, &***a)) - .collect(); - let flattened_items = filtered_items.move_iter() - .flat_map(|&x| cx.fold_item(x).move_iter()) - .collect(); - let filtered_view_items = m.view_items.iter().filter_map(|a| { - filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) - }).collect(); +fn fold_mod(cx: &mut Context, ast::Mod {inner, view_items, items}: ast::Mod) -> ast::Mod { ast::Mod { - inner: m.inner, - view_items: filtered_view_items, - items: flattened_items + inner: inner, + view_items: view_items.move_iter().filter_map(|a| { + filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) + }).collect(), + items: items.move_iter().filter_map(|a| { + if item_in_cfg(cx, &*a) { + Some(cx.fold_item(a)) + } else { + None + } + }).flat_map(|x| x.move_iter()).collect() } } -fn filter_foreign_item(cx: &mut Context, item: Gc) - -> Option> { +fn filter_foreign_item(cx: &mut Context, item: P) + -> Option> { if foreign_item_in_cfg(cx, &*item) { Some(item) } else { @@ -92,134 +89,135 @@ fn 
filter_foreign_item(cx: &mut Context, item: Gc) } } -fn fold_foreign_mod(cx: &mut Context, nm: &ast::ForeignMod) -> ast::ForeignMod { - let filtered_items = nm.items - .iter() - .filter_map(|a| filter_foreign_item(cx, *a)) - .collect(); - let filtered_view_items = nm.view_items.iter().filter_map(|a| { - filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) - }).collect(); +fn fold_foreign_mod(cx: &mut Context, ast::ForeignMod {abi, view_items, items}: ast::ForeignMod) + -> ast::ForeignMod { ast::ForeignMod { - abi: nm.abi, - view_items: filtered_view_items, - items: filtered_items + abi: abi, + view_items: view_items.move_iter().filter_map(|a| { + filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) + }).collect(), + items: items.move_iter() + .filter_map(|a| filter_foreign_item(cx, a)) + .collect() } } -fn fold_item_underscore(cx: &mut Context, item: &ast::Item_) -> ast::Item_ { - let item = match *item { - ast::ItemImpl(ref a, ref b, c, ref impl_items) => { - let impl_items = impl_items.iter() - .filter(|ii| { - impl_item_in_cfg(cx, &**ii) - }) - .map(|x| *x) +fn fold_item_underscore(cx: &mut Context, item: ast::Item_) -> ast::Item_ { + let item = match item { + ast::ItemImpl(a, b, c, impl_items) => { + let impl_items = impl_items.move_iter() + .filter(|ii| impl_item_in_cfg(cx, ii)) .collect(); - ast::ItemImpl((*a).clone(), (*b).clone(), c, impl_items) + ast::ItemImpl(a, b, c, impl_items) } - ast::ItemTrait(ref a, ref b, ref c, ref methods) => { - let methods = methods.iter() - .filter(|m| trait_method_in_cfg(cx, *m) ) - .map(|x| (*x).clone()) + ast::ItemTrait(a, b, c, methods) => { + let methods = methods.move_iter() + .filter(|m| trait_method_in_cfg(cx, m)) .collect(); - ast::ItemTrait((*a).clone(), (*b).clone(), (*c).clone(), methods) + ast::ItemTrait(a, b, c, methods) } - ast::ItemStruct(ref def, ref generics) => { - ast::ItemStruct(fold_struct(cx, &**def), generics.clone()) + ast::ItemStruct(def, generics) => { + ast::ItemStruct(fold_struct(cx, def), generics) } - ast::ItemEnum(ref def, ref generics) => { - let mut variants = def.variants.iter().map(|c| c.clone()). - filter_map(|v| { + ast::ItemEnum(def, generics) => { + let mut variants = def.variants.move_iter().filter_map(|v| { if !(cx.in_cfg)(v.node.attrs.as_slice()) { None } else { - Some(match v.node.kind { - ast::TupleVariantKind(..) => v, - ast::StructVariantKind(ref def) => { - let def = fold_struct(cx, &**def); - box(GC) codemap::Spanned { - node: ast::Variant_ { - kind: ast::StructVariantKind(def.clone()), - ..v.node.clone() - }, - ..*v + Some(v.map(|Spanned {node: ast::Variant_ {id, name, attrs, kind, + disr_expr, vis}, span}| { + Spanned { + node: ast::Variant_ { + id: id, + name: name, + attrs: attrs, + kind: match kind { + ast::TupleVariantKind(..) 
=> kind, + ast::StructVariantKind(def) => { + ast::StructVariantKind(fold_struct(cx, def)) } - } - }) - } - }); + }, + disr_expr: disr_expr, + vis: vis + }, + span: span + } + })) + } + }); ast::ItemEnum(ast::EnumDef { variants: variants.collect(), - }, generics.clone()) + }, generics) } - ref item => item.clone(), + item => item, }; - fold::noop_fold_item_underscore(&item, cx) + fold::noop_fold_item_underscore(item, cx) } -fn fold_struct(cx: &mut Context, def: &ast::StructDef) -> Gc { - let mut fields = def.fields.iter().map(|c| c.clone()).filter(|m| { - (cx.in_cfg)(m.node.attrs.as_slice()) - }); - box(GC) ast::StructDef { - fields: fields.collect(), - ctor_id: def.ctor_id, - super_struct: def.super_struct.clone(), - is_virtual: def.is_virtual, - } +fn fold_struct(cx: &mut Context, def: P) -> P { + def.map(|ast::StructDef {fields, ctor_id, super_struct, is_virtual}| { + ast::StructDef { + fields: fields.move_iter().filter(|m| { + (cx.in_cfg)(m.node.attrs.as_slice()) + }).collect(), + ctor_id: ctor_id, + super_struct: super_struct, + is_virtual: is_virtual, + } + }) } -fn retain_stmt(cx: &mut Context, stmt: Gc) -> bool { +fn retain_stmt(cx: &mut Context, stmt: &ast::Stmt) -> bool { match stmt.node { - ast::StmtDecl(decl, _) => { - match decl.node { - ast::DeclItem(ref item) => { - item_in_cfg(cx, &**item) - } - _ => true + ast::StmtDecl(ref decl, _) => { + match decl.node { + ast::DeclItem(ref item) => { + item_in_cfg(cx, &**item) + } + _ => true + } } - } - _ => true + _ => true } } -fn fold_block(cx: &mut Context, b: ast::P) -> ast::P { - let resulting_stmts: Vec<&Gc> = - b.stmts.iter().filter(|&a| retain_stmt(cx, *a)).collect(); - let resulting_stmts = resulting_stmts.move_iter() - .flat_map(|stmt| cx.fold_stmt(&**stmt).move_iter()) - .collect(); - let filtered_view_items = b.view_items.iter().filter_map(|a| { - filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) - }).collect(); - ast::P(ast::Block { - view_items: filtered_view_items, - stmts: resulting_stmts, - expr: b.expr.map(|x| cx.fold_expr(x)), - id: b.id, - rules: b.rules, - span: b.span, +fn fold_block(cx: &mut Context, b: P) -> P { + b.map(|ast::Block {id, view_items, stmts, expr, rules, span}| { + let resulting_stmts: Vec> = + stmts.move_iter().filter(|a| retain_stmt(cx, &**a)).collect(); + let resulting_stmts = resulting_stmts.move_iter() + .flat_map(|stmt| cx.fold_stmt(stmt).move_iter()) + .collect(); + let filtered_view_items = view_items.move_iter().filter_map(|a| { + filter_view_item(cx, a).map(|x| cx.fold_view_item(x)) + }).collect(); + ast::Block { + id: id, + view_items: filtered_view_items, + stmts: resulting_stmts, + expr: expr.map(|x| cx.fold_expr(x)), + rules: rules, + span: span, + } }) } -fn fold_expr(cx: &mut Context, expr: Gc) -> Gc { - let expr = match expr.node { - ast::ExprMatch(ref m, ref arms) => { - let arms = arms.iter() - .filter(|a| (cx.in_cfg)(a.attrs.as_slice())) - .map(|a| a.clone()) - .collect(); - box(GC) ast::Expr { - id: expr.id, - span: expr.span.clone(), - node: ast::ExprMatch(m.clone(), arms), - } - } - _ => expr.clone() - }; - fold::noop_fold_expr(expr, cx) +fn fold_expr(cx: &mut Context, expr: P) -> P { + expr.map(|ast::Expr {id, span, node}| { + fold::noop_fold_expr(ast::Expr { + id: id, + node: match node { + ast::ExprMatch(m, arms) => { + ast::ExprMatch(m, arms.move_iter() + .filter(|a| (cx.in_cfg)(a.attrs.as_slice())) + .collect()) + } + _ => node + }, + span: span + }, cx) + }) } fn item_in_cfg(cx: &mut Context, item: &ast::Item) -> bool { @@ -237,19 +235,19 @@ fn 
view_item_in_cfg(cx: &mut Context, item: &ast::ViewItem) -> bool { fn trait_method_in_cfg(cx: &mut Context, meth: &ast::TraitItem) -> bool { match *meth { ast::RequiredMethod(ref meth) => (cx.in_cfg)(meth.attrs.as_slice()), - ast::ProvidedMethod(meth) => (cx.in_cfg)(meth.attrs.as_slice()) + ast::ProvidedMethod(ref meth) => (cx.in_cfg)(meth.attrs.as_slice()) } } fn impl_item_in_cfg(cx: &mut Context, impl_item: &ast::ImplItem) -> bool { match *impl_item { - ast::MethodImplItem(meth) => (cx.in_cfg)(meth.attrs.as_slice()), + ast::MethodImplItem(ref meth) => (cx.in_cfg)(meth.attrs.as_slice()), } } // Determine if an item should be translated in the current crate // configuration based on the item's attributes -fn in_cfg(cfg: &[Gc], attrs: &[ast::Attribute]) -> bool { - attr::test_cfg(cfg, attrs.iter().map(|x| *x)) +fn in_cfg(cfg: &[P], attrs: &[ast::Attribute]) -> bool { + attr::test_cfg(cfg, attrs.iter()) } diff --git a/src/librustc/front/feature_gate.rs b/src/librustc/front/feature_gate.rs index 7750ddc91e1a6..13a40aba93078 100644 --- a/src/librustc/front/feature_gate.rs +++ b/src/librustc/front/feature_gate.rs @@ -33,6 +33,7 @@ use syntax::parse::token; use driver::session::Session; use std::cell::Cell; +use std::slice; /// This is a list of all known features since the beginning of time. This list /// can never shrink, it may only be expanded (in order to prevent old programs @@ -220,7 +221,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { } } - ast::ItemStruct(struct_definition, _) => { + ast::ItemStruct(ref struct_definition, _) => { if attr::contains_name(i.attrs.as_slice(), "simd") { self.gate_feature("simd", i.span, "SIMD types are experimental and possibly buggy"); @@ -310,7 +311,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { fn visit_ty(&mut self, t: &ast::Ty) { match t.node { - ast::TyClosure(closure) if closure.onceness == ast::Once => { + ast::TyClosure(ref closure) if closure.onceness == ast::Once => { self.gate_feature("once_fns", t.span, "once functions are \ experimental and likely to be removed"); @@ -352,7 +353,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { fn visit_generics(&mut self, generics: &ast::Generics) { for type_parameter in generics.ty_params.iter() { match type_parameter.default { - Some(ty) => { + Some(ref ty) => { self.gate_feature("default_type_params", ty.span, "default type parameters are \ experimental and possibly buggy"); @@ -364,7 +365,7 @@ impl<'a, 'v> Visitor<'v> for Context<'a> { } fn visit_attribute(&mut self, attr: &ast::Attribute) { - if attr::contains_name([*attr], "lang") { + if attr::contains_name(slice::ref_slice(attr), "lang") { self.gate_feature("lang_items", attr.span, "language items are subject to change"); @@ -420,7 +421,7 @@ pub fn check_crate(sess: &Session, krate: &ast::Crate) { expected #![feature(...)]"); } Some(list) => { - for &mi in list.iter() { + for mi in list.iter() { let name = match mi.node { ast::MetaWord(ref word) => (*word).clone(), _ => { diff --git a/src/librustc/front/std_inject.rs b/src/librustc/front/std_inject.rs index 32e0c323d1f94..748641ba70c2b 100644 --- a/src/librustc/front/std_inject.rs +++ b/src/librustc/front/std_inject.rs @@ -21,10 +21,10 @@ use syntax::owned_slice::OwnedSlice; use syntax::parse::token::InternedString; use syntax::parse::token::special_idents; use syntax::parse::token; +use syntax::ptr::P; use syntax::util::small_vector::SmallVector; use std::mem; -use std::gc::{Gc, GC}; pub fn maybe_inject_crates_ref(sess: &Session, krate: ast::Crate) -> ast::Crate { @@ -149,7 +149,6 @@ impl<'a> 
fold::Folder for PreludeInjector<'a> { if !no_prelude(krate.attrs.as_slice()) { // only add `use std::prelude::*;` if there wasn't a // `#![no_implicit_prelude]` at the crate level. - // fold_mod() will insert glob path. let globs_attr = attr::mk_attr_inner(attr::mk_attr_id(), attr::mk_list_item( @@ -161,23 +160,23 @@ impl<'a> fold::Folder for PreludeInjector<'a> { attr::mark_used(&globs_attr); krate.attrs.push(globs_attr); - krate.module = self.fold_mod(&krate.module); + krate.module = self.fold_mod(krate.module); } krate } - fn fold_item(&mut self, item: Gc) -> SmallVector> { + fn fold_item(&mut self, item: P) -> SmallVector> { if !no_prelude(item.attrs.as_slice()) { // only recur if there wasn't `#![no_implicit_prelude]` // on this item, i.e. this means that the prelude is not // implicitly imported though the whole subtree - fold::noop_fold_item(&*item, self) + fold::noop_fold_item(item, self) } else { SmallVector::one(item) } } - fn fold_mod(&mut self, module: &ast::Mod) -> ast::Mod { + fn fold_mod(&mut self, ast::Mod {inner, view_items, items}: ast::Mod) -> ast::Mod { let prelude_path = ast::Path { span: DUMMY_SP, global: false, @@ -194,44 +193,41 @@ impl<'a> fold::Folder for PreludeInjector<'a> { }), }; - let vp = box(GC) codemap::dummy_spanned(ast::ViewPathGlob(prelude_path, - ast::DUMMY_NODE_ID)); - let vi2 = ast::ViewItem { + let (crates, uses) = view_items.partitioned(|x| { + match x.node { + ast::ViewItemExternCrate(..) => true, + _ => false, + } + }); + + // add prelude after any `extern crate` but before any `use` + let mut view_items = crates; + let vp = P(codemap::dummy_spanned(ast::ViewPathGlob(prelude_path, ast::DUMMY_NODE_ID))); + view_items.push(ast::ViewItem { node: ast::ViewItemUse(vp), - attrs: vec!(ast::Attribute { + attrs: vec![ast::Attribute { span: DUMMY_SP, node: ast::Attribute_ { id: attr::mk_attr_id(), style: ast::AttrOuter, - value: box(GC) ast::MetaItem { + value: P(ast::MetaItem { span: DUMMY_SP, node: ast::MetaWord(token::get_name( special_idents::prelude_import.name)), - }, + }), is_sugared_doc: false, }, - }), + }], vis: ast::Inherited, span: DUMMY_SP, - }; - - let (crates, uses) = module.view_items.partitioned(|x| { - match x.node { - ast::ViewItemExternCrate(..) 
=> true, - _ => false, - } }); - - // add vi2 after any `extern crate` but before any `use` - let mut view_items = crates; - view_items.push(vi2); view_items.push_all_move(uses); - let new_module = ast::Mod { + fold::noop_fold_mod(ast::Mod { + inner: inner, view_items: view_items, - ..(*module).clone() - }; - fold::noop_fold_mod(&new_module, self) + items: items + }, self) } } diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs index 63e93d266c770..a8c9c50009588 100644 --- a/src/librustc/front/test.rs +++ b/src/librustc/front/test.rs @@ -16,10 +16,10 @@ use driver::session::Session; use front::config; -use std::gc::{Gc, GC}; use std::slice; use std::mem; use std::vec; +use syntax::{ast, ast_util}; use syntax::ast_util::*; use syntax::attr::AttrMetaMethods; use syntax::attr; @@ -28,13 +28,13 @@ use syntax::codemap; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; -use syntax::fold::Folder; +use syntax::fold::{Folder, MoveMap}; use syntax::fold; use syntax::owned_slice::OwnedSlice; use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::print::pprust; -use syntax::{ast, ast_util}; +use syntax::ptr::P; use syntax::util::small_vector::SmallVector; struct Test { @@ -105,12 +105,12 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { folded } - fn fold_item(&mut self, i: Gc) -> SmallVector> { + fn fold_item(&mut self, i: P) -> SmallVector> { self.cx.path.push(i.ident); debug!("current path: {}", ast_util::path_name_i(self.cx.path.as_slice())); - if is_test_fn(&self.cx, i) || is_bench_fn(&self.cx, i) { + if is_test_fn(&self.cx, &*i) || is_bench_fn(&self.cx, &*i) { match i.node { ast::ItemFn(_, ast::UnsafeFn, _, _, _) => { let sess = self.cx.sess; @@ -123,9 +123,9 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { let test = Test { span: i.span, path: self.cx.path.clone(), - bench: is_bench_fn(&self.cx, i), - ignore: is_ignored(&self.cx, i), - should_fail: should_fail(i) + bench: is_bench_fn(&self.cx, &*i), + ignore: is_ignored(&self.cx, &*i), + should_fail: should_fail(&*i) }; self.cx.testfns.push(test); self.tests.push(i.ident); @@ -138,14 +138,14 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things let res = match i.node { - ast::ItemMod(..) => fold::noop_fold_item(&*i, self), + ast::ItemMod(..) => fold::noop_fold_item(i, self), _ => SmallVector::one(i), }; self.cx.path.pop(); res } - fn fold_mod(&mut self, m: &ast::Mod) -> ast::Mod { + fn fold_mod(&mut self, m: ast::Mod) -> ast::Mod { let tests = mem::replace(&mut self.tests, Vec::new()); let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); let mut mod_folded = fold::noop_fold_mod(m, self); @@ -155,22 +155,25 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { // Remove any #[main] from the AST so it doesn't clash with // the one we're going to add. Only if compiling an executable. - fn nomain(item: Gc) -> Gc { - box(GC) ast::Item { - attrs: item.attrs.iter().filter_map(|attr| { - if !attr.check_name("main") { - Some(*attr) - } else { - None - } - }).collect(), - .. 
(*item).clone() - } - } + mod_folded.items = mem::replace(&mut mod_folded.items, vec![]).move_map(|item| { + item.map(|ast::Item {id, ident, attrs, node, vis, span}| { + ast::Item { + id: id, + ident: ident, + attrs: attrs.move_iter().filter_map(|attr| { + if !attr.check_name("main") { + Some(attr) + } else { + None + } + }).collect(), + node: node, + vis: vis, + span: span + } + }) + }); - for i in mod_folded.items.mut_iter() { - *i = nomain(*i); - } if !tests.is_empty() || !tested_submods.is_empty() { let (it, sym) = mk_reexport_mod(&mut self.cx, tests, tested_submods); mod_folded.items.push(it); @@ -188,7 +191,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { } fn mk_reexport_mod(cx: &mut TestCtxt, tests: Vec, - tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (Gc, ast::Ident) { + tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P, ast::Ident) { let mut view_items = Vec::new(); let super_ = token::str_to_ident("super"); @@ -208,14 +211,14 @@ fn mk_reexport_mod(cx: &mut TestCtxt, tests: Vec, }; let sym = token::gensym_ident("__test_reexports"); - let it = box(GC) ast::Item { + let it = P(ast::Item { ident: sym.clone(), attrs: Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemMod(reexport_mod), vis: ast::Public, span: DUMMY_SP, - }; + }); (it, sym) } @@ -266,10 +269,10 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate { }) } -fn is_test_fn(cx: &TestCtxt, i: Gc) -> bool { +fn is_test_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test"); - fn has_test_signature(i: Gc) -> bool { + fn has_test_signature(i: &ast::Item) -> bool { match &i.node { &ast::ItemFn(ref decl, _, _, ref generics, _) => { let no_output = match decl.output.node { @@ -295,10 +298,10 @@ fn is_test_fn(cx: &TestCtxt, i: Gc) -> bool { return has_test_attr && has_test_signature(i); } -fn is_bench_fn(cx: &TestCtxt, i: Gc) -> bool { +fn is_bench_fn(cx: &TestCtxt, i: &ast::Item) -> bool { let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench"); - fn has_test_signature(i: Gc) -> bool { + fn has_test_signature(i: &ast::Item) -> bool { match i.node { ast::ItemFn(ref decl, _, _, ref generics, _) => { let input_cnt = decl.inputs.len(); @@ -325,19 +328,19 @@ fn is_bench_fn(cx: &TestCtxt, i: Gc) -> bool { return has_bench_attr && has_test_signature(i); } -fn is_ignored(cx: &TestCtxt, i: Gc) -> bool { +fn is_ignored(cx: &TestCtxt, i: &ast::Item) -> bool { i.attrs.iter().any(|attr| { // check ignore(cfg(foo, bar)) attr.check_name("ignore") && match attr.meta_item_list() { Some(ref cfgs) => { - attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x)) + attr::test_cfg(cx.config.as_slice(), cfgs.iter()) } None => true } }) } -fn should_fail(i: Gc) -> bool { +fn should_fail(i: &ast::Item) -> bool { attr::contains_name(i.attrs.as_slice(), "should_fail") } @@ -362,9 +365,9 @@ fn mk_std(cx: &TestCtxt) -> ast::ViewItem { let id_test = token::str_to_ident("test"); let (vi, vis) = if cx.is_test_crate { (ast::ViewItemUse( - box(GC) nospan(ast::ViewPathSimple(id_test, - path_node(vec!(id_test)), - ast::DUMMY_NODE_ID))), + P(nospan(ast::ViewPathSimple(id_test, + path_node(vec!(id_test)), + ast::DUMMY_NODE_ID)))), ast::Public) } else { (ast::ViewItemExternCrate(id_test, None, ast::DUMMY_NODE_ID), @@ -378,7 +381,7 @@ fn mk_std(cx: &TestCtxt) -> ast::ViewItem { } } -fn mk_test_module(cx: &mut TestCtxt) -> (Gc, Option) { +fn mk_test_module(cx: &mut TestCtxt) -> (P, Option) { // Link to test crate let view_items = vec!(mk_std(cx)); @@ -421,7 +424,7 @@ fn 
mk_test_module(cx: &mut TestCtxt) -> (Gc, Option) { ast::DUMMY_NODE_ID)); ast::ViewItem { - node: ast::ViewItemUse(box(GC) use_path), + node: ast::ViewItemUse(P(use_path)), attrs: vec![], vis: ast::Inherited, span: DUMMY_SP @@ -430,7 +433,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (Gc, Option) { debug!("Synthetic test module:\n{}\n", pprust::item_to_string(&item)); - (box(GC) item, reexport) + (P(item), reexport) } fn nospan(t: T) -> codemap::Spanned { @@ -449,7 +452,7 @@ fn path_node(ids: Vec ) -> ast::Path { } } -fn mk_tests(cx: &TestCtxt) -> Gc { +fn mk_tests(cx: &TestCtxt) -> P { // The vector of test_descs for this crate let test_descs = mk_test_descs(cx); @@ -483,24 +486,24 @@ fn is_test_crate(krate: &ast::Crate) -> bool { } } -fn mk_test_descs(cx: &TestCtxt) -> Gc { +fn mk_test_descs(cx: &TestCtxt) -> P { debug!("building test vector from {} tests", cx.testfns.len()); - box(GC) ast::Expr { + P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprAddrOf(ast::MutImmutable, - box(GC) ast::Expr { + P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprVec(cx.testfns.iter().map(|test| { mk_test_desc_and_fn_rec(cx, test) - }).collect()), - span: DUMMY_SP, - }), + }).collect()), + span: DUMMY_SP, + })), span: DUMMY_SP, - } + }) } -fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> Gc { +fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // FIXME #15962: should be using quote_expr, but that stringifies // __test_reexports, causing it to be reinterned, losing the // gensym information. diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index ce262bf449a15..fd643a70c7b95 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -119,7 +119,6 @@ pub mod front { pub mod config; pub mod test; pub mod std_inject; - pub mod assign_node_ids_and_map; pub mod feature_gate; pub mod show_span; } diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 616219a3cb995..58c05bee44373 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -36,8 +36,8 @@ use lint::{Context, LintPass, LintArray}; use std::cmp; use std::collections::HashMap; +use std::slice; use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; -use std::gc::Gc; use syntax::abi; use syntax::ast_map; use syntax::attr::AttrMetaMethods; @@ -45,6 +45,7 @@ use syntax::attr; use syntax::codemap::Span; use syntax::parse::token; use syntax::{ast, ast_util, visit}; +use syntax::ptr::P; use syntax::visit::Visitor; declare_lint!(WHILE_TRUE, Warn, @@ -59,9 +60,9 @@ impl LintPass for WhileTrue { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { match e.node { - ast::ExprWhile(cond, _, _) => { + ast::ExprWhile(ref cond, _, _) => { match cond.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitBool(true) => { cx.span_lint(WHILE_TRUE, e.span, @@ -91,9 +92,9 @@ impl LintPass for UnusedCasts { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { match e.node { - ast::ExprCast(expr, ty) => { - let t_t = ast_ty_to_ty(cx, &infer::new_infer_ctxt(cx.tcx), &*ty); - if ty::get(ty::expr_ty(cx.tcx, &*expr)).sty == ty::get(t_t).sty { + ast::ExprCast(ref expr, ref ty) => { + let t_t = ast_ty_to_ty(cx, &infer::new_infer_ctxt(cx.tcx), &**ty); + if ty::get(ty::expr_ty(cx.tcx, &**expr)).sty == ty::get(t_t).sty { cx.span_lint(UNNECESSARY_TYPECAST, ty.span, "unnecessary type cast"); } } @@ -131,9 +132,9 @@ impl LintPass for TypeLimits { fn check_expr(&mut self, cx: &Context, e: &ast::Expr) { match e.node { - ast::ExprUnary(ast::UnNeg, expr) => { + 
ast::ExprUnary(ast::UnNeg, ref expr) => { match expr.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitInt(_, ast::UnsignedIntLit(_)) => { cx.span_lint(UNSIGNED_NEGATE, e.span, @@ -144,7 +145,7 @@ impl LintPass for TypeLimits { } }, _ => { - let t = ty::expr_ty(cx.tcx, &*expr); + let t = ty::expr_ty(cx.tcx, &**expr); match ty::get(t).sty { ty::ty_uint(_) => { cx.span_lint(UNSIGNED_NEGATE, e.span, @@ -160,16 +161,16 @@ impl LintPass for TypeLimits { self.negated_expr_id = expr.id; } }, - ast::ExprParen(expr) if self.negated_expr_id == e.id => { + ast::ExprParen(ref expr) if self.negated_expr_id == e.id => { self.negated_expr_id = expr.id; }, - ast::ExprBinary(binop, l, r) => { - if is_comparison(binop) && !check_limits(cx.tcx, binop, &*l, &*r) { + ast::ExprBinary(binop, ref l, ref r) => { + if is_comparison(binop) && !check_limits(cx.tcx, binop, &**l, &**r) { cx.span_lint(TYPE_LIMITS, e.span, "comparison is useless due to type limits"); } }, - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match ty::get(ty::expr_ty(cx.tcx, e)).sty { ty::ty_int(t) => { match lit.node { @@ -292,7 +293,7 @@ impl LintPass for TypeLimits { ty::ty_int(int_ty) => { let (min, max) = int_ty_range(int_ty); let lit_val: i64 = match lit.node { - ast::ExprLit(li) => match li.node { + ast::ExprLit(ref li) => match li.node { ast::LitInt(v, ast::SignedIntLit(_, ast::Plus)) | ast::LitInt(v, ast::UnsuffixedIntLit(ast::Plus)) => v as i64, ast::LitInt(v, ast::SignedIntLit(_, ast::Minus)) | @@ -306,7 +307,7 @@ impl LintPass for TypeLimits { ty::ty_uint(uint_ty) => { let (min, max): (u64, u64) = uint_ty_range(uint_ty); let lit_val: u64 = match lit.node { - ast::ExprLit(li) => match li.node { + ast::ExprLit(ref li) => match li.node { ast::LitInt(v, _) => v, _ => return true }, @@ -400,8 +401,8 @@ impl LintPass for CTypes { ast::ItemForeignMod(ref nmod) if nmod.abi != abi::RustIntrinsic => { for ni in nmod.items.iter() { match ni.node { - ast::ForeignItemFn(decl, _) => check_foreign_fn(cx, &*decl), - ast::ForeignItemStatic(t, _) => check_ty(cx, &*t) + ast::ForeignItemFn(ref decl, _) => check_foreign_fn(cx, &**decl), + ast::ForeignItemStatic(ref t, _) => check_ty(cx, &**t) } } } @@ -477,7 +478,7 @@ impl LintPass for HeapMemory { // If it's a struct, we also have to check the fields' types match it.node { - ast::ItemStruct(struct_def, _) => { + ast::ItemStruct(ref struct_def, _) => { for struct_field in struct_def.fields.iter() { self.check_heap_type(cx, struct_field.span, ty::node_id_to_type(cx.tcx, struct_field.node.id)); @@ -658,7 +659,7 @@ impl LintPass for PathStatement { fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { match s.node { - ast::StmtSemi(expr, _) => { + ast::StmtSemi(ref expr, _) => { match expr.node { ast::ExprPath(_) => cx.span_lint(PATH_STATEMENT, s.span, "path statement with no effect"), @@ -685,10 +686,10 @@ impl LintPass for UnusedResult { fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { let expr = match s.node { - ast::StmtSemi(expr, _) => expr, + ast::StmtSemi(ref expr, _) => &**expr, _ => return }; - let t = ty::expr_ty(cx.tcx, &*expr); + let t = ty::expr_ty(cx.tcx, expr); match ty::get(t).sty { ty::ty_nil | ty::ty_bot | ty::ty_bool => return, _ => {} @@ -698,7 +699,7 @@ impl LintPass for UnusedResult { _ => {} } - let t = ty::expr_ty(cx.tcx, &*expr); + let t = ty::expr_ty(cx.tcx, expr); let mut warned = false; match ty::get(t).sty { ty::ty_struct(did, _) | @@ -1080,29 +1081,29 @@ impl LintPass for UnnecessaryParens { fn check_expr(&mut self, cx: 
&Context, e: &ast::Expr) { let (value, msg, struct_lit_needs_parens) = match e.node { - ast::ExprIf(cond, _, _) => (cond, "`if` condition", true), - ast::ExprWhile(cond, _, _) => (cond, "`while` condition", true), - ast::ExprMatch(head, _) => (head, "`match` head expression", true), - ast::ExprRet(Some(value)) => (value, "`return` value", false), - ast::ExprAssign(_, value) => (value, "assigned value", false), - ast::ExprAssignOp(_, _, value) => (value, "assigned value", false), + ast::ExprIf(ref cond, _, _) => (cond, "`if` condition", true), + ast::ExprWhile(ref cond, _, _) => (cond, "`while` condition", true), + ast::ExprMatch(ref head, _) => (head, "`match` head expression", true), + ast::ExprRet(Some(ref value)) => (value, "`return` value", false), + ast::ExprAssign(_, ref value) => (value, "assigned value", false), + ast::ExprAssignOp(_, _, ref value) => (value, "assigned value", false), _ => return }; - self.check_unnecessary_parens_core(cx, &*value, msg, struct_lit_needs_parens); + self.check_unnecessary_parens_core(cx, &**value, msg, struct_lit_needs_parens); } fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { let (value, msg) = match s.node { - ast::StmtDecl(decl, _) => match decl.node { - ast::DeclLocal(local) => match local.init { - Some(value) => (value, "assigned value"), + ast::StmtDecl(ref decl, _) => match decl.node { + ast::DeclLocal(ref local) => match local.init { + Some(ref value) => (value, "assigned value"), None => return }, _ => return }, _ => return }; - self.check_unnecessary_parens_core(cx, &*value, msg, false); + self.check_unnecessary_parens_core(cx, &**value, msg, false); } } @@ -1157,12 +1158,12 @@ declare_lint!(pub UNUSED_MUT, Warn, pub struct UnusedMut; impl UnusedMut { - fn check_unused_mut_pat(&self, cx: &Context, pats: &[Gc]) { + fn check_unused_mut_pat(&self, cx: &Context, pats: &[P]) { // collect all mutable pattern and group their NodeIDs by their Identifier to // avoid false warnings in match arms with multiple patterns let mut mutables = HashMap::new(); - for &p in pats.iter() { - pat_util::pat_bindings(&cx.tcx.def_map, &*p, |mode, id, _, path1| { + for p in pats.iter() { + pat_util::pat_bindings(&cx.tcx.def_map, &**p, |mode, id, _, path1| { let ident = path1.node; match mode { ast::BindByValue(ast::MutMutable) => { @@ -1205,10 +1206,10 @@ impl LintPass for UnusedMut { fn check_stmt(&mut self, cx: &Context, s: &ast::Stmt) { match s.node { - ast::StmtDecl(d, _) => { + ast::StmtDecl(ref d, _) => { match d.node { - ast::DeclLocal(l) => { - self.check_unused_mut_pat(cx, &[l.pat]); + ast::DeclLocal(ref l) => { + self.check_unused_mut_pat(cx, slice::ref_slice(&l.pat)); }, _ => {} } @@ -1221,7 +1222,7 @@ impl LintPass for UnusedMut { _: visit::FnKind, decl: &ast::FnDecl, _: &ast::Block, _: Span, _: ast::NodeId) { for a in decl.inputs.iter() { - self.check_unused_mut_pat(cx, &[a.pat]); + self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat)); } } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index b1975ab913fc2..4dd944415127e 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -278,7 +278,7 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) attr::mark_used(attr); - let meta = attr.node.value; + let meta = &attr.node.value; let metas = match meta.node { ast::MetaList(_, ref metas) => metas, _ => { @@ -709,8 +709,8 @@ impl LintPass for GatherNodeLevels { /// /// Consumes the `lint_store` field of the `Session`. 
pub fn check_crate(tcx: &ty::ctxt, - krate: &ast::Crate, exported_items: &ExportedItems) { + let krate = tcx.map.krate(); let mut cx = Context::new(tcx, krate, exported_items); // Visit the whole crate. diff --git a/src/librustc/metadata/csearch.rs b/src/librustc/metadata/csearch.rs index ac161ef8bdefe..c27f1d9ed1f53 100644 --- a/src/librustc/metadata/csearch.rs +++ b/src/librustc/metadata/csearch.rs @@ -97,18 +97,18 @@ pub fn get_item_path(tcx: &ty::ctxt, def: ast::DefId) -> Vec path.as_slice()) } -pub enum found_ast { - found(ast::InlinedItem), - found_parent(ast::DefId, ast::InlinedItem), +pub enum found_ast<'ast> { + found(&'ast ast::InlinedItem), + found_parent(ast::DefId, &'ast ast::InlinedItem), not_found, } // Finds the AST for this item in the crate metadata, if any. If the item was // not marked for inlining, then the AST will not be present and hence none // will be returned. -pub fn maybe_get_item_ast(tcx: &ty::ctxt, def: ast::DefId, - decode_inlined_item: decoder::DecodeInlinedItem) - -> found_ast { +pub fn maybe_get_item_ast<'tcx>(tcx: &ty::ctxt<'tcx>, def: ast::DefId, + decode_inlined_item: decoder::DecodeInlinedItem) + -> found_ast<'tcx> { let cstore = &tcx.sess.cstore; let cdata = cstore.get_crate_data(def.krate); decoder::maybe_get_item_ast(&*cdata, tcx, def.node, decode_inlined_item) diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs index 904ca2416e0ff..bcf9a4f678d0a 100644 --- a/src/librustc/metadata/decoder.rs +++ b/src/librustc/metadata/decoder.rs @@ -30,7 +30,6 @@ use middle::ty; use middle::typeck; use middle::astencode::vtable_decoder_helpers; -use std::gc::Gc; use std::hash::Hash; use std::hash; use std::io::extensions::u64_from_be_bytes; @@ -48,6 +47,7 @@ use syntax::parse::token; use syntax::print::pprust; use syntax::ast; use syntax::codemap; +use syntax::ptr::P; pub type Cmd<'a> = &'a crate_metadata; @@ -612,27 +612,28 @@ pub fn get_item_path(cdata: Cmd, id: ast::NodeId) -> Vec { item_path(lookup_item(id, cdata.data())) } -pub type DecodeInlinedItem<'a> = |cdata: Cmd, - tcx: &ty::ctxt, - path: Vec, - par_doc: rbml::Doc|: 'a - -> Result >; +pub type DecodeInlinedItem<'a> = <'tcx> |cdata: Cmd, + tcx: &ty::ctxt<'tcx>, + path: Vec, + par_doc: rbml::Doc|: 'a + -> Result<&'tcx ast::InlinedItem, + Vec>; -pub fn maybe_get_item_ast(cdata: Cmd, tcx: &ty::ctxt, id: ast::NodeId, - decode_inlined_item: DecodeInlinedItem) - -> csearch::found_ast { +pub fn maybe_get_item_ast<'tcx>(cdata: Cmd, tcx: &ty::ctxt<'tcx>, id: ast::NodeId, + decode_inlined_item: DecodeInlinedItem) + -> csearch::found_ast<'tcx> { debug!("Looking up item: {}", id); let item_doc = lookup_item(id, cdata.data()); let path = Vec::from_slice(item_path(item_doc).init()); match decode_inlined_item(cdata, tcx, path, item_doc) { - Ok(ref ii) => csearch::found(*ii), + Ok(ii) => csearch::found(ii), Err(path) => { match item_parent_item(item_doc) { Some(did) => { let did = translate_def_id(cdata, did); let parent_item = lookup_item(did.node, cdata.data()); match decode_inlined_item(cdata, tcx, path, parent_item) { - Ok(ref ii) => csearch::found_parent(did, *ii), + Ok(ii) => csearch::found_parent(did, ii), Err(_) => csearch::not_found } } @@ -1003,8 +1004,8 @@ pub fn get_struct_fields(intr: Rc, cdata: Cmd, id: ast::NodeId) result } -fn get_meta_items(md: rbml::Doc) -> Vec> { - let mut items: Vec> = Vec::new(); +fn get_meta_items(md: rbml::Doc) -> Vec> { + let mut items: Vec> = Vec::new(); reader::tagged_docs(md, tag_meta_item_word, |meta_item_doc| { let nd = 
reader::get_doc(meta_item_doc, tag_meta_item_name); let n = token::intern_and_get_ident(nd.as_str_slice()); @@ -1043,7 +1044,7 @@ fn get_attributes(md: rbml::Doc) -> Vec { // Currently it's only possible to have a single meta item on // an attribute assert_eq!(meta_items.len(), 1u); - let meta_item = *meta_items.get(0); + let meta_item = meta_items.move_iter().nth(0).unwrap(); attrs.push( codemap::Spanned { node: ast::Attribute_ { diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs index 209a09dbfafdf..376cccc3f10fc 100644 --- a/src/librustc/metadata/encoder.rs +++ b/src/librustc/metadata/encoder.rs @@ -29,7 +29,6 @@ use util::nodemap::{NodeMap, NodeSet}; use serialize::Encodable; use std::cell::RefCell; -use std::gc::Gc; use std::hash::Hash; use std::hash; use std::mem; @@ -46,6 +45,7 @@ use syntax::attr::AttrMetaMethods; use syntax::diagnostic::SpanHandler; use syntax::parse::token::special_idents; use syntax::parse::token; +use syntax::ptr::P; use syntax::visit::Visitor; use syntax::visit; use syntax; @@ -55,16 +55,11 @@ use rbml::io::SeekableMemWriter; /// A borrowed version of `ast::InlinedItem`. pub enum InlinedItemRef<'a> { IIItemRef(&'a ast::Item), - IITraitItemRef(ast::DefId, InlinedTraitItemRef<'a>), + IITraitItemRef(ast::DefId, &'a ast::TraitItem), + IIImplItemRef(ast::DefId, &'a ast::ImplItem), IIForeignRef(&'a ast::ForeignItem) } -/// A borrowed version of `ast::InlinedTraitItem`. -pub enum InlinedTraitItemRef<'a> { - ProvidedInlinedTraitItemRef(&'a Method), - RequiredInlinedTraitItemRef(&'a Method), -} - pub type Encoder<'a> = writer::Encoder<'a, SeekableMemWriter>; pub type EncodeInlinedItem<'a> = |ecx: &EncodeContext, @@ -507,7 +502,7 @@ fn encode_reexported_static_methods(ecx: &EncodeContext, /// * For enums, iterates through the node IDs of the variants. /// /// * For newtype structs, iterates through the node ID of the constructor. -fn each_auxiliary_node_id(item: Gc, callback: |NodeId| -> bool) -> bool { +fn each_auxiliary_node_id(item: &Item, callback: |NodeId| -> bool) -> bool { let mut continue_ = true; match item.node { ItemEnum(ref enum_def, _) => { @@ -518,7 +513,7 @@ fn each_auxiliary_node_id(item: Gc, callback: |NodeId| -> bool) -> bool { } } } - ItemStruct(struct_def, _) => { + ItemStruct(ref struct_def, _) => { // If this is a newtype struct, return the constructor. 
match struct_def.ctor_id { Some(ctor_id) if struct_def.fields.len() > 0 && @@ -587,7 +582,7 @@ fn encode_info_for_mod(ecx: &EncodeContext, rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); rbml_w.end_tag(); - each_auxiliary_node_id(*item, |auxiliary_node_id| { + each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( auxiliary_node_id)).as_slice()); @@ -858,7 +853,7 @@ fn encode_info_for_method(ecx: &EncodeContext, impl_path: PathElems, is_default_impl: bool, parent_id: NodeId, - ast_method_opt: Option>) { + ast_item_opt: Option<&ImplItem>) { debug!("encode_info_for_method: {:?} {}", m.def_id, token::get_ident(m.ident)); @@ -877,26 +872,20 @@ fn encode_info_for_method(ecx: &EncodeContext, let elem = ast_map::PathName(m.ident.name); encode_path(rbml_w, impl_path.chain(Some(elem).move_iter())); - match ast_method_opt { - Some(ast_method) => { - encode_attributes(rbml_w, ast_method.attrs.as_slice()) - } - None => () - } - - for &ast_method in ast_method_opt.iter() { - let any_types = !pty.generics.types.is_empty(); - if any_types || is_default_impl || should_inline(ast_method.attrs.as_slice()) { - encode_inlined_item(ecx, - rbml_w, - IITraitItemRef(local_def(parent_id), - RequiredInlinedTraitItemRef( - &*ast_method))); - } - if !any_types { - encode_symbol(ecx, rbml_w, m.def_id.node); + match ast_item_opt { + Some(&ast::MethodImplItem(ref ast_method)) => { + encode_attributes(rbml_w, ast_method.attrs.as_slice()); + let any_types = !pty.generics.types.is_empty(); + if any_types || is_default_impl || should_inline(ast_method.attrs.as_slice()) { + encode_inlined_item(ecx, rbml_w, IIImplItemRef(local_def(parent_id), + ast_item_opt.unwrap())); + } + if !any_types { + encode_symbol(ecx, rbml_w, m.def_id.node); + } + encode_method_argument_names(rbml_w, ast_method.pe_fn_decl()); } - encode_method_argument_names(rbml_w, &*ast_method.pe_fn_decl()); + None => {} } rbml_w.end_tag(); @@ -1127,7 +1116,7 @@ fn encode_info_for_item(ecx: &EncodeContext, (*enum_definition).variants.as_slice(), index); } - ItemStruct(struct_def, _) => { + ItemStruct(ref struct_def, _) => { let fields = ty::lookup_struct_fields(tcx, def_id); /* First, encode the fields @@ -1178,7 +1167,7 @@ fn encode_info_for_item(ecx: &EncodeContext, None => {} } } - ItemImpl(_, ref opt_trait, ty, ref ast_items) => { + ItemImpl(_, ref opt_trait, ref ty, ref ast_items) => { // We need to encode information about the default methods we // have inherited, so we drive this based on the impl structure. 
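Nearly every hunk in the lint and metadata-encoder changes above is the same mechanical rewrite: AST nodes are now held through the owning pointer `P<T>` instead of the copyable `Gc<T>`, so match arms switch from binding by value to binding with `ref`, and call sites reborrow through the box with `&**`. A small self-contained sketch of that pattern in the patch's own pre-1.0 dialect (the `Node`/`depth` names are invented for illustration; `Box` stands in for `P`):

    enum Node {
        Leaf(int),
        Wrap(Box<Node>),   // plays the role of a P<Node>-style owning pointer
    }

    fn depth(n: &Node) -> uint {
        match *n {
            Leaf(_) => 1,
            // bind the variant by `ref`, then reborrow through the owner to get &Node
            Wrap(ref inner) => 1 + depth(&**inner),
        }
    }

    fn main() {
        let n = Wrap(box Leaf(0));
        assert_eq!(depth(&n), 2);
    }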
let impl_items = tcx.impl_items.borrow(); @@ -1228,7 +1217,7 @@ fn encode_info_for_item(ecx: &EncodeContext, let num_implemented_methods = ast_items.len(); for (i, &trait_item_def_id) in items.iter().enumerate() { let ast_item = if i < num_implemented_methods { - Some(*ast_items.get(i)) + Some(ast_items.get(i)) } else { None }; @@ -1238,29 +1227,10 @@ fn encode_info_for_item(ecx: &EncodeContext, pos: rbml_w.writer.tell().unwrap(), }); - let trait_item_type = + let ty::MethodTraitItem(method_type) = ty::impl_or_trait_item(tcx, trait_item_def_id.def_id()); - match (trait_item_type, ast_item) { - (ty::MethodTraitItem(method_type), - Some(ast::MethodImplItem(ast_method))) => { - encode_info_for_method(ecx, - rbml_w, - &*method_type, - path.clone(), - false, - item.id, - Some(ast_method)) - } - (ty::MethodTraitItem(method_type), None) => { - encode_info_for_method(ecx, - rbml_w, - &*method_type, - path.clone(), - false, - item.id, - None) - } - } + encode_info_for_method(ecx, rbml_w, &*method_type, path.clone(), + false, item.id, ast_item) } } ItemTrait(_, _, _, ref ms) => { @@ -1345,15 +1315,16 @@ fn encode_info_for_item(ecx: &EncodeContext, } } - match ms.get(i) { - &RequiredMethod(ref tm) => { + let trait_item = ms.get(i); + match *trait_item { + RequiredMethod(ref tm) => { encode_attributes(rbml_w, tm.attrs.as_slice()); encode_item_sort(rbml_w, 'r'); encode_parent_sort(rbml_w, 't'); encode_method_argument_names(rbml_w, &*tm.decl); } - &ProvidedMethod(m) => { + ProvidedMethod(ref m) => { encode_attributes(rbml_w, m.attrs.as_slice()); // If this is a static method, we've already // encoded this. @@ -1366,14 +1337,9 @@ fn encode_info_for_item(ecx: &EncodeContext, } encode_item_sort(rbml_w, 'p'); encode_parent_sort(rbml_w, 't'); - encode_inlined_item( - ecx, - rbml_w, - IITraitItemRef( - def_id, - ProvidedInlinedTraitItemRef(&*m))); - encode_method_argument_names(rbml_w, - &*m.pe_fn_decl()); + encode_inlined_item(ecx, rbml_w, + IITraitItemRef(def_id, trait_item)); + encode_method_argument_names(rbml_w, &*m.pe_fn_decl()); } } } @@ -1571,7 +1537,7 @@ fn write_i64(writer: &mut SeekableMemWriter, &n: &i64) { wr.write_be_u32(n as u32); } -fn encode_meta_item(rbml_w: &mut Encoder, mi: Gc) { +fn encode_meta_item(rbml_w: &mut Encoder, mi: &MetaItem) { match mi.node { MetaWord(ref name) => { rbml_w.start_tag(tag_meta_item_word); @@ -1601,7 +1567,7 @@ fn encode_meta_item(rbml_w: &mut Encoder, mi: Gc) { rbml_w.writer.write(name.get().as_bytes()); rbml_w.end_tag(); for inner_item in items.iter() { - encode_meta_item(rbml_w, *inner_item); + encode_meta_item(rbml_w, &**inner_item); } rbml_w.end_tag(); } @@ -1613,7 +1579,7 @@ fn encode_attributes(rbml_w: &mut Encoder, attrs: &[Attribute]) { for attr in attrs.iter() { rbml_w.start_tag(tag_attribute); rbml_w.wr_tagged_u8(tag_attribute_is_sugared_doc, attr.node.is_sugared_doc as u8); - encode_meta_item(rbml_w, attr.node.value); + encode_meta_item(rbml_w, &*attr.node.value); rbml_w.end_tag(); } rbml_w.end_tag(); @@ -1852,12 +1818,12 @@ fn encode_misc_info(ecx: &EncodeContext, rbml_w: &mut Encoder) { rbml_w.start_tag(tag_misc_info); rbml_w.start_tag(tag_misc_info_crate_items); - for &item in krate.module.items.iter() { + for item in krate.module.items.iter() { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def(item.id)).as_slice()); rbml_w.end_tag(); - each_auxiliary_node_id(item, |auxiliary_node_id| { + each_auxiliary_node_id(&**item, |auxiliary_node_id| { rbml_w.start_tag(tag_mod_child); rbml_w.wr_str(def_to_string(local_def( 
auxiliary_node_id)).as_slice()); diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index b7597b50b4906..2dab3c2b1da02 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -36,6 +36,7 @@ use syntax::ast_util::PostExpansionMethod; use syntax::codemap::Span; use syntax::fold::Folder; use syntax::parse::token; +use syntax::ptr::P; use syntax; use libc; @@ -52,25 +53,20 @@ use serialize::{EncoderHelpers}; #[cfg(test)] use syntax::parse; #[cfg(test)] use syntax::print::pprust; -#[cfg(test)] use std::gc::Gc; -struct DecodeContext<'a, 'tcx: 'a> { - cdata: &'a cstore::crate_metadata, +struct DecodeContext<'a, 'b, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, -} - -struct ExtendedDecodeContext<'a, 'tcx: 'a> { - dcx: &'a DecodeContext<'a, 'tcx>, + cdata: &'b cstore::crate_metadata, from_id_range: ast_util::IdRange, to_id_range: ast_util::IdRange } trait tr { - fn tr(&self, xcx: &ExtendedDecodeContext) -> Self; + fn tr(&self, dcx: &DecodeContext) -> Self; } trait tr_intern { - fn tr_intern(&self, xcx: &ExtendedDecodeContext) -> ast::DefId; + fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId; } pub type Encoder<'a> = writer::Encoder<'a, SeekableMemWriter>; @@ -84,19 +80,21 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext, let id = match ii { e::IIItemRef(i) => i.id, e::IIForeignRef(i) => i.id, - e::IITraitItemRef(_, e::ProvidedInlinedTraitItemRef(m)) => m.id, - e::IITraitItemRef(_, e::RequiredInlinedTraitItemRef(m)) => m.id, + e::IITraitItemRef(_, &ast::ProvidedMethod(ref m)) => m.id, + e::IITraitItemRef(_, &ast::RequiredMethod(ref m)) => m.id, + e::IIImplItemRef(_, &ast::MethodImplItem(ref m)) => m.id }; debug!("> Encoding inlined item: {} ({})", ecx.tcx.map.path_to_string(id), rbml_w.writer.tell()); + // Folding could be avoided with a smarter encoder. let ii = simplify_ast(ii); let id_range = ast_util::compute_id_range_for_inlined_item(&ii); rbml_w.start_tag(c::tag_ast as uint); id_range.encode(rbml_w); - encode_ast(rbml_w, ii); + encode_ast(rbml_w, &ii); encode_side_tables_for_ii(ecx, rbml_w, &ii); rbml_w.end_tag(); @@ -105,15 +103,28 @@ pub fn encode_inlined_item(ecx: &e::EncodeContext, rbml_w.writer.tell()); } -pub fn decode_inlined_item(cdata: &cstore::crate_metadata, - tcx: &ty::ctxt, - path: Vec, - par_doc: rbml::Doc) - -> Result> { - let dcx = &DecodeContext { - cdata: cdata, - tcx: tcx, - }; +impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> { + fn new_id(&self, id: ast::NodeId) -> ast::NodeId { + if id == ast::DUMMY_NODE_ID { + // Used by ast_map to map the NodeInlinedParent. 
+ self.tcx.sess.next_node_id() + } else { + self.tr_id(id) + } + } + fn new_def_id(&self, def_id: ast::DefId) -> ast::DefId { + self.tr_def_id(def_id) + } + fn new_span(&self, span: Span) -> Span { + self.tr_span(span) + } +} + +pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, + tcx: &ty::ctxt<'tcx>, + path: Vec, + par_doc: rbml::Doc) + -> Result<&'tcx ast::InlinedItem, Vec> { match par_doc.opt_child(c::tag_ast) { None => Err(path), Some(ast_doc) => { @@ -127,21 +138,28 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata, }); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); - let to_id_range = reserve_id_range(&dcx.tcx.sess, from_id_range); - let xcx = &ExtendedDecodeContext { - dcx: dcx, + let to_id_range = reserve_id_range(&tcx.sess, from_id_range); + let dcx = &DecodeContext { + cdata: cdata, + tcx: tcx, from_id_range: from_id_range, to_id_range: to_id_range }; let raw_ii = decode_ast(ast_doc); - let ii = renumber_and_map_ast(xcx, &dcx.tcx.map, path, raw_ii); - let ident = match ii { - ast::IIItem(i) => i.ident, - ast::IIForeign(i) => i.ident, - ast::IITraitItem(_, iti) => { - match iti { - ast::ProvidedInlinedTraitItem(m) => m.pe_ident(), - ast::RequiredInlinedTraitItem(m) => m.pe_ident(), + let ii = ast_map::map_decoded_item(&dcx.tcx.map, path, raw_ii, dcx); + + let ident = match *ii { + ast::IIItem(ref i) => i.ident, + ast::IIForeign(ref i) => i.ident, + ast::IITraitItem(_, ref ti) => { + match *ti { + ast::ProvidedMethod(ref m) => m.pe_ident(), + ast::RequiredMethod(ref ty_m) => ty_m.ident + } + }, + ast::IIImplItem(_, ref m) => { + match *m { + ast::MethodImplItem(ref m) => m.pe_ident() } } }; @@ -149,12 +167,12 @@ pub fn decode_inlined_item(cdata: &cstore::crate_metadata, debug!("< Decoded inlined fn: {}::{}", path_as_str.unwrap(), token::get_ident(ident)); - region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, &ii); - decode_side_tables(xcx, ast_doc); - match ii { - ast::IIItem(i) => { + region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii); + decode_side_tables(dcx, ast_doc); + match *ii { + ast::IIItem(ref i) => { debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<", - syntax::print::pprust::item_to_string(&*i)); + syntax::print::pprust::item_to_string(&**i)); } _ => { } } @@ -176,7 +194,7 @@ fn reserve_id_range(sess: &Session, ast_util::IdRange { min: to_id_min, max: to_id_max } } -impl<'a, 'tcx> ExtendedDecodeContext<'a, 'tcx> { +impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> { pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId { /*! * Translates an internal id, meaning a node id that is known @@ -214,7 +232,7 @@ impl<'a, 'tcx> ExtendedDecodeContext<'a, 'tcx> { * `tr_intern_def_id()` below. */ - decoder::translate_def_id(self.dcx.cdata, did) + decoder::translate_def_id(self.cdata, did) } pub fn tr_intern_def_id(&self, did: ast::DefId) -> ast::DefId { /*! 
@@ -233,26 +251,26 @@ impl<'a, 'tcx> ExtendedDecodeContext<'a, 'tcx> { } impl tr_intern for ast::DefId { - fn tr_intern(&self, xcx: &ExtendedDecodeContext) -> ast::DefId { - xcx.tr_intern_def_id(*self) + fn tr_intern(&self, dcx: &DecodeContext) -> ast::DefId { + dcx.tr_intern_def_id(*self) } } impl tr for ast::DefId { - fn tr(&self, xcx: &ExtendedDecodeContext) -> ast::DefId { - xcx.tr_def_id(*self) + fn tr(&self, dcx: &DecodeContext) -> ast::DefId { + dcx.tr_def_id(*self) } } impl tr for Option { - fn tr(&self, xcx: &ExtendedDecodeContext) -> Option { - self.map(|d| xcx.tr_def_id(d)) + fn tr(&self, dcx: &DecodeContext) -> Option { + self.map(|d| dcx.tr_def_id(d)) } } impl tr for Span { - fn tr(&self, xcx: &ExtendedDecodeContext) -> Span { - xcx.tr_span(*self) + fn tr(&self, dcx: &DecodeContext) -> Span { + dcx.tr_span(*self) } } @@ -267,18 +285,18 @@ impl, E> def_id_encoder_helpers for S { } trait def_id_decoder_helpers { - fn read_def_id(&mut self, xcx: &ExtendedDecodeContext) -> ast::DefId; - fn read_def_id_noxcx(&mut self, + fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId; + fn read_def_id_nodcx(&mut self, cdata: &cstore::crate_metadata) -> ast::DefId; } impl, E> def_id_decoder_helpers for D { - fn read_def_id(&mut self, xcx: &ExtendedDecodeContext) -> ast::DefId { + fn read_def_id(&mut self, dcx: &DecodeContext) -> ast::DefId { let did: ast::DefId = Decodable::decode(self).ok().unwrap(); - did.tr(xcx) + did.tr(dcx) } - fn read_def_id_noxcx(&mut self, + fn read_def_id_nodcx(&mut self, cdata: &cstore::crate_metadata) -> ast::DefId { let did: ast::DefId = Decodable::decode(self).ok().unwrap(); decoder::translate_def_id(cdata, did) @@ -300,7 +318,7 @@ impl, E> def_id_decoder_helpers for D { // We also have to adjust the spans: for now we just insert a dummy span, // but eventually we should add entries to the local codemap as required. -fn encode_ast(rbml_w: &mut Encoder, item: ast::InlinedItem) { +fn encode_ast(rbml_w: &mut Encoder, item: &ast::InlinedItem) { rbml_w.start_tag(c::tag_tree as uint); item.encode(rbml_w); rbml_w.end_tag(); @@ -309,29 +327,36 @@ fn encode_ast(rbml_w: &mut Encoder, item: ast::InlinedItem) { struct NestedItemsDropper; impl Folder for NestedItemsDropper { - fn fold_block(&mut self, blk: ast::P) -> ast::P { - let stmts_sans_items = blk.stmts.iter().filter_map(|stmt| { - match stmt.node { - ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => Some(*stmt), - ast::StmtDecl(decl, _) => { - match decl.node { - ast::DeclLocal(_) => Some(*stmt), - ast::DeclItem(_) => None, + fn fold_block(&mut self, blk: P) -> P { + blk.and_then(|ast::Block {id, stmts, expr, rules, span, ..}| { + let stmts_sans_items = stmts.move_iter().filter_map(|stmt| { + let use_stmt = match stmt.node { + ast::StmtExpr(_, _) | ast::StmtSemi(_, _) => true, + ast::StmtDecl(ref decl, _) => { + match decl.node { + ast::DeclLocal(_) => true, + ast::DeclItem(_) => false, + } } + ast::StmtMac(..) => fail!("unexpanded macro in astencode") + }; + if use_stmt { + Some(stmt) + } else { + None } - ast::StmtMac(..) => fail!("unexpanded macro in astencode") - } - }).collect(); - let blk_sans_items = ast::P(ast::Block { - view_items: Vec::new(), // I don't know if we need the view_items - // here, but it doesn't break tests! 
- stmts: stmts_sans_items, - expr: blk.expr, - id: blk.id, - rules: blk.rules, - span: blk.span, - }); - fold::noop_fold_block(blk_sans_items, self) + }).collect(); + let blk_sans_items = P(ast::Block { + view_items: Vec::new(), // I don't know if we need the view_items + // here, but it doesn't break tests! + stmts: stmts_sans_items, + expr: expr, + id: id, + rules: rules, + span: span, + }); + fold::noop_fold_block(blk_sans_items, self) + }) } } @@ -351,27 +376,35 @@ fn simplify_ast(ii: e::InlinedItemRef) -> ast::InlinedItem { match ii { // HACK we're not dropping items. e::IIItemRef(i) => { - ast::IIItem(fold::noop_fold_item(i, &mut fld) + ast::IIItem(fold::noop_fold_item(P(i.clone()), &mut fld) .expect_one("expected one item")) } - e::IITraitItemRef(d, iti) => { - ast::IITraitItem(d, match iti { - e::ProvidedInlinedTraitItemRef(m) => { - ast::ProvidedInlinedTraitItem( - fold::noop_fold_method(m, &mut fld) + e::IITraitItemRef(d, ti) => { + ast::IITraitItem(d, match *ti { + ast::ProvidedMethod(ref m) => { + ast::ProvidedMethod( + fold::noop_fold_method(m.clone(), &mut fld) .expect_one("noop_fold_method must produce \ exactly one method")) } - e::RequiredInlinedTraitItemRef(m) => { - ast::RequiredInlinedTraitItem( - fold::noop_fold_method(m, &mut fld) + ast::RequiredMethod(ref ty_m) => { + ast::RequiredMethod( + fold::noop_fold_type_method(ty_m.clone(), &mut fld)) + } + }) + } + e::IIImplItemRef(d, m) => { + ast::IIImplItem(d, match *m { + ast::MethodImplItem(ref m) => { + ast::MethodImplItem( + fold::noop_fold_method(m.clone(), &mut fld) .expect_one("noop_fold_method must produce \ exactly one method")) } }) } e::IIForeignRef(i) => { - ast::IIForeign(fold::noop_fold_foreign_item(i, &mut fld)) + ast::IIForeign(fold::noop_fold_foreign_item(P(i.clone()), &mut fld)) } } } @@ -382,114 +415,61 @@ fn decode_ast(par_doc: rbml::Doc) -> ast::InlinedItem { Decodable::decode(&mut d).unwrap() } -struct AstRenumberer<'a, 'tcx: 'a> { - xcx: &'a ExtendedDecodeContext<'a, 'tcx>, -} - -impl<'a, 'tcx> ast_map::FoldOps for AstRenumberer<'a, 'tcx> { - fn new_id(&self, id: ast::NodeId) -> ast::NodeId { - if id == ast::DUMMY_NODE_ID { - // Used by ast_map to map the NodeInlinedParent. 
- self.xcx.dcx.tcx.sess.next_node_id() - } else { - self.xcx.tr_id(id) - } - } - fn new_span(&self, span: Span) -> Span { - self.xcx.tr_span(span) - } -} - -fn renumber_and_map_ast(xcx: &ExtendedDecodeContext, - map: &ast_map::Map, - path: Vec , - ii: ast::InlinedItem) -> ast::InlinedItem { - ast_map::map_decoded_item(map, - path.move_iter().collect(), - AstRenumberer { xcx: xcx }, - |fld| { - match ii { - ast::IIItem(i) => { - ast::IIItem(fld.fold_item(i).expect_one("expected one item")) - } - ast::IITraitItem(d, iti) => { - match iti { - ast::ProvidedInlinedTraitItem(m) => { - ast::IITraitItem( - xcx.tr_def_id(d), - ast::ProvidedInlinedTraitItem( - fld.fold_method(m) - .expect_one("expected one method"))) - } - ast::RequiredInlinedTraitItem(m) => { - ast::IITraitItem( - xcx.tr_def_id(d), - ast::RequiredInlinedTraitItem( - fld.fold_method(m) - .expect_one("expected one method"))) - } - } - } - ast::IIForeign(i) => ast::IIForeign(fld.fold_foreign_item(i)) - } - }) -} - // ______________________________________________________________________ // Encoding and decoding of ast::def -fn decode_def(xcx: &ExtendedDecodeContext, doc: rbml::Doc) -> def::Def { +fn decode_def(dcx: &DecodeContext, doc: rbml::Doc) -> def::Def { let mut dsr = reader::Decoder::new(doc); let def: def::Def = Decodable::decode(&mut dsr).unwrap(); - def.tr(xcx) + def.tr(dcx) } impl tr for def::Def { - fn tr(&self, xcx: &ExtendedDecodeContext) -> def::Def { + fn tr(&self, dcx: &DecodeContext) -> def::Def { match *self { - def::DefFn(did, p) => def::DefFn(did.tr(xcx), p), + def::DefFn(did, p) => def::DefFn(did.tr(dcx), p), def::DefStaticMethod(did, wrapped_did2, p) => { - def::DefStaticMethod(did.tr(xcx), + def::DefStaticMethod(did.tr(dcx), match wrapped_did2 { def::FromTrait(did2) => { - def::FromTrait(did2.tr(xcx)) + def::FromTrait(did2.tr(dcx)) } def::FromImpl(did2) => { - def::FromImpl(did2.tr(xcx)) + def::FromImpl(did2.tr(dcx)) } }, p) } def::DefMethod(did0, did1) => { - def::DefMethod(did0.tr(xcx), did1.map(|did1| did1.tr(xcx))) + def::DefMethod(did0.tr(dcx), did1.map(|did1| did1.tr(dcx))) } - def::DefSelfTy(nid) => { def::DefSelfTy(xcx.tr_id(nid)) } - def::DefMod(did) => { def::DefMod(did.tr(xcx)) } - def::DefForeignMod(did) => { def::DefForeignMod(did.tr(xcx)) } - def::DefStatic(did, m) => { def::DefStatic(did.tr(xcx), m) } - def::DefArg(nid, b) => { def::DefArg(xcx.tr_id(nid), b) } - def::DefLocal(nid, b) => { def::DefLocal(xcx.tr_id(nid), b) } + def::DefSelfTy(nid) => { def::DefSelfTy(dcx.tr_id(nid)) } + def::DefMod(did) => { def::DefMod(did.tr(dcx)) } + def::DefForeignMod(did) => { def::DefForeignMod(did.tr(dcx)) } + def::DefStatic(did, m) => { def::DefStatic(did.tr(dcx), m) } + def::DefArg(nid, b) => { def::DefArg(dcx.tr_id(nid), b) } + def::DefLocal(nid, b) => { def::DefLocal(dcx.tr_id(nid), b) } def::DefVariant(e_did, v_did, is_s) => { - def::DefVariant(e_did.tr(xcx), v_did.tr(xcx), is_s) + def::DefVariant(e_did.tr(dcx), v_did.tr(dcx), is_s) }, - def::DefTrait(did) => def::DefTrait(did.tr(xcx)), - def::DefTy(did) => def::DefTy(did.tr(xcx)), + def::DefTrait(did) => def::DefTrait(did.tr(dcx)), + def::DefTy(did) => def::DefTy(did.tr(dcx)), def::DefPrimTy(p) => def::DefPrimTy(p), - def::DefTyParam(s, did, v) => def::DefTyParam(s, did.tr(xcx), v), - def::DefBinding(nid, bm) => def::DefBinding(xcx.tr_id(nid), bm), - def::DefUse(did) => def::DefUse(did.tr(xcx)), + def::DefTyParam(s, did, v) => def::DefTyParam(s, did.tr(dcx), v), + def::DefBinding(nid, bm) => def::DefBinding(dcx.tr_id(nid), bm), + def::DefUse(did) => 
def::DefUse(did.tr(dcx)), def::DefUpvar(nid1, def, nid2, nid3) => { - def::DefUpvar(xcx.tr_id(nid1), - box(GC) (*def).tr(xcx), - xcx.tr_id(nid2), - xcx.tr_id(nid3)) + def::DefUpvar(dcx.tr_id(nid1), + box(GC) (*def).tr(dcx), + dcx.tr_id(nid2), + dcx.tr_id(nid3)) } - def::DefStruct(did) => def::DefStruct(did.tr(xcx)), - def::DefRegion(nid) => def::DefRegion(xcx.tr_id(nid)), + def::DefStruct(did) => def::DefStruct(did.tr(dcx)), + def::DefRegion(nid) => def::DefRegion(dcx.tr_id(nid)), def::DefTyParamBinder(nid) => { - def::DefTyParamBinder(xcx.tr_id(nid)) + def::DefTyParamBinder(dcx.tr_id(nid)) } - def::DefLabel(nid) => def::DefLabel(xcx.tr_id(nid)) + def::DefLabel(nid) => def::DefLabel(dcx.tr_id(nid)) } } } @@ -498,44 +478,44 @@ impl tr for def::Def { // Encoding and decoding of ancillary information impl tr for ty::Region { - fn tr(&self, xcx: &ExtendedDecodeContext) -> ty::Region { + fn tr(&self, dcx: &DecodeContext) -> ty::Region { match *self { ty::ReLateBound(id, br) => { - ty::ReLateBound(xcx.tr_id(id), br.tr(xcx)) + ty::ReLateBound(dcx.tr_id(id), br.tr(dcx)) } ty::ReEarlyBound(id, space, index, ident) => { - ty::ReEarlyBound(xcx.tr_id(id), space, index, ident) + ty::ReEarlyBound(dcx.tr_id(id), space, index, ident) } ty::ReScope(id) => { - ty::ReScope(xcx.tr_id(id)) + ty::ReScope(dcx.tr_id(id)) } ty::ReEmpty | ty::ReStatic | ty::ReInfer(..) => { *self } ty::ReFree(ref fr) => { - ty::ReFree(ty::FreeRegion {scope_id: xcx.tr_id(fr.scope_id), - bound_region: fr.bound_region.tr(xcx)}) + ty::ReFree(ty::FreeRegion {scope_id: dcx.tr_id(fr.scope_id), + bound_region: fr.bound_region.tr(dcx)}) } } } } impl tr for ty::BoundRegion { - fn tr(&self, xcx: &ExtendedDecodeContext) -> ty::BoundRegion { + fn tr(&self, dcx: &DecodeContext) -> ty::BoundRegion { match *self { ty::BrAnon(_) | ty::BrFresh(_) => *self, - ty::BrNamed(id, ident) => ty::BrNamed(xcx.tr_def_id(id), + ty::BrNamed(id, ident) => ty::BrNamed(dcx.tr_def_id(id), ident), } } } impl tr for ty::TraitStore { - fn tr(&self, xcx: &ExtendedDecodeContext) -> ty::TraitStore { + fn tr(&self, dcx: &DecodeContext) -> ty::TraitStore { match *self { ty::RegionTraitStore(r, m) => { - ty::RegionTraitStore(r.tr(xcx), m) + ty::RegionTraitStore(r.tr(dcx), m) } ty::UniqTraitStore => ty::UniqTraitStore } @@ -554,16 +534,16 @@ fn encode_capture_mode(rbml_w: &mut Encoder, cm: CaptureMode) { } trait rbml_decoder_helper { - fn read_freevar_entry(&mut self, xcx: &ExtendedDecodeContext) + fn read_freevar_entry(&mut self, dcx: &DecodeContext) -> freevar_entry; fn read_capture_mode(&mut self) -> CaptureMode; } impl<'a> rbml_decoder_helper for reader::Decoder<'a> { - fn read_freevar_entry(&mut self, xcx: &ExtendedDecodeContext) + fn read_freevar_entry(&mut self, dcx: &DecodeContext) -> freevar_entry { let fv: freevar_entry = Decodable::decode(self).unwrap(); - fv.tr(xcx) + fv.tr(dcx) } fn read_capture_mode(&mut self) -> CaptureMode { @@ -573,19 +553,19 @@ impl<'a> rbml_decoder_helper for reader::Decoder<'a> { } impl tr for freevar_entry { - fn tr(&self, xcx: &ExtendedDecodeContext) -> freevar_entry { + fn tr(&self, dcx: &DecodeContext) -> freevar_entry { freevar_entry { - def: self.def.tr(xcx), - span: self.span.tr(xcx), + def: self.def.tr(dcx), + span: self.span.tr(dcx), } } } impl tr for ty::UpvarBorrow { - fn tr(&self, xcx: &ExtendedDecodeContext) -> ty::UpvarBorrow { + fn tr(&self, dcx: &DecodeContext) -> ty::UpvarBorrow { ty::UpvarBorrow { kind: self.kind, - region: self.region.tr(xcx) + region: self.region.tr(dcx) } } } @@ -594,7 +574,7 @@ impl tr for 
ty::UpvarBorrow { // Encoding and decoding of MethodCallee trait read_method_callee_helper { - fn read_method_callee(&mut self, xcx: &ExtendedDecodeContext) + fn read_method_callee(&mut self, dcx: &DecodeContext) -> (typeck::ExprAdjustment, MethodCallee); } @@ -621,7 +601,7 @@ fn encode_method_callee(ecx: &e::EncodeContext, } impl<'a> read_method_callee_helper for reader::Decoder<'a> { - fn read_method_callee(&mut self, xcx: &ExtendedDecodeContext) + fn read_method_callee(&mut self, dcx: &DecodeContext) -> (typeck::ExprAdjustment, MethodCallee) { self.read_struct("MethodCallee", 4, |this| { @@ -632,13 +612,13 @@ impl<'a> read_method_callee_helper for reader::Decoder<'a> { origin: this.read_struct_field("origin", 1, |this| { let method_origin: MethodOrigin = Decodable::decode(this).unwrap(); - Ok(method_origin.tr(xcx)) + Ok(method_origin.tr(dcx)) }).unwrap(), ty: this.read_struct_field("ty", 2, |this| { - Ok(this.read_ty(xcx)) + Ok(this.read_ty(dcx)) }).unwrap(), substs: this.read_struct_field("substs", 3, |this| { - Ok(this.read_substs(xcx)) + Ok(this.read_substs(dcx)) }).unwrap() })) }).unwrap() @@ -646,16 +626,16 @@ impl<'a> read_method_callee_helper for reader::Decoder<'a> { } impl tr for MethodOrigin { - fn tr(&self, xcx: &ExtendedDecodeContext) -> MethodOrigin { + fn tr(&self, dcx: &DecodeContext) -> MethodOrigin { match *self { - typeck::MethodStatic(did) => typeck::MethodStatic(did.tr(xcx)), + typeck::MethodStatic(did) => typeck::MethodStatic(did.tr(dcx)), typeck::MethodStaticUnboxedClosure(did) => { - typeck::MethodStaticUnboxedClosure(did.tr(xcx)) + typeck::MethodStaticUnboxedClosure(did.tr(dcx)) } typeck::MethodParam(ref mp) => { typeck::MethodParam( typeck::MethodParam { - trait_id: mp.trait_id.tr(xcx), + trait_id: mp.trait_id.tr(dcx), .. *mp } ) @@ -663,7 +643,7 @@ impl tr for MethodOrigin { typeck::MethodObject(ref mo) => { typeck::MethodObject( typeck::MethodObject { - trait_id: mo.trait_id.tr(xcx), + trait_id: mo.trait_id.tr(dcx), .. 
*mo } ) @@ -862,10 +842,10 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> { 0 => { typeck::vtable_static( this.read_enum_variant_arg(0u, |this| { - Ok(this.read_def_id_noxcx(cdata)) + Ok(this.read_def_id_nodcx(cdata)) }).unwrap(), this.read_enum_variant_arg(1u, |this| { - Ok(this.read_substs_noxcx(tcx, cdata)) + Ok(this.read_substs_nodcx(tcx, cdata)) }).unwrap(), this.read_enum_variant_arg(2u, |this| { Ok(this.read_vtable_res(tcx, cdata)) @@ -885,7 +865,7 @@ impl<'a> vtable_decoder_helpers for reader::Decoder<'a> { 2 => { typeck::vtable_unboxed_closure( this.read_enum_variant_arg(0u, |this| { - Ok(this.read_def_id_noxcx(cdata)) + Ok(this.read_def_id_nodcx(cdata)) }).unwrap() ) } @@ -1398,40 +1378,40 @@ impl<'a> doc_decoder_helpers for rbml::Doc<'a> { } trait rbml_decoder_decoder_helpers { - fn read_ty(&mut self, xcx: &ExtendedDecodeContext) -> ty::t; - fn read_tys(&mut self, xcx: &ExtendedDecodeContext) -> Vec; - fn read_type_param_def(&mut self, xcx: &ExtendedDecodeContext) + fn read_ty(&mut self, dcx: &DecodeContext) -> ty::t; + fn read_tys(&mut self, dcx: &DecodeContext) -> Vec; + fn read_type_param_def(&mut self, dcx: &DecodeContext) -> ty::TypeParameterDef; - fn read_polytype(&mut self, xcx: &ExtendedDecodeContext) + fn read_polytype(&mut self, dcx: &DecodeContext) -> ty::Polytype; - fn read_existential_bounds(&mut self, xcx: &ExtendedDecodeContext) -> ty::ExistentialBounds; - fn read_substs(&mut self, xcx: &ExtendedDecodeContext) -> subst::Substs; - fn read_auto_adjustment(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoAdjustment; - fn read_unboxed_closure(&mut self, xcx: &ExtendedDecodeContext) + fn read_existential_bounds(&mut self, dcx: &DecodeContext) -> ty::ExistentialBounds; + fn read_substs(&mut self, dcx: &DecodeContext) -> subst::Substs; + fn read_auto_adjustment(&mut self, dcx: &DecodeContext) -> ty::AutoAdjustment; + fn read_unboxed_closure(&mut self, dcx: &DecodeContext) -> ty::UnboxedClosure; - fn read_auto_deref_ref(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoDerefRef; - fn read_autoref(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoRef; - fn read_unsize_kind(&mut self, xcx: &ExtendedDecodeContext) -> ty::UnsizeKind; + fn read_auto_deref_ref(&mut self, dcx: &DecodeContext) -> ty::AutoDerefRef; + fn read_autoref(&mut self, dcx: &DecodeContext) -> ty::AutoRef; + fn read_unsize_kind(&mut self, dcx: &DecodeContext) -> ty::UnsizeKind; fn convert_def_id(&mut self, - xcx: &ExtendedDecodeContext, + dcx: &DecodeContext, source: DefIdSource, did: ast::DefId) -> ast::DefId; // Versions of the type reading functions that don't need the full - // ExtendedDecodeContext. - fn read_ty_noxcx(&mut self, + // DecodeContext. 
+ fn read_ty_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> ty::t; - fn read_tys_noxcx(&mut self, + fn read_tys_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> Vec; - fn read_substs_noxcx(&mut self, tcx: &ty::ctxt, + fn read_substs_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> subst::Substs; } impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { - fn read_ty_noxcx(&mut self, + fn read_ty_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> ty::t { self.read_opaque(|_, doc| { Ok(tydecode::parse_ty_data( @@ -1443,16 +1423,16 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_tys_noxcx(&mut self, + fn read_tys_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> Vec { - self.read_to_vec(|this| Ok(this.read_ty_noxcx(tcx, cdata)) ) + self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) ) .unwrap() .move_iter() .collect() } - fn read_substs_noxcx(&mut self, + fn read_substs_nodcx(&mut self, tcx: &ty::ctxt, cdata: &cstore::crate_metadata) -> subst::Substs @@ -1467,7 +1447,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_ty(&mut self, xcx: &ExtendedDecodeContext) -> ty::t { + fn read_ty(&mut self, dcx: &DecodeContext) -> ty::t { // Note: regions types embed local node ids. In principle, we // should translate these node ids into the new decode // context. However, we do not bother, because region types @@ -1478,10 +1458,10 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { let ty = tydecode::parse_ty_data( doc.data, - xcx.dcx.cdata.cnum, + dcx.cdata.cnum, doc.start, - xcx.dcx.tcx, - |s, a| this.convert_def_id(xcx, s, a)); + dcx.tcx, + |s, a| this.convert_def_id(dcx, s, a)); Ok(ty) }).unwrap(); @@ -1495,23 +1475,23 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { } } - fn read_tys(&mut self, xcx: &ExtendedDecodeContext) -> Vec { - self.read_to_vec(|this| Ok(this.read_ty(xcx))).unwrap().move_iter().collect() + fn read_tys(&mut self, dcx: &DecodeContext) -> Vec { + self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().move_iter().collect() } - fn read_type_param_def(&mut self, xcx: &ExtendedDecodeContext) + fn read_type_param_def(&mut self, dcx: &DecodeContext) -> ty::TypeParameterDef { self.read_opaque(|this, doc| { Ok(tydecode::parse_type_param_def_data( doc.data, doc.start, - xcx.dcx.cdata.cnum, - xcx.dcx.tcx, - |s, a| this.convert_def_id(xcx, s, a))) + dcx.cdata.cnum, + dcx.tcx, + |s, a| this.convert_def_id(dcx, s, a))) }).unwrap() } - fn read_polytype(&mut self, xcx: &ExtendedDecodeContext) + fn read_polytype(&mut self, dcx: &DecodeContext) -> ty::Polytype { self.read_struct("Polytype", 2, |this| { Ok(ty::Polytype { @@ -1521,7 +1501,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { types: this.read_struct_field("types", 0, |this| { Ok(this.read_vec_per_param_space( - |this| this.read_type_param_def(xcx))) + |this| this.read_type_param_def(dcx))) }).unwrap(), regions: @@ -1533,34 +1513,34 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }) }).unwrap(), ty: this.read_struct_field("ty", 1, |this| { - Ok(this.read_ty(xcx)) + Ok(this.read_ty(dcx)) }).unwrap() }) }).unwrap() } - fn read_existential_bounds(&mut self, xcx: &ExtendedDecodeContext) -> ty::ExistentialBounds + fn read_existential_bounds(&mut self, dcx: &DecodeContext) -> ty::ExistentialBounds { self.read_opaque(|this, doc| { Ok(tydecode::parse_existential_bounds_data(doc.data, - 
xcx.dcx.cdata.cnum, + dcx.cdata.cnum, doc.start, - xcx.dcx.tcx, - |s, a| this.convert_def_id(xcx, s, a))) + dcx.tcx, + |s, a| this.convert_def_id(dcx, s, a))) }).unwrap() } - fn read_substs(&mut self, xcx: &ExtendedDecodeContext) -> subst::Substs { + fn read_substs(&mut self, dcx: &DecodeContext) -> subst::Substs { self.read_opaque(|this, doc| { Ok(tydecode::parse_substs_data(doc.data, - xcx.dcx.cdata.cnum, + dcx.cdata.cnum, doc.start, - xcx.dcx.tcx, - |s, a| this.convert_def_id(xcx, s, a))) + dcx.tcx, + |s, a| this.convert_def_id(dcx, s, a))) }).unwrap() } - fn read_auto_adjustment(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoAdjustment { + fn read_auto_adjustment(&mut self, dcx: &DecodeContext) -> ty::AutoAdjustment { self.read_enum("AutoAdjustment", |this| { let variants = ["AutoAddEnv", "AutoDerefRef"]; this.read_enum_variant(variants, |this, i| { @@ -1569,12 +1549,12 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { let store: ty::TraitStore = this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap(); - ty::AutoAddEnv(store.tr(xcx)) + ty::AutoAddEnv(store.tr(dcx)) } 1 => { let auto_deref_ref: ty::AutoDerefRef = this.read_enum_variant_arg(0, - |this| Ok(this.read_auto_deref_ref(xcx))).unwrap(); + |this| Ok(this.read_auto_deref_ref(dcx))).unwrap(); ty::AutoDerefRef(auto_deref_ref) } @@ -1584,7 +1564,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_auto_deref_ref(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoDerefRef { + fn read_auto_deref_ref(&mut self, dcx: &DecodeContext) -> ty::AutoDerefRef { self.read_struct("AutoDerefRef", 2, |this| { Ok(ty::AutoDerefRef { autoderefs: this.read_struct_field("autoderefs", 0, |this| { @@ -1593,7 +1573,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { autoref: this.read_struct_field("autoref", 1, |this| { this.read_option(|this, b| { if b { - Ok(Some(this.read_autoref(xcx))) + Ok(Some(this.read_autoref(dcx))) } else { Ok(None) } @@ -1603,7 +1583,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_autoref(&mut self, xcx: &ExtendedDecodeContext) -> ty::AutoRef { + fn read_autoref(&mut self, dcx: &DecodeContext) -> ty::AutoRef { self.read_enum("AutoRef", |this| { let variants = ["AutoPtr", "AutoUnsize", @@ -1619,25 +1599,25 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { let a: Option> = this.read_enum_variant_arg(2, |this| this.read_option(|this, b| { if b { - Ok(Some(box this.read_autoref(xcx))) + Ok(Some(box this.read_autoref(dcx))) } else { Ok(None) } })).unwrap(); - ty::AutoPtr(r.tr(xcx), m, a) + ty::AutoPtr(r.tr(dcx), m, a) } 1 => { let uk: ty::UnsizeKind = this.read_enum_variant_arg(0, - |this| Ok(this.read_unsize_kind(xcx))).unwrap(); + |this| Ok(this.read_unsize_kind(dcx))).unwrap(); ty::AutoUnsize(uk) } 2 => { let uk: ty::UnsizeKind = this.read_enum_variant_arg(0, - |this| Ok(this.read_unsize_kind(xcx))).unwrap(); + |this| Ok(this.read_unsize_kind(dcx))).unwrap(); ty::AutoUnsizeUniq(uk) } @@ -1647,7 +1627,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { let a: Option> = this.read_enum_variant_arg(1, |this| this.read_option(|this, b| { if b { - Ok(Some(box this.read_autoref(xcx))) + Ok(Some(box this.read_autoref(dcx))) } else { Ok(None) } @@ -1661,7 +1641,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_unsize_kind(&mut self, xcx: &ExtendedDecodeContext) -> ty::UnsizeKind { + fn read_unsize_kind(&mut self, dcx: 
&DecodeContext) -> ty::UnsizeKind { self.read_enum("UnsizeKind", |this| { let variants = ["UnsizeLength", "UnsizeStruct", "UnsizeVtable"]; this.read_enum_variant(variants, |this, i| { @@ -1675,7 +1655,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { 1 => { let uk: ty::UnsizeKind = this.read_enum_variant_arg(0, - |this| Ok(this.read_unsize_kind(xcx))).unwrap(); + |this| Ok(this.read_unsize_kind(dcx))).unwrap(); let idx: uint = this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap(); @@ -1684,13 +1664,13 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { 2 => { let b = this.read_enum_variant_arg( - 0, |this| Ok(this.read_existential_bounds(xcx))).unwrap(); + 0, |this| Ok(this.read_existential_bounds(dcx))).unwrap(); let def_id: ast::DefId = this.read_enum_variant_arg(1, |this| Decodable::decode(this)).unwrap(); let substs = this.read_enum_variant_arg(2, - |this| Ok(this.read_substs(xcx))).unwrap(); + |this| Ok(this.read_substs(dcx))).unwrap(); - ty::UnsizeVtable(b, def_id.tr(xcx), substs) + ty::UnsizeVtable(b, def_id.tr(dcx), substs) } _ => fail!("bad enum variant for ty::UnsizeKind") }) @@ -1698,15 +1678,15 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { }).unwrap() } - fn read_unboxed_closure(&mut self, xcx: &ExtendedDecodeContext) + fn read_unboxed_closure(&mut self, dcx: &DecodeContext) -> ty::UnboxedClosure { let closure_type = self.read_opaque(|this, doc| { Ok(tydecode::parse_ty_closure_data( doc.data, - xcx.dcx.cdata.cnum, + dcx.cdata.cnum, doc.start, - xcx.dcx.tcx, - |s, a| this.convert_def_id(xcx, s, a))) + dcx.tcx, + |s, a| this.convert_def_id(dcx, s, a))) }).unwrap(); let variants = [ "FnUnboxedClosureKind", @@ -1728,7 +1708,7 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { } fn convert_def_id(&mut self, - xcx: &ExtendedDecodeContext, + dcx: &DecodeContext, source: tydecode::DefIdSource, did: ast::DefId) -> ast::DefId { @@ -1761,21 +1741,20 @@ impl<'a> rbml_decoder_decoder_helpers for reader::Decoder<'a> { */ let r = match source { - NominalType | TypeWithId | RegionParameter => xcx.tr_def_id(did), - TypeParameter => xcx.tr_intern_def_id(did) + NominalType | TypeWithId | RegionParameter => dcx.tr_def_id(did), + TypeParameter => dcx.tr_intern_def_id(did) }; debug!("convert_def_id(source={:?}, did={:?})={:?}", source, did, r); return r; } } -fn decode_side_tables(xcx: &ExtendedDecodeContext, +fn decode_side_tables(dcx: &DecodeContext, ast_doc: rbml::Doc) { - let dcx = xcx.dcx; let tbl_doc = ast_doc.get(c::tag_table as uint); reader::docs(tbl_doc, |tag, entry_doc| { let id0 = entry_doc.get(c::tag_table_id as uint).as_int(); - let id = xcx.tr_id(id0 as ast::NodeId); + let id = dcx.tr_id(id0 as ast::NodeId); debug!(">> Side table document with tag 0x{:x} \ found for id {} (orig {})", @@ -1783,7 +1762,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, match c::astencode_tag::from_uint(tag) { None => { - xcx.dcx.tcx.sess.bug( + dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", tag).as_slice()); } @@ -1794,36 +1773,36 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, match value { c::tag_table_def => { - let def = decode_def(xcx, val_doc); + let def = decode_def(dcx, val_doc); dcx.tcx.def_map.borrow_mut().insert(id, def); } c::tag_table_node_type => { - let ty = val_dsr.read_ty(xcx); + let ty = val_dsr.read_ty(dcx); debug!("inserting ty for node {:?}: {}", id, ty_to_string(dcx.tcx, ty)); dcx.tcx.node_types.borrow_mut().insert(id as uint, ty); } c::tag_table_item_subst => { 
let item_substs = ty::ItemSubsts { - substs: val_dsr.read_substs(xcx) + substs: val_dsr.read_substs(dcx) }; dcx.tcx.item_substs.borrow_mut().insert( id, item_substs); } c::tag_table_freevars => { let fv_info = val_dsr.read_to_vec(|val_dsr| { - Ok(val_dsr.read_freevar_entry(xcx)) + Ok(val_dsr.read_freevar_entry(dcx)) }).unwrap().move_iter().collect(); dcx.tcx.freevars.borrow_mut().insert(id, fv_info); } c::tag_table_upvar_borrow_map => { let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap(); let upvar_id = ty::UpvarId { - var_id: xcx.tr_id(var_id), + var_id: dcx.tr_id(var_id), closure_expr_id: id }; let ub: ty::UpvarBorrow = Decodable::decode(val_dsr).unwrap(); - dcx.tcx.upvar_borrow_map.borrow_mut().insert(upvar_id, ub.tr(xcx)); + dcx.tcx.upvar_borrow_map.borrow_mut().insert(upvar_id, ub.tr(dcx)); } c::tag_table_capture_modes => { let capture_mode = val_dsr.read_capture_mode(); @@ -1833,16 +1812,16 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, .insert(id, capture_mode); } c::tag_table_tcache => { - let pty = val_dsr.read_polytype(xcx); + let pty = val_dsr.read_polytype(dcx); let lid = ast::DefId { krate: ast::LOCAL_CRATE, node: id }; dcx.tcx.tcache.borrow_mut().insert(lid, pty); } c::tag_table_param_defs => { - let bounds = val_dsr.read_type_param_def(xcx); + let bounds = val_dsr.read_type_param_def(dcx); dcx.tcx.ty_param_defs.borrow_mut().insert(id, bounds); } c::tag_table_method_map => { - let (adjustment, method) = val_dsr.read_method_callee(xcx); + let (adjustment, method) = val_dsr.read_method_callee(dcx); let method_call = MethodCall { expr_id: id, adjustment: adjustment @@ -1851,8 +1830,8 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, } c::tag_table_vtable_map => { let (adjustment, vtable_res) = - val_dsr.read_vtable_res_with_key(xcx.dcx.tcx, - xcx.dcx.cdata); + val_dsr.read_vtable_res_with_key(dcx.tcx, + dcx.cdata); let vtable_key = MethodCall { expr_id: id, adjustment: adjustment @@ -1860,12 +1839,12 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, dcx.tcx.vtable_map.borrow_mut().insert(vtable_key, vtable_res); } c::tag_table_adjustments => { - let adj: ty::AutoAdjustment = val_dsr.read_auto_adjustment(xcx); + let adj: ty::AutoAdjustment = val_dsr.read_auto_adjustment(dcx); dcx.tcx.adjustments.borrow_mut().insert(id, adj); } c::tag_table_unboxed_closures => { let unboxed_closure = - val_dsr.read_unboxed_closure(xcx); + val_dsr.read_unboxed_closure(dcx); dcx.tcx .unboxed_closures .borrow_mut() @@ -1873,7 +1852,7 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, unboxed_closure); } _ => { - xcx.dcx.tcx.sess.bug( + dcx.tcx.sess.bug( format!("unknown tag found in side tables: {:x}", tag).as_slice()); } @@ -1890,17 +1869,17 @@ fn decode_side_tables(xcx: &ExtendedDecodeContext, // Testing of astencode_gen #[cfg(test)] -fn encode_item_ast(rbml_w: &mut Encoder, item: Gc) { +fn encode_item_ast(rbml_w: &mut Encoder, item: &ast::Item) { rbml_w.start_tag(c::tag_tree as uint); (*item).encode(rbml_w); rbml_w.end_tag(); } #[cfg(test)] -fn decode_item_ast(par_doc: rbml::Doc) -> Gc { +fn decode_item_ast(par_doc: rbml::Doc) -> ast::Item { let chi_doc = par_doc.get(c::tag_tree as uint); let mut d = reader::Decoder::new(chi_doc); - box(GC) Decodable::decode(&mut d).unwrap() + Decodable::decode(&mut d).unwrap() } #[cfg(test)] @@ -1935,17 +1914,14 @@ fn mk_ctxt() -> parse::ParseSess { } #[cfg(test)] -fn roundtrip(in_item: Option>) { +fn roundtrip(in_item: Option>) { let in_item = in_item.unwrap(); let mut wr = SeekableMemWriter::new(); - { - let mut rbml_w = 
writer::Encoder::new(&mut wr); - encode_item_ast(&mut rbml_w, in_item); - } + encode_item_ast(&mut writer::Encoder::new(&mut wr), &*in_item); let rbml_doc = rbml::Doc::new(wr.get_ref()); let out_item = decode_item_ast(rbml_doc); - assert!(in_item == out_item); + assert!(*in_item == out_item); } #[test] diff --git a/src/librustc/middle/borrowck/graphviz.rs b/src/librustc/middle/borrowck/graphviz.rs index e75378de5a5c7..c789db5be0c01 100644 --- a/src/librustc/middle/borrowck/graphviz.rs +++ b/src/librustc/middle/borrowck/graphviz.rs @@ -44,7 +44,7 @@ impl Variant { } pub struct DataflowLabeller<'a, 'tcx: 'a> { - pub inner: cfg_dot::LabelledCFG<'a>, + pub inner: cfg_dot::LabelledCFG<'a, 'tcx>, pub variants: Vec, pub borrowck_ctxt: &'a BorrowckCtxt<'a, 'tcx>, pub analysis_data: &'a borrowck::AnalysisData<'a, 'tcx>, diff --git a/src/librustc/middle/borrowck/mod.rs b/src/librustc/middle/borrowck/mod.rs index acc2f47a0fe68..0d584a7664f67 100644 --- a/src/librustc/middle/borrowck/mod.rs +++ b/src/librustc/middle/borrowck/mod.rs @@ -22,9 +22,7 @@ use middle::mem_categorization as mc; use middle::ty; use util::ppaux::{note_and_explain_region, Repr, UserString}; -use std::cell::{Cell}; use std::rc::Rc; -use std::gc::{Gc, GC}; use std::string::String; use syntax::ast; use syntax::ast_map; @@ -71,34 +69,33 @@ impl<'a, 'tcx, 'v> Visitor<'v> for BorrowckCtxt<'a, 'tcx> { } } -pub fn check_crate(tcx: &ty::ctxt, - krate: &ast::Crate) { +pub fn check_crate(tcx: &ty::ctxt) { let mut bccx = BorrowckCtxt { tcx: tcx, - stats: box(GC) BorrowStats { - loaned_paths_same: Cell::new(0), - loaned_paths_imm: Cell::new(0), - stable_paths: Cell::new(0), - guaranteed_paths: Cell::new(0), + stats: BorrowStats { + loaned_paths_same: 0, + loaned_paths_imm: 0, + stable_paths: 0, + guaranteed_paths: 0 } }; - visit::walk_crate(&mut bccx, krate); + visit::walk_crate(&mut bccx, tcx.map.krate()); if tcx.sess.borrowck_stats() { println!("--- borrowck stats ---"); println!("paths requiring guarantees: {}", - bccx.stats.guaranteed_paths.get()); + bccx.stats.guaranteed_paths); println!("paths requiring loans : {}", - make_stat(&bccx, bccx.stats.loaned_paths_same.get())); + make_stat(&bccx, bccx.stats.loaned_paths_same)); println!("paths requiring imm loans : {}", - make_stat(&bccx, bccx.stats.loaned_paths_imm.get())); + make_stat(&bccx, bccx.stats.loaned_paths_imm)); println!("stable paths : {}", - make_stat(&bccx, bccx.stats.stable_paths.get())); + make_stat(&bccx, bccx.stats.stable_paths)); } fn make_stat(bccx: &BorrowckCtxt, stat: uint) -> String { - let total = bccx.stats.guaranteed_paths.get() as f64; + let total = bccx.stats.guaranteed_paths as f64; let perc = if total == 0.0 { 0.0 } else { stat as f64 * 100.0 / total }; format!("{} ({:.0f}%)", stat, perc) } @@ -110,8 +107,8 @@ fn borrowck_item(this: &mut BorrowckCtxt, item: &ast::Item) { // loan step is intended for things that have a data // flow dependent conditions. 
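A smaller but telling simplification in borrowck above: the statistics no longer live behind `Gc<BorrowStats>` with `Cell<uint>` counters read via `.get()`; the `BorrowckCtxt` now owns a plain `BorrowStats` whose fields are ordinary `uint`s. The shape of that change, as a sketch (field names follow the struct above; the `note_loaned_path` helper is invented, and the old get/set bumping is an assumption based on the removed `Cell` fields):

    struct BorrowStats {
        loaned_paths_same: uint,
        loaned_paths_imm: uint,
        stable_paths: uint,
        guaranteed_paths: uint,
    }

    struct BorrowckCtxt { stats: BorrowStats }

    impl BorrowckCtxt {
        // previously this would have gone through Cell::get/Cell::set on a shared Gc;
        // with owned stats it is a plain field update behind &mut self
        fn note_loaned_path(&mut self) { self.stats.loaned_paths_same += 1; }
    }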
match item.node { - ast::ItemStatic(_, _, ex) => { - gather_loans::gather_loans_in_static_initializer(this, &*ex); + ast::ItemStatic(_, _, ref ex) => { + gather_loans::gather_loans_in_static_initializer(this, &**ex); } _ => { visit::walk_item(this, item); @@ -206,11 +203,11 @@ pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( let mut bccx = BorrowckCtxt { tcx: tcx, - stats: box(GC) BorrowStats { - loaned_paths_same: Cell::new(0), - loaned_paths_imm: Cell::new(0), - stable_paths: Cell::new(0), - guaranteed_paths: Cell::new(0), + stats: BorrowStats { + loaned_paths_same: 0, + loaned_paths_imm: 0, + stable_paths: 0, + guaranteed_paths: 0 } }; @@ -234,14 +231,14 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, // Statistics: - stats: Gc, + stats: BorrowStats } -pub struct BorrowStats { - loaned_paths_same: Cell, - loaned_paths_imm: Cell, - stable_paths: Cell, - guaranteed_paths: Cell, +struct BorrowStats { + loaned_paths_same: uint, + loaned_paths_imm: uint, + stable_paths: uint, + guaranteed_paths: uint } pub type BckResult = Result; @@ -290,9 +287,9 @@ pub fn closure_to_block(closure_id: ast::NodeId, tcx: &ty::ctxt) -> ast::NodeId { match tcx.map.get(closure_id) { ast_map::NodeExpr(expr) => match expr.node { - ast::ExprProc(_, block) | - ast::ExprFnBlock(_, _, block) | - ast::ExprUnboxedFn(_, _, _, block) => { block.id } + ast::ExprProc(_, ref block) | + ast::ExprFnBlock(_, _, ref block) | + ast::ExprUnboxedFn(_, _, _, ref block) => { block.id } _ => fail!("encountered non-closure id: {}", closure_id) }, _ => fail!("encountered non-expr id: {}", closure_id) diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs index 6e9b27655af79..05cc61a754749 100644 --- a/src/librustc/middle/cfg/construct.rs +++ b/src/librustc/middle/cfg/construct.rs @@ -15,10 +15,9 @@ use middle::typeck; use middle::ty; use syntax::ast; use syntax::ast_util; +use syntax::ptr::P; use util::nodemap::NodeMap; -use std::gc::Gc; - struct CFGBuilder<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, exit_map: NodeMap, @@ -69,15 +68,15 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn block(&mut self, blk: &ast::Block, pred: CFGIndex) -> CFGIndex { let mut stmts_exit = pred; for stmt in blk.stmts.iter() { - stmts_exit = self.stmt(stmt.clone(), stmts_exit); + stmts_exit = self.stmt(&**stmt, stmts_exit); } - let expr_exit = self.opt_expr(blk.expr.clone(), stmts_exit); + let expr_exit = self.opt_expr(&blk.expr, stmts_exit); self.add_node(blk.id, [expr_exit]) } - fn stmt(&mut self, stmt: Gc, pred: CFGIndex) -> CFGIndex { + fn stmt(&mut self, stmt: &ast::Stmt, pred: CFGIndex) -> CFGIndex { match stmt.node { ast::StmtDecl(ref decl, id) => { let exit = self.decl(&**decl, pred); @@ -85,7 +84,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } ast::StmtExpr(ref expr, id) | ast::StmtSemi(ref expr, id) => { - let exit = self.expr(expr.clone(), pred); + let exit = self.expr(&**expr, pred); self.add_node(id, [exit]) } @@ -98,7 +97,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn decl(&mut self, decl: &ast::Decl, pred: CFGIndex) -> CFGIndex { match decl.node { ast::DeclLocal(ref local) => { - let init_exit = self.opt_expr(local.init.clone(), pred); + let init_exit = self.opt_expr(&local.init, pred); self.pat(&*local.pat, init_exit) } @@ -127,24 +126,20 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { ast::PatEnum(_, Some(ref subpats)) | ast::PatTup(ref subpats) => { - let pats_exit = - self.pats_all(subpats.iter().map(|p| p.clone()), pred); + let pats_exit = self.pats_all(subpats.iter(), pred); self.add_node(pat.id, 
[pats_exit]) } ast::PatStruct(_, ref subpats, _) => { let pats_exit = - self.pats_all(subpats.iter().map(|f| f.pat.clone()), pred); + self.pats_all(subpats.iter().map(|f| &f.pat), pred); self.add_node(pat.id, [pats_exit]) } ast::PatVec(ref pre, ref vec, ref post) => { - let pre_exit = - self.pats_all(pre.iter().map(|p| *p), pred); - let vec_exit = - self.pats_all(vec.iter().map(|p| *p), pre_exit); - let post_exit = - self.pats_all(post.iter().map(|p| *p), vec_exit); + let pre_exit = self.pats_all(pre.iter(), pred); + let vec_exit = self.pats_all(vec.iter(), pre_exit); + let post_exit = self.pats_all(post.iter(), vec_exit); self.add_node(pat.id, [post_exit]) } @@ -154,16 +149,16 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } } - fn pats_all>>(&mut self, - pats: I, - pred: CFGIndex) -> CFGIndex { + fn pats_all<'a, I: Iterator<&'a P>>(&mut self, + pats: I, + pred: CFGIndex) -> CFGIndex { //! Handles case where all of the patterns must match. let mut pats = pats; - pats.fold(pred, |pred, pat| self.pat(&*pat, pred)) + pats.fold(pred, |pred, pat| self.pat(&**pat, pred)) } fn pats_any(&mut self, - pats: &[Gc], + pats: &[P], pred: CFGIndex) -> CFGIndex { //! Handles case where just one of the patterns must match. @@ -171,15 +166,15 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.pat(&*pats[0], pred) } else { let collect = self.add_dummy_node([]); - for &pat in pats.iter() { - let pat_exit = self.pat(&*pat, pred); + for pat in pats.iter() { + let pat_exit = self.pat(&**pat, pred); self.add_contained_edge(pat_exit, collect); } collect } } - fn expr(&mut self, expr: Gc, pred: CFGIndex) -> CFGIndex { + fn expr(&mut self, expr: &ast::Expr, pred: CFGIndex) -> CFGIndex { match expr.node { ast::ExprBlock(ref blk) => { let blk_exit = self.block(&**blk, pred); @@ -201,7 +196,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 3 v 4 // [..expr..] // - let cond_exit = self.expr(cond.clone(), pred); // 1 + let cond_exit = self.expr(&**cond, pred); // 1 let then_exit = self.block(&**then, cond_exit); // 2 self.add_node(expr.id, [cond_exit, then_exit]) // 3,4 } @@ -221,9 +216,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 4 v 5 // [..expr..] // - let cond_exit = self.expr(cond.clone(), pred); // 1 + let cond_exit = self.expr(&**cond, pred); // 1 let then_exit = self.block(&**then, cond_exit); // 2 - let else_exit = self.expr(otherwise.clone(), cond_exit); // 3 + let else_exit = self.expr(&**otherwise, cond_exit); // 3 self.add_node(expr.id, [then_exit, else_exit]) // 4, 5 } @@ -247,7 +242,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // Is the condition considered part of the loop? let loopback = self.add_dummy_node([pred]); // 1 - let cond_exit = self.expr(cond.clone(), loopback); // 2 + let cond_exit = self.expr(&**cond, loopback); // 2 let expr_exit = self.add_node(expr.id, [cond_exit]); // 3 self.loop_scopes.push(LoopScope { loop_id: expr.id, @@ -283,7 +278,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // Note that `break` and `continue` statements // may cause additional edges. - let head = self.expr(head.clone(), pred); // 1 + let head = self.expr(&**head, pred); // 1 let loopback = self.add_dummy_node([head]); // 2 let cond = self.add_dummy_node([loopback]); // 3 let expr_exit = self.add_node(expr.id, [cond]); // 4 @@ -353,7 +348,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 6 v v // [.....expr.....] 
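The cfg-construction rewrite in the hunks just below all serves one signature change: `straightline`, `call`, and `exprs` now accept any iterator of `&ast::Expr` instead of slices or iterators of copied `Gc<ast::Expr>` pointers, so callers feed borrowed subexpressions straight from the node without building temporary vectors, and `Option` doubles as a zero-or-one iterator (`None::<ast::Expr>.iter()`, `Some(&**e).move_iter()`). The helpers reduce to a fold that threads the predecessor node through each subexpression; a sketch of that shape with a stand-in element type (`chain` and the `int` payload are invented for illustration):

    fn chain<'a, I: Iterator<&'a int>>(mut steps: I, start: int) -> int {
        // mirrors exprs(): fold the predecessor through each element in order
        steps.fold(start, |pred, step| pred + *step)
    }

    fn main() {
        let xs = vec!(1i, 2i, 3i);
        assert_eq!(chain(xs.iter(), 0), 6);
        // Option as a zero-or-one iterator, like None::<ast::Expr>.iter() below
        assert_eq!(chain(None::<int>.iter(), 5), 5);
    }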
// - let discr_exit = self.expr(discr.clone(), pred); // 1 + let discr_exit = self.expr(&**discr, pred); // 1 let expr_exit = self.add_node(expr.id, []); let mut cond_exit = discr_exit; @@ -361,10 +356,9 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { cond_exit = self.add_dummy_node([cond_exit]); // 2 let pats_exit = self.pats_any(arm.pats.as_slice(), cond_exit); // 3 - let guard_exit = self.opt_expr(arm.guard, + let guard_exit = self.opt_expr(&arm.guard, pats_exit); // 4 - let body_exit = self.expr(arm.body.clone(), - guard_exit); // 5 + let body_exit = self.expr(&*arm.body, guard_exit); // 5 self.add_contained_edge(body_exit, expr_exit); // 6 } expr_exit @@ -385,13 +379,13 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { // v 3 v 4 // [..exit..] // - let l_exit = self.expr(l.clone(), pred); // 1 - let r_exit = self.expr(r.clone(), l_exit); // 2 + let l_exit = self.expr(&**l, pred); // 1 + let r_exit = self.expr(&**r, l_exit); // 2 self.add_node(expr.id, [l_exit, r_exit]) // 3,4 } ast::ExprRet(ref v) => { - let v_exit = self.opt_expr(v.clone(), pred); + let v_exit = self.opt_expr(v, pred); let b = self.add_node(expr.id, [v_exit]); self.add_returning_edge(expr, b); self.add_node(ast::DUMMY_NODE_ID, []) @@ -414,62 +408,60 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } ast::ExprVec(ref elems) => { - self.straightline(expr, pred, elems.as_slice()) + self.straightline(expr, pred, elems.iter().map(|e| &**e)) } ast::ExprCall(ref func, ref args) => { - self.call(expr, pred, func.clone(), args.as_slice()) + self.call(expr, pred, &**func, args.iter().map(|e| &**e)) } ast::ExprMethodCall(_, _, ref args) => { - self.call(expr, pred, *args.get(0), args.slice_from(1)) + self.call(expr, pred, &**args.get(0), args.slice_from(1).iter().map(|e| &**e)) } ast::ExprIndex(ref l, ref r) | - ast::ExprBinary(_, ref l, ref r) if self.is_method_call(&*expr) => { - self.call(expr, pred, l.clone(), [r.clone()]) + ast::ExprBinary(_, ref l, ref r) if self.is_method_call(expr) => { + self.call(expr, pred, &**l, Some(&**r).move_iter()) } - ast::ExprUnary(_, ref e) if self.is_method_call(&*expr) => { - self.call(expr, pred, e.clone(), []) + ast::ExprUnary(_, ref e) if self.is_method_call(expr) => { + self.call(expr, pred, &**e, None::.iter()) } ast::ExprTup(ref exprs) => { - self.straightline(expr, pred, exprs.as_slice()) + self.straightline(expr, pred, exprs.iter().map(|e| &**e)) } - ast::ExprStruct(_, ref fields, base) => { + ast::ExprStruct(_, ref fields, ref base) => { let base_exit = self.opt_expr(base, pred); - let field_exprs: Vec> = - fields.iter().map(|f| f.expr).collect(); - self.straightline(expr, base_exit, field_exprs.as_slice()) + self.straightline(expr, base_exit, fields.iter().map(|f| &*f.expr)) } - ast::ExprRepeat(elem, count) => { - self.straightline(expr, pred, [elem, count]) + ast::ExprRepeat(ref elem, ref count) => { + self.straightline(expr, pred, [elem, count].iter().map(|&e| &**e)) } - ast::ExprAssign(l, r) | - ast::ExprAssignOp(_, l, r) => { - self.straightline(expr, pred, [r, l]) + ast::ExprAssign(ref l, ref r) | + ast::ExprAssignOp(_, ref l, ref r) => { + self.straightline(expr, pred, [r, l].iter().map(|&e| &**e)) } - ast::ExprIndex(l, r) | - ast::ExprBinary(_, l, r) => { // NB: && and || handled earlier - self.straightline(expr, pred, [l, r]) + ast::ExprIndex(ref l, ref r) | + ast::ExprBinary(_, ref l, ref r) => { // NB: && and || handled earlier + self.straightline(expr, pred, [l, r].iter().map(|&e| &**e)) } - ast::ExprBox(p, e) => { - self.straightline(expr, pred, [p, e]) + ast::ExprBox(ref p, ref e) => { + 
self.straightline(expr, pred, [p, e].iter().map(|&e| &**e)) } - ast::ExprAddrOf(_, e) | - ast::ExprCast(e, _) | - ast::ExprUnary(_, e) | - ast::ExprParen(e) | - ast::ExprField(e, _, _) | - ast::ExprTupField(e, _, _) => { - self.straightline(expr, pred, [e]) + ast::ExprAddrOf(_, ref e) | + ast::ExprCast(ref e, _) | + ast::ExprUnary(_, ref e) | + ast::ExprParen(ref e) | + ast::ExprField(ref e, _, _) | + ast::ExprTupField(ref e, _, _) => { + self.straightline(expr, pred, Some(&**e).move_iter()) } ast::ExprInlineAsm(ref inline_asm) => { @@ -477,13 +469,13 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { let outputs = inline_asm.outputs.iter(); let post_inputs = self.exprs(inputs.map(|a| { debug!("cfg::construct InlineAsm id:{} input:{:?}", expr.id, a); - let &(_, expr) = a; - expr + let &(_, ref expr) = a; + &**expr }), pred); let post_outputs = self.exprs(outputs.map(|a| { debug!("cfg::construct InlineAsm id:{} output:{:?}", expr.id, a); - let &(_, expr, _) = a; - expr + let &(_, ref expr, _) = a; + &**expr }), post_inputs); self.add_node(expr.id, [post_outputs]) } @@ -494,16 +486,16 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { ast::ExprUnboxedFn(..) | ast::ExprLit(..) | ast::ExprPath(..) => { - self.straightline(expr, pred, []) + self.straightline(expr, pred, None::.iter()) } } } - fn call(&mut self, - call_expr: Gc, + fn call<'a, I: Iterator<&'a ast::Expr>>(&mut self, + call_expr: &ast::Expr, pred: CFGIndex, - func_or_rcvr: Gc, - args: &[Gc]) -> CFGIndex { + func_or_rcvr: &ast::Expr, + args: I) -> CFGIndex { let func_or_rcvr_exit = self.expr(func_or_rcvr, pred); let ret = self.straightline(call_expr, func_or_rcvr_exit, args); @@ -516,28 +508,27 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } } - fn exprs>>(&mut self, - mut exprs: I, - pred: CFGIndex) -> CFGIndex { + fn exprs<'a, I: Iterator<&'a ast::Expr>>(&mut self, + mut exprs: I, + pred: CFGIndex) -> CFGIndex { //! Constructs graph for `exprs` evaluated in order exprs.fold(pred, |p, e| self.expr(e, p)) } fn opt_expr(&mut self, - opt_expr: Option>, + opt_expr: &Option>, pred: CFGIndex) -> CFGIndex { //! Constructs graph for `opt_expr` evaluated, if Some - - opt_expr.iter().fold(pred, |p, &e| self.expr(e, p)) + opt_expr.iter().fold(pred, |p, e| self.expr(&**e, p)) } - fn straightline(&mut self, - expr: Gc, + fn straightline<'a, I: Iterator<&'a ast::Expr>>(&mut self, + expr: &ast::Expr, pred: CFGIndex, - subexprs: &[Gc]) -> CFGIndex { + subexprs: I) -> CFGIndex { //! 
Handles case of an expression that evaluates `subexprs` in order - let subexprs_exit = self.exprs(subexprs.iter().map(|&e|e), pred); + let subexprs_exit = self.exprs(subexprs, pred); self.add_node(expr.id, [subexprs_exit]) } @@ -566,7 +557,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } fn add_exiting_edge(&mut self, - from_expr: Gc, + from_expr: &ast::Expr, from_index: CFGIndex, to_loop: LoopScope, to_index: CFGIndex) { @@ -581,7 +572,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } fn add_returning_edge(&mut self, - _from_expr: Gc, + _from_expr: &ast::Expr, from_index: CFGIndex) { let mut data = CFGEdgeData { exiting_scopes: vec!(), @@ -593,7 +584,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } fn find_scope(&self, - expr: Gc, + expr: &ast::Expr, label: Option) -> LoopScope { match label { None => { diff --git a/src/librustc/middle/cfg/graphviz.rs b/src/librustc/middle/cfg/graphviz.rs index 0cccae8b8c9cd..84b96edc12652 100644 --- a/src/librustc/middle/cfg/graphviz.rs +++ b/src/librustc/middle/cfg/graphviz.rs @@ -22,8 +22,8 @@ use middle::cfg; pub type Node<'a> = (cfg::CFGIndex, &'a cfg::CFGNode); pub type Edge<'a> = &'a cfg::CFGEdge; -pub struct LabelledCFG<'a>{ - pub ast_map: &'a ast_map::Map, +pub struct LabelledCFG<'a, 'ast: 'a> { + pub ast_map: &'a ast_map::Map<'ast>, pub cfg: &'a cfg::CFG, pub name: String, } @@ -49,7 +49,7 @@ fn replace_newline_with_backslash_l(s: String) -> String { } } -impl<'a> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> { +impl<'a, 'ast> dot::Labeller<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { fn graph_id(&'a self) -> dot::Id<'a> { dot::Id::new(self.name.as_slice()) } fn node_id(&'a self, &(i,_): &Node<'a>) -> dot::Id<'a> { @@ -110,7 +110,7 @@ impl<'a> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for &'a cfg::CFG { } } -impl<'a> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a> +impl<'a, 'ast> dot::GraphWalk<'a, Node<'a>, Edge<'a>> for LabelledCFG<'a, 'ast> { fn nodes(&self) -> dot::Nodes<'a, Node<'a>> { self.cfg.nodes() } fn edges(&self) -> dot::Edges<'a, Edge<'a>> { self.cfg.edges() } diff --git a/src/librustc/middle/check_const.rs b/src/librustc/middle/check_const.rs index c0160b72784ca..303961105b526 100644 --- a/src/librustc/middle/check_const.rs +++ b/src/librustc/middle/check_const.rs @@ -53,15 +53,16 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { } } -pub fn check_crate(krate: &Crate, tcx: &ty::ctxt) { - visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, in_const: false }, krate); +pub fn check_crate(tcx: &ty::ctxt) { + visit::walk_crate(&mut CheckCrateVisitor { tcx: tcx, in_const: false }, + tcx.map.krate()); tcx.sess.abort_if_errors(); } fn check_item(v: &mut CheckCrateVisitor, it: &Item) { match it.node { - ItemStatic(_, _, ex) => { - v.inside_const(|v| v.visit_expr(&*ex)); + ItemStatic(_, _, ref ex) => { + v.inside_const(|v| v.visit_expr(&**ex)); check_item_recursion(&v.tcx.sess, &v.tcx.map, &v.tcx.def_map, it); } ItemEnum(ref enum_definition, _) => { @@ -78,9 +79,9 @@ fn check_item(v: &mut CheckCrateVisitor, it: &Item) { fn check_pat(v: &mut CheckCrateVisitor, p: &Pat) { fn is_str(e: &Expr) -> bool { match e.node { - ExprBox(_, expr) => { + ExprBox(_, ref expr) => { match expr.node { - ExprLit(lit) => ast_util::lit_is_str(lit), + ExprLit(ref lit) => ast_util::lit_is_str(&**lit), _ => false, } } @@ -106,7 +107,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) { span_err!(v.tcx.sess, e.span, E0010, "cannot do allocations in constant expressions"); return; } - ExprLit(lit) if ast_util::lit_is_str(lit) => {} 
+ ExprLit(ref lit) if ast_util::lit_is_str(&**lit) => {} ExprBinary(..) | ExprUnary(..) => { let method_call = typeck::MethodCall::expr(e.id); if v.tcx.method_map.borrow().contains_key(&method_call) { @@ -149,7 +150,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) { } } } - ExprCall(callee, _) => { + ExprCall(ref callee, _) => { match v.tcx.def_map.borrow().find(&callee.id) { Some(&DefStruct(..)) => {} // OK. Some(&DefVariant(..)) => {} // OK. @@ -194,7 +195,7 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) { ExprTup(..) | ExprRepeat(..) | ExprStruct(..) => { } - ExprAddrOf(_, inner) => { + ExprAddrOf(_, ref inner) => { match inner.node { // Mutable slices are allowed. ExprVec(_) => {} @@ -214,12 +215,13 @@ fn check_expr(v: &mut CheckCrateVisitor, e: &Expr) { visit::walk_expr(v, e); } -struct CheckItemRecursionVisitor<'a> { +struct CheckItemRecursionVisitor<'a, 'ast: 'a> { root_it: &'a Item, sess: &'a Session, - ast_map: &'a ast_map::Map, + ast_map: &'a ast_map::Map<'ast>, def_map: &'a resolve::DefMap, - idstack: Vec } + idstack: Vec +} // Make sure a const item doesn't recursively refer to itself // FIXME: Should use the dependency graph when it's available (#1356) @@ -238,7 +240,7 @@ pub fn check_item_recursion<'a>(sess: &'a Session, visitor.visit_item(it); } -impl<'a, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a> { +impl<'a, 'ast, 'v> Visitor<'v> for CheckItemRecursionVisitor<'a, 'ast> { fn visit_item(&mut self, it: &Item) { if self.idstack.iter().any(|x| x == &(it.id)) { self.sess.span_fatal(self.root_it.span, "recursive constant"); diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs index 6e8f6530075e6..eb308f903d3dd 100644 --- a/src/librustc/middle/check_match.rs +++ b/src/librustc/middle/check_match.rs @@ -19,19 +19,26 @@ use middle::pat_util::*; use middle::ty::*; use middle::ty; use std::fmt; -use std::gc::{Gc, GC}; use std::iter::AdditiveIterator; use std::iter::range_inclusive; +use std::slice; use syntax::ast::*; use syntax::ast_util::walk_pat; use syntax::codemap::{Span, Spanned, DUMMY_SP}; use syntax::fold::{Folder, noop_fold_pat}; use syntax::print::pprust::pat_to_string; use syntax::parse::token; +use syntax::ptr::P; use syntax::visit::{mod, Visitor, FnKind}; use util::ppaux::ty_to_string; -struct Matrix(Vec>>); +static DUMMY_WILD_PAT: Pat = Pat { + id: DUMMY_NODE_ID, + node: PatWild(PatWildSingle), + span: DUMMY_SP +}; + +struct Matrix<'a>(Vec>); /// Pretty-printer for matrices of patterns, example: /// ++++++++++++++++++++++++++ @@ -45,7 +52,7 @@ struct Matrix(Vec>>); /// ++++++++++++++++++++++++++ /// + _ + [_, _, ..tail] + /// ++++++++++++++++++++++++++ -impl fmt::Show for Matrix { +impl<'a> fmt::Show for Matrix<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "\n")); @@ -80,8 +87,8 @@ impl fmt::Show for Matrix { } } -impl FromIterator>> for Matrix { - fn from_iter>>>(mut iterator: T) -> Matrix { +impl<'a> FromIterator> for Matrix<'a> { + fn from_iter>>(mut iterator: T) -> Matrix<'a> { Matrix(iterator.collect()) } } @@ -110,7 +117,7 @@ pub enum Constructor { #[deriving(Clone, PartialEq)] enum Usefulness { Useful, - UsefulWithWitness(Vec>), + UsefulWithWitness(Vec>), NotUseful } @@ -132,16 +139,15 @@ impl<'a, 'tcx, 'v> Visitor<'v> for MatchCheckCtxt<'a, 'tcx> { } } -pub fn check_crate(tcx: &ty::ctxt, krate: &Crate) { - let mut cx = MatchCheckCtxt { tcx: tcx }; - visit::walk_crate(&mut cx, krate); +pub fn check_crate(tcx: &ty::ctxt) { + visit::walk_crate(&mut MatchCheckCtxt { tcx: tcx }, 
tcx.map.krate()); tcx.sess.abort_if_errors(); } fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) { visit::walk_expr(cx, ex); match ex.node { - ExprMatch(scrut, ref arms) => { + ExprMatch(ref scrut, ref arms) => { // First, check legality of move bindings. for arm in arms.iter() { check_legality_of_move_bindings(cx, @@ -156,28 +162,26 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) { // assigning or borrowing anything mutably. for arm in arms.iter() { match arm.guard { - Some(guard) => check_for_mutation_in_guard(cx, &*guard), + Some(ref guard) => check_for_mutation_in_guard(cx, &**guard), None => {} } } let mut static_inliner = StaticInliner::new(cx.tcx); - let inlined_arms = arms - .iter() - .map(|arm| Arm { - pats: arm.pats.iter().map(|pat| { - static_inliner.fold_pat(*pat) - }).collect(), - ..arm.clone() - }) - .collect::>(); + let inlined_arms = arms.iter().map(|arm| { + (arm.pats.iter().map(|pat| { + static_inliner.fold_pat((*pat).clone()) + }).collect(), arm.guard.as_ref().map(|e| &**e)) + }).collect::>, Option<&Expr>)>>(); if static_inliner.failed { return; } // Third, check if there are any references to NaN that we should warn about. - check_for_static_nan(cx, inlined_arms.as_slice()); + for &(ref pats, _) in inlined_arms.iter() { + check_for_static_nan(cx, pats.as_slice()); + } // Fourth, check for unreachable arms. check_arms(cx, inlined_arms.as_slice()); @@ -198,28 +202,25 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &Expr) { } let matrix: Matrix = inlined_arms - .move_iter() - .filter(|arm| arm.guard.is_none()) - .flat_map(|arm| arm.pats.move_iter()) - .map(|pat| vec![pat]) + .iter() + .filter(|&&(_, guard)| guard.is_none()) + .flat_map(|arm| arm.ref0().iter()) + .map(|pat| vec![&**pat]) .collect(); check_exhaustive(cx, ex.span, &matrix); }, ExprForLoop(ref pat, _, _, _) => { let mut static_inliner = StaticInliner::new(cx.tcx); - match is_refutable(cx, static_inliner.fold_pat(*pat)) { - Some(uncovered_pat) => { - cx.tcx.sess.span_err( - pat.span, - format!("refutable pattern in `for` loop binding: \ - `{}` not covered", - pat_to_string(&*uncovered_pat)).as_slice()); - }, - None => {} - } + is_refutable(cx, &*static_inliner.fold_pat((*pat).clone()), |uncovered_pat| { + cx.tcx.sess.span_err( + pat.span, + format!("refutable pattern in `for` loop binding: \ + `{}` not covered", + pat_to_string(uncovered_pat)).as_slice()); + }); // Check legality of move bindings. 
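The `is_refutable` call above now reports the witness through a closure instead of returning an owned `Gc<Pat>`. A small sketch of that callback style, with a made-up `Pat` stand-in rather than the real pattern type:

```rust
struct Pat { refutable: bool }

// Instead of returning an owned witness, hand a borrowed one to the caller's closure.
fn is_refutable<A, F: FnOnce(&Pat) -> A>(pat: &Pat, refutable: F) -> Option<A> {
    if pat.refutable { Some(refutable(pat)) } else { None }
}

fn main() {
    let pat = Pat { refutable: true };
    // The closure only sees a borrow; nothing is cloned or boxed for the error path.
    let msg = is_refutable(&pat, |_p| "refutable pattern in `for` loop binding");
    assert_eq!(msg, Some("refutable pattern in `for` loop binding"));
}
```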
- check_legality_of_move_bindings(cx, false, [ *pat ]); + check_legality_of_move_bindings(cx, false, slice::ref_slice(pat)); check_legality_of_bindings_in_at_patterns(cx, &**pat); } _ => () @@ -234,36 +235,34 @@ fn is_expr_const_nan(tcx: &ty::ctxt, expr: &Expr) -> bool { } // Check that we do not match against a static NaN (#6804) -fn check_for_static_nan(cx: &MatchCheckCtxt, arms: &[Arm]) { - for arm in arms.iter() { - for &pat in arm.pats.iter() { - walk_pat(&*pat, |p| { - match p.node { - PatLit(expr) if is_expr_const_nan(cx.tcx, &*expr) => { - span_warn!(cx.tcx.sess, p.span, E0003, - "unmatchable NaN in pattern, \ - use the is_nan method in a guard instead"); - } - _ => () +fn check_for_static_nan(cx: &MatchCheckCtxt, pats: &[P]) { + for pat in pats.iter() { + walk_pat(&**pat, |p| { + match p.node { + PatLit(ref expr) if is_expr_const_nan(cx.tcx, &**expr) => { + span_warn!(cx.tcx.sess, p.span, E0003, + "unmatchable NaN in pattern, \ + use the is_nan method in a guard instead"); } - true - }); - } + _ => () + } + true + }); } } // Check for unreachable patterns -fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) { - let mut seen = Matrix(vec!()); - for arm in arms.iter() { - for &pat in arm.pats.iter() { - let v = vec![pat]; +fn check_arms(cx: &MatchCheckCtxt, arms: &[(Vec>, Option<&Expr>)]) { + let mut seen = Matrix(vec![]); + for &(ref pats, guard) in arms.iter() { + for pat in pats.iter() { + let v = vec![&**pat]; match is_useful(cx, &seen, v.as_slice(), LeaveOutWitness) { NotUseful => span_err!(cx.tcx.sess, pat.span, E0001, "unreachable pattern"), Useful => (), UsefulWithWitness(_) => unreachable!() } - if arm.guard.is_none() { + if guard.is_none() { let Matrix(mut rows) = seen; rows.push(v); seen = Matrix(rows); @@ -272,17 +271,24 @@ fn check_arms(cx: &MatchCheckCtxt, arms: &[Arm]) { } } +fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { + match p.node { + PatIdent(_, _, Some(ref s)) => raw_pat(&**s), + _ => p + } +} + fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { - match is_useful(cx, matrix, [wild()], ConstructWitness) { + match is_useful(cx, matrix, &[&DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { let witness = match pats.as_slice() { - [witness] => witness, - [] => wild(), + [ref witness] => &**witness, + [] => &DUMMY_WILD_PAT, _ => unreachable!() }; span_err!(cx.tcx.sess, sp, E0004, "non-exhaustive patterns: `{}` not covered", - pat_to_string(&*witness) + pat_to_string(witness) ); } NotUseful => { @@ -292,17 +298,17 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, matrix: &Matrix) { } } -fn const_val_to_expr(value: &const_val) -> Gc { +fn const_val_to_expr(value: &const_val) -> P { let node = match value { &const_bool(b) => LitBool(b), &const_nil => LitNil, _ => unreachable!() }; - box (GC) Expr { + P(Expr { id: 0, - node: ExprLit(box(GC) Spanned { node: node, span: DUMMY_SP }), + node: ExprLit(P(Spanned { node: node, span: DUMMY_SP })), span: DUMMY_SP - } + }) } pub struct StaticInliner<'a, 'tcx: 'a> { @@ -320,16 +326,18 @@ impl<'a, 'tcx> StaticInliner<'a, 'tcx> { } impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> { - fn fold_pat(&mut self, pat: Gc) -> Gc { + fn fold_pat(&mut self, pat: P) -> P { match pat.node { PatIdent(..) | PatEnum(..) 
=> { let def = self.tcx.def_map.borrow().find_copy(&pat.id); match def { Some(DefStatic(did, _)) => match lookup_const_by_id(self.tcx, did) { - Some(const_expr) => box (GC) Pat { - span: pat.span, - ..(*const_expr_to_pat(self.tcx, const_expr)).clone() - }, + Some(const_expr) => { + const_expr_to_pat(self.tcx, const_expr).map(|mut new_pat| { + new_pat.span = pat.span; + new_pat + }) + } None => { self.failed = true; span_err!(self.tcx.sess, pat.span, E0158, @@ -359,9 +367,11 @@ impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> { /// left_ty: struct X { a: (bool, &'static str), b: uint} /// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor, - pats: Vec>, left_ty: ty::t) -> Gc { + pats: Vec<&Pat>, left_ty: ty::t) -> P { + let pats_len = pats.len(); + let mut pats = pats.move_iter().map(|p| P((*p).clone())); let pat = match ty::get(left_ty).sty { - ty::ty_tup(_) => PatTup(pats), + ty::ty_tup(_) => PatTup(pats.collect()), ty::ty_enum(cid, _) | ty::ty_struct(cid, _) => { let (vid, is_structure) = match ctor { @@ -374,16 +384,16 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor, if is_structure { let fields = ty::lookup_struct_fields(cx.tcx, vid); let field_pats: Vec = fields.move_iter() - .zip(pats.iter()) - .filter(|&(_, pat)| pat.node != PatWild(PatWildSingle)) + .zip(pats) + .filter(|&(_, ref pat)| pat.node != PatWild(PatWildSingle)) .map(|(field, pat)| FieldPat { ident: Ident::new(field.name), - pat: pat.clone() + pat: pat }).collect(); - let has_more_fields = field_pats.len() < pats.len(); + let has_more_fields = field_pats.len() < pats_len; PatStruct(def_to_path(cx.tcx, vid), field_pats, has_more_fields) } else { - PatEnum(def_to_path(cx.tcx, vid), Some(pats)) + PatEnum(def_to_path(cx.tcx, vid), Some(pats.collect())) } } @@ -391,35 +401,35 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor, match ty::get(ty).sty { ty::ty_vec(_, Some(n)) => match ctor { &Single => { - assert_eq!(pats.len(), n); - PatVec(pats, None, vec!()) + assert_eq!(pats_len, n); + PatVec(pats.collect(), None, vec!()) }, _ => unreachable!() }, ty::ty_vec(_, None) => match ctor { &Slice(n) => { - assert_eq!(pats.len(), n); - PatVec(pats, None, vec!()) + assert_eq!(pats_len, n); + PatVec(pats.collect(), None, vec!()) }, _ => unreachable!() }, ty::ty_str => PatWild(PatWildSingle), _ => { - assert_eq!(pats.len(), 1); - PatRegion(pats.get(0).clone()) + assert_eq!(pats_len, 1); + PatRegion(pats.nth(0).unwrap()) } } } ty::ty_box(_) => { - assert_eq!(pats.len(), 1); - PatBox(pats.get(0).clone()) + assert_eq!(pats_len, 1); + PatBox(pats.nth(0).unwrap()) } ty::ty_vec(_, Some(len)) => { - assert_eq!(pats.len(), len); - PatVec(pats, None, vec!()) + assert_eq!(pats_len, len); + PatVec(pats.collect(), None, vec![]) } _ => { @@ -430,11 +440,11 @@ fn construct_witness(cx: &MatchCheckCtxt, ctor: &Constructor, } }; - box (GC) Pat { + P(Pat { id: 0, node: pat, span: DUMMY_SP - } + }) } fn missing_constructor(cx: &MatchCheckCtxt, &Matrix(ref rows): &Matrix, @@ -492,7 +502,7 @@ fn all_constructors(cx: &MatchCheckCtxt, left_ty: ty::t, // So it assumes that v is non-empty. 
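`StaticInliner` and the usefulness check above lean on a single `static DUMMY_WILD_PAT` that is handed out by reference wherever a wildcard is needed, instead of allocating a fresh `wild()` box each time. A standalone sketch of that idea with a simplified stand-in pattern type:

```rust
#[derive(Debug, PartialEq)]
enum PatKind { Wild, Lit(i64) }

// One shared wildcard value; everything else borrows it.
static DUMMY_WILD_PAT: PatKind = PatKind::Wild;

// Rows of the usefulness matrix hold borrowed patterns rather than owned ones.
fn pad_row<'a>(row: &mut Vec<&'a PatKind>, arity: usize) {
    while row.len() < arity {
        row.push(&DUMMY_WILD_PAT); // no allocation, just a 'static borrow
    }
}

fn main() {
    let lit = PatKind::Lit(42);
    let mut row = vec![&lit];
    pad_row(&mut row, 3);
    assert_eq!(row.len(), 3);
    assert_eq!(*row[2], PatKind::Wild);
}
```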
fn is_useful(cx: &MatchCheckCtxt, matrix: &Matrix, - v: &[Gc], + v: &[&Pat], witness: WitnessPreference) -> Usefulness { let &Matrix(ref rows) = matrix; @@ -506,12 +516,12 @@ fn is_useful(cx: &MatchCheckCtxt, if rows.get(0).len() == 0u { return NotUseful; } - let real_pat = match rows.iter().find(|r| r.get(0).id != 0) { + let real_pat = match rows.iter().find(|r| r.get(0).id != DUMMY_NODE_ID) { Some(r) => raw_pat(*r.get(0)), None if v.len() == 0 => return NotUseful, None => v[0] }; - let left_ty = if real_pat.id == 0 { + let left_ty = if real_pat.id == DUMMY_NODE_ID { ty::mk_nil() } else { ty::pat_ty(cx.tcx, &*real_pat) @@ -530,14 +540,13 @@ fn is_useful(cx: &MatchCheckCtxt, match is_useful_specialized(cx, matrix, v, c.clone(), left_ty, witness) { UsefulWithWitness(pats) => UsefulWithWitness({ let arity = constructor_arity(cx, &c, left_ty); - let subpats = { + let mut result = { let pat_slice = pats.as_slice(); - Vec::from_fn(arity, |i| { - pat_slice.get(i).map(|p| p.clone()) - .unwrap_or_else(|| wild()) - }) + let subpats = Vec::from_fn(arity, |i| { + pat_slice.get(i).map_or(&DUMMY_WILD_PAT, |p| &**p) + }); + vec![construct_witness(cx, &c, subpats, left_ty)] }; - let mut result = vec!(construct_witness(cx, &c, subpats, left_ty)); result.extend(pats.move_iter().skip(arity)); result }), @@ -547,13 +556,21 @@ fn is_useful(cx: &MatchCheckCtxt, }, Some(constructor) => { - let matrix = rows.iter().filter_map(|r| default(cx, r.as_slice())).collect(); + let matrix = rows.iter().filter_map(|r| { + if pat_is_binding_or_wild(&cx.tcx.def_map, raw_pat(r[0])) { + Some(Vec::from_slice(r.tail())) + } else { + None + } + }).collect(); match is_useful(cx, &matrix, v.tail(), witness) { UsefulWithWitness(pats) => { let arity = constructor_arity(cx, &constructor, left_ty); - let wild_pats = Vec::from_elem(arity, wild()); + let wild_pats = Vec::from_elem(arity, &DUMMY_WILD_PAT); let enum_pat = construct_witness(cx, &constructor, wild_pats, left_ty); - UsefulWithWitness(vec!(enum_pat).append(pats.as_slice())) + let mut new_pats = vec![enum_pat]; + new_pats.extend(pats.move_iter()); + UsefulWithWitness(new_pats) }, result => result } @@ -566,8 +583,9 @@ fn is_useful(cx: &MatchCheckCtxt, } } -fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, v: &[Gc], - ctor: Constructor, lty: ty::t, witness: WitnessPreference) -> Usefulness { +fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, + v: &[&Pat], ctor: Constructor, lty: ty::t, + witness: WitnessPreference) -> Usefulness { let arity = constructor_arity(cx, &ctor, lty); let matrix = Matrix(m.iter().filter_map(|r| { specialize(cx, r.as_slice(), &ctor, 0u, arity) @@ -587,7 +605,7 @@ fn is_useful_specialized(cx: &MatchCheckCtxt, &Matrix(ref m): &Matrix, v: &[Gc

, +fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, left_ty: ty::t, max_slice_length: uint) -> Vec { let pat = raw_pat(p); match pat.node { @@ -613,10 +631,10 @@ fn pat_constructors(cx: &MatchCheckCtxt, p: Gc, Some(&DefVariant(_, id, _)) => vec!(Variant(id)), _ => vec!(Single) }, - PatLit(expr) => - vec!(ConstantValue(eval_const_expr(cx.tcx, &*expr))), - PatRange(lo, hi) => - vec!(ConstantRange(eval_const_expr(cx.tcx, &*lo), eval_const_expr(cx.tcx, &*hi))), + PatLit(ref expr) => + vec!(ConstantValue(eval_const_expr(cx.tcx, &**expr))), + PatRange(ref lo, ref hi) => + vec!(ConstantRange(eval_const_expr(cx.tcx, &**lo), eval_const_expr(cx.tcx, &**hi))), PatVec(ref before, ref slice, ref after) => match ty::get(left_ty).sty { ty::ty_vec(_, Some(_)) => vec!(Single), @@ -691,14 +709,15 @@ fn range_covered_by_constructor(ctor: &Constructor, /// different patterns. /// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing /// fields filled with wild patterns. -pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], - constructor: &Constructor, col: uint, arity: uint) -> Option>> { +pub fn specialize<'a>(cx: &MatchCheckCtxt, r: &[&'a Pat], + constructor: &Constructor, col: uint, arity: uint) -> Option> { let &Pat { id: pat_id, node: ref node, span: pat_span - } = &(*raw_pat(r[col])); - let head: Option>> = match node { + } = raw_pat(r[col]); + let head: Option> = match node { + &PatWild(_) => - Some(Vec::from_elem(arity, wild())), + Some(Vec::from_elem(arity, &DUMMY_WILD_PAT)), &PatIdent(_, _, _) => { let opt_def = cx.tcx.def_map.borrow().find_copy(&pat_id); @@ -710,7 +729,7 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], } else { None }, - _ => Some(Vec::from_elem(arity, wild())) + _ => Some(Vec::from_elem(arity, &DUMMY_WILD_PAT)) } } @@ -722,8 +741,8 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], DefVariant(_, id, _) if *constructor != Variant(id) => None, DefVariant(..) | DefFn(..) | DefStruct(..) 
=> { Some(match args { - &Some(ref args) => args.clone(), - &None => Vec::from_elem(arity, wild()) + &Some(ref args) => args.iter().map(|p| &**p).collect(), + &None => Vec::from_elem(arity, &DUMMY_WILD_PAT) }) } _ => None @@ -757,8 +776,8 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], let struct_fields = ty::lookup_struct_fields(cx.tcx, variant_id); let args = struct_fields.iter().map(|sf| { match pattern_fields.iter().find(|f| f.ident.name == sf.name) { - Some(f) => f.pat, - _ => wild() + Some(ref f) => &*f.pat, + _ => &DUMMY_WILD_PAT } }).collect(); args @@ -766,15 +785,15 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], } &PatTup(ref args) => - Some(args.clone()), + Some(args.iter().map(|p| &**p).collect()), &PatBox(ref inner) | &PatRegion(ref inner) => - Some(vec!(inner.clone())), + Some(vec![&**inner]), &PatLit(ref expr) => { let expr_value = eval_const_expr(cx.tcx, &**expr); match range_covered_by_constructor(constructor, &expr_value, &expr_value) { - Some(true) => Some(vec!()), + Some(true) => Some(vec![]), Some(false) => None, None => { cx.tcx.sess.span_err(pat_span, "mismatched types between arms"); @@ -787,7 +806,7 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], let from_value = eval_const_expr(cx.tcx, &**from); let to_value = eval_const_expr(cx.tcx, &**to); match range_covered_by_constructor(constructor, &from_value, &to_value) { - Some(true) => Some(vec!()), + Some(true) => Some(vec![]), Some(false) => None, None => { cx.tcx.sess.span_err(pat_span, "mismatched types between arms"); @@ -800,28 +819,28 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], match *constructor { // Fixed-length vectors. Single => { - let mut pats = before.clone(); - pats.grow_fn(arity - before.len() - after.len(), |_| wild()); - pats.push_all(after.as_slice()); + let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect(); + pats.grow_fn(arity - before.len() - after.len(), |_| &DUMMY_WILD_PAT); + pats.extend(after.iter().map(|p| &**p)); Some(pats) }, Slice(length) if before.len() + after.len() <= length && slice.is_some() => { - let mut pats = before.clone(); - pats.grow_fn(arity - before.len() - after.len(), |_| wild()); - pats.push_all(after.as_slice()); + let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect(); + pats.grow_fn(arity - before.len() - after.len(), |_| &DUMMY_WILD_PAT); + pats.extend(after.iter().map(|p| &**p)); Some(pats) }, Slice(length) if before.len() + after.len() == length => { - let mut pats = before.clone(); - pats.push_all(after.as_slice()); + let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect(); + pats.extend(after.iter().map(|p| &**p)); Some(pats) }, SliceWithSubslice(prefix, suffix) if before.len() == prefix && after.len() == suffix && slice.is_some() => { - let mut pats = before.clone(); - pats.push_all(after.as_slice()); + let mut pats: Vec<&Pat> = before.iter().map(|p| &**p).collect(); + pats.extend(after.iter().map(|p| &**p)); Some(pats) } _ => None @@ -836,14 +855,6 @@ pub fn specialize(cx: &MatchCheckCtxt, r: &[Gc], head.map(|head| head.append(r.slice_to(col)).append(r.slice_from(col + 1))) } -fn default(cx: &MatchCheckCtxt, r: &[Gc]) -> Option>> { - if pat_is_binding_or_wild(&cx.tcx.def_map, &*raw_pat(r[0])) { - Some(Vec::from_slice(r.tail())) - } else { - None - } -} - fn check_local(cx: &mut MatchCheckCtxt, loc: &Local) { visit::walk_local(cx, loc); @@ -853,18 +864,15 @@ fn check_local(cx: &mut MatchCheckCtxt, loc: &Local) { }; let mut static_inliner = StaticInliner::new(cx.tcx); - match is_refutable(cx, 
static_inliner.fold_pat(loc.pat)) { - Some(pat) => { - span_err!(cx.tcx.sess, loc.pat.span, E0005, - "refutable pattern in {} binding: `{}` not covered", - name, pat_to_string(&*pat) - ); - }, - None => () - } + is_refutable(cx, &*static_inliner.fold_pat(loc.pat.clone()), |pat| { + span_err!(cx.tcx.sess, loc.pat.span, E0005, + "refutable pattern in {} binding: `{}` not covered", + name, pat_to_string(pat) + ); + }); // Check legality of move bindings and `@` patterns. - check_legality_of_move_bindings(cx, false, [ loc.pat ]); + check_legality_of_move_bindings(cx, false, slice::ref_slice(&loc.pat)); check_legality_of_bindings_in_at_patterns(cx, &*loc.pat); } @@ -875,26 +883,23 @@ fn check_fn(cx: &mut MatchCheckCtxt, sp: Span) { visit::walk_fn(cx, kind, decl, body, sp); for input in decl.inputs.iter() { - match is_refutable(cx, input.pat) { - Some(pat) => { - span_err!(cx.tcx.sess, input.pat.span, E0006, - "refutable pattern in function argument: `{}` not covered", - pat_to_string(&*pat) - ); - }, - None => () - } - check_legality_of_move_bindings(cx, false, [input.pat]); + is_refutable(cx, &*input.pat, |pat| { + span_err!(cx.tcx.sess, input.pat.span, E0006, + "refutable pattern in function argument: `{}` not covered", + pat_to_string(pat) + ); + }); + check_legality_of_move_bindings(cx, false, slice::ref_slice(&input.pat)); check_legality_of_bindings_in_at_patterns(cx, &*input.pat); } } -fn is_refutable(cx: &MatchCheckCtxt, pat: Gc) -> Option> { +fn is_refutable(cx: &MatchCheckCtxt, pat: &Pat, refutable: |&Pat| -> A) -> Option { let pats = Matrix(vec!(vec!(pat))); - match is_useful(cx, &pats, [wild()], ConstructWitness) { + match is_useful(cx, &pats, [&DUMMY_WILD_PAT], ConstructWitness) { UsefulWithWitness(pats) => { assert_eq!(pats.len(), 1); - Some(pats.get(0).clone()) + Some(refutable(&*pats[0])) }, NotUseful => None, Useful => unreachable!() @@ -904,7 +909,7 @@ fn is_refutable(cx: &MatchCheckCtxt, pat: Gc) -> Option> { // Legality of move bindings checking fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, has_guard: bool, - pats: &[Gc]) { + pats: &[P]) { let tcx = cx.tcx; let def_map = &tcx.def_map; let mut by_ref_span = None; @@ -920,7 +925,7 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, }) } - let check_move: |&Pat, Option>| = |p, sub| { + let check_move: |&Pat, Option<&Pat>| = |p, sub| { // check legality of moving out of the enum // x @ Foo(..) is legal, but x @ Foo(y) isn't. 
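`slice::ref_slice(pat)` above views one borrowed pattern as a one-element slice, so no temporary `[ *pat ]` array of copied pointers is built. A runnable sketch of the same trick; `std::slice::from_ref` is the current name for what the patch calls `ref_slice`:

```rust
fn count_bindings(pats: &[String]) -> usize {
    pats.iter().filter(|p| !p.starts_with('_')).count()
}

fn main() {
    let pat = String::from("x");
    // No clone of `pat`, no temporary array: just a &[String] of length one.
    let as_slice: &[String] = std::slice::from_ref(&pat);
    assert_eq!(count_bindings(as_slice), 1);
}
```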
@@ -939,10 +944,10 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, walk_pat(&**pat, |p| { if pat_is_binding(def_map, &*p) { match p.node { - PatIdent(BindByValue(_), _, sub) => { + PatIdent(BindByValue(_), _, ref sub) => { let pat_ty = ty::node_id_to_type(tcx, p.id); if ty::type_moves_by_default(tcx, pat_ty) { - check_move(p, sub); + check_move(p, sub.as_ref().map(|p| &**p)); } } PatIdent(BindByRef(_), _, _) => { diff --git a/src/librustc/middle/check_static.rs b/src/librustc/middle/check_static.rs index 46896b8811f00..7a11090a8eecb 100644 --- a/src/librustc/middle/check_static.rs +++ b/src/librustc/middle/check_static.rs @@ -56,8 +56,9 @@ struct CheckStaticVisitor<'a, 'tcx: 'a> { in_const: bool } -pub fn check_crate(tcx: &ty::ctxt, krate: &ast::Crate) { - visit::walk_crate(&mut CheckStaticVisitor { tcx: tcx, in_const: false }, krate) +pub fn check_crate(tcx: &ty::ctxt) { + visit::walk_crate(&mut CheckStaticVisitor { tcx: tcx, in_const: false }, + tcx.map.krate()) } impl<'a, 'tcx> CheckStaticVisitor<'a, 'tcx> { diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs index 23ab6f4585b1f..8c7c8eda2d278 100644 --- a/src/librustc/middle/const_eval.rs +++ b/src/librustc/middle/const_eval.rs @@ -22,12 +22,12 @@ use util::nodemap::{DefIdMap}; use syntax::ast::*; use syntax::parse::token::InternedString; +use syntax::ptr::P; use syntax::visit::Visitor; use syntax::visit; use syntax::{ast, ast_map, ast_util}; use std::rc::Rc; -use std::gc::{Gc, GC}; // // This pass classifies expressions by their constant-ness. @@ -83,7 +83,7 @@ pub fn join_all>(mut cs: It) -> constness { cs.fold(integral_const, |a, b| join(a, b)) } -pub fn lookup_const(tcx: &ty::ctxt, e: &Expr) -> Option> { +fn lookup_const<'a>(tcx: &'a ty::ctxt, e: &Expr) -> Option<&'a Expr> { let opt_def = tcx.def_map.borrow().find_copy(&e.id); match opt_def { Some(def::DefStatic(def_id, false)) => { @@ -96,83 +96,90 @@ pub fn lookup_const(tcx: &ty::ctxt, e: &Expr) -> Option> { } } -pub fn lookup_variant_by_id(tcx: &ty::ctxt, +fn lookup_variant_by_id<'a>(tcx: &'a ty::ctxt, enum_def: ast::DefId, variant_def: ast::DefId) - -> Option> { - fn variant_expr(variants: &[ast::P], - id: ast::NodeId) -> Option> { + -> Option<&'a Expr> { + fn variant_expr<'a>(variants: &'a [P], id: ast::NodeId) + -> Option<&'a Expr> { for variant in variants.iter() { if variant.node.id == id { - return variant.node.disr_expr; + return variant.node.disr_expr.as_ref().map(|e| &**e); } } None } if ast_util::is_local(enum_def) { - { - match tcx.map.find(enum_def.node) { - None => None, - Some(ast_map::NodeItem(it)) => match it.node { - ItemEnum(ast::EnumDef { variants: ref variants }, _) => { - variant_expr(variants.as_slice(), variant_def.node) - } - _ => None - }, - Some(_) => None - } + match tcx.map.find(enum_def.node) { + None => None, + Some(ast_map::NodeItem(it)) => match it.node { + ItemEnum(ast::EnumDef { variants: ref variants }, _) => { + variant_expr(variants.as_slice(), variant_def.node) + } + _ => None + }, + Some(_) => None } } else { match tcx.extern_const_variants.borrow().find(&variant_def) { - Some(&e) => return e, + Some(&ast::DUMMY_NODE_ID) => return None, + Some(&expr_id) => { + return Some(tcx.map.expect_expr(expr_id)); + } None => {} } - let e = match csearch::maybe_get_item_ast(tcx, enum_def, + let expr_id = match csearch::maybe_get_item_ast(tcx, enum_def, |a, b, c, d| astencode::decode_inlined_item(a, b, c, d)) { - csearch::found(ast::IIItem(item)) => match item.node { + csearch::found(&ast::IIItem(ref item)) => 
match item.node { ItemEnum(ast::EnumDef { variants: ref variants }, _) => { - variant_expr(variants.as_slice(), variant_def.node) + // NOTE this doesn't do the right thing, it compares inlined + // NodeId's to the original variant_def's NodeId, but they + // come from different crates, so they will likely never match. + variant_expr(variants.as_slice(), variant_def.node).map(|e| e.id) } _ => None }, _ => None }; - tcx.extern_const_variants.borrow_mut().insert(variant_def, e); - return e; + tcx.extern_const_variants.borrow_mut().insert(variant_def, + expr_id.unwrap_or(ast::DUMMY_NODE_ID)); + expr_id.map(|id| tcx.map.expect_expr(id)) } } -pub fn lookup_const_by_id(tcx: &ty::ctxt, def_id: ast::DefId) - -> Option> { +pub fn lookup_const_by_id<'a>(tcx: &'a ty::ctxt, def_id: ast::DefId) + -> Option<&'a Expr> { if ast_util::is_local(def_id) { - { - match tcx.map.find(def_id.node) { - None => None, - Some(ast_map::NodeItem(it)) => match it.node { - ItemStatic(_, ast::MutImmutable, const_expr) => { - Some(const_expr) - } - _ => None - }, - Some(_) => None - } + match tcx.map.find(def_id.node) { + None => None, + Some(ast_map::NodeItem(it)) => match it.node { + ItemStatic(_, ast::MutImmutable, ref const_expr) => { + Some(&**const_expr) + } + _ => None + }, + Some(_) => None } } else { match tcx.extern_const_statics.borrow().find(&def_id) { - Some(&e) => return e, + Some(&ast::DUMMY_NODE_ID) => return None, + Some(&expr_id) => { + return Some(tcx.map.expect_expr(expr_id)); + } None => {} } - let e = match csearch::maybe_get_item_ast(tcx, def_id, + let expr_id = match csearch::maybe_get_item_ast(tcx, def_id, |a, b, c, d| astencode::decode_inlined_item(a, b, c, d)) { - csearch::found(ast::IIItem(item)) => match item.node { - ItemStatic(_, ast::MutImmutable, const_expr) => Some(const_expr), + csearch::found(&ast::IIItem(ref item)) => match item.node { + ItemStatic(_, ast::MutImmutable, ref const_expr) => Some(const_expr.id), _ => None }, _ => None }; - tcx.extern_const_statics.borrow_mut().insert(def_id, e); - return e; + tcx.extern_const_statics.borrow_mut().insert(def_id, + expr_id.unwrap_or(ast::DUMMY_NODE_ID)); + expr_id.map(|id| tcx.map.expect_expr(id)) } } @@ -271,8 +278,8 @@ impl<'a, 'tcx> ConstEvalVisitor<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for ConstEvalVisitor<'a, 'tcx> { fn visit_ty(&mut self, t: &Ty) { match t.node { - TyFixedLengthVec(_, expr) => { - check::check_const_in_type(self.tcx, &*expr, ty::mk_uint()); + TyFixedLengthVec(_, ref expr) => { + check::check_const_in_type(self.tcx, &**expr, ty::mk_uint()); } _ => {} } @@ -285,13 +292,11 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstEvalVisitor<'a, 'tcx> { } } -pub fn process_crate(krate: &ast::Crate, - tcx: &ty::ctxt) { - let mut v = ConstEvalVisitor { +pub fn process_crate(tcx: &ty::ctxt) { + visit::walk_crate(&mut ConstEvalVisitor { tcx: tcx, ccache: DefIdMap::new(), - }; - visit::walk_crate(&mut v, krate); + }, tcx.map.krate()); tcx.sess.abort_if_errors(); } @@ -309,12 +314,12 @@ pub enum const_val { const_nil } -pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: Gc) -> Gc { +pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: &Expr) -> P { let pat = match expr.node { ExprTup(ref exprs) => - PatTup(exprs.iter().map(|&expr| const_expr_to_pat(tcx, expr)).collect()), + PatTup(exprs.iter().map(|expr| const_expr_to_pat(tcx, &**expr)).collect()), - ExprCall(callee, ref args) => { + ExprCall(ref callee, ref args) => { let def = tcx.def_map.borrow().get_copy(&callee.id); tcx.def_map.borrow_mut().find_or_insert(expr.id, def); let path = match def { 
@@ -322,20 +327,20 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: Gc) -> Gc { def::DefVariant(_, variant_did, _) => def_to_path(tcx, variant_did), _ => unreachable!() }; - let pats = args.iter().map(|&expr| const_expr_to_pat(tcx, expr)).collect(); + let pats = args.iter().map(|expr| const_expr_to_pat(tcx, &**expr)).collect(); PatEnum(path, Some(pats)) } ExprStruct(ref path, ref fields, None) => { let field_pats = fields.iter().map(|field| FieldPat { ident: field.ident.node, - pat: const_expr_to_pat(tcx, field.expr) + pat: const_expr_to_pat(tcx, &*field.expr) }).collect(); PatStruct(path.clone(), field_pats, false) } ExprVec(ref exprs) => { - let pats = exprs.iter().map(|&expr| const_expr_to_pat(tcx, expr)).collect(); + let pats = exprs.iter().map(|expr| const_expr_to_pat(tcx, &**expr)).collect(); PatVec(pats, None, vec![]) } @@ -347,7 +352,7 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: Gc) -> Gc { Some(def::DefVariant(..)) => PatEnum(path.clone(), None), _ => { - match lookup_const(tcx, &*expr) { + match lookup_const(tcx, expr) { Some(actual) => return const_expr_to_pat(tcx, actual), _ => unreachable!() } @@ -355,9 +360,9 @@ pub fn const_expr_to_pat(tcx: &ty::ctxt, expr: Gc) -> Gc { } } - _ => PatLit(expr) + _ => PatLit(P(expr.clone())) }; - box (GC) Pat { id: expr.id, node: pat, span: expr.span } + P(Pat { id: expr.id, node: pat, span: expr.span }) } pub fn eval_const_expr(tcx: &ty::ctxt, e: &Expr) -> const_val { diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 68bcd950f9ce7..46e3585912a7f 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -219,12 +219,12 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { } } ast_map::NodeTraitItem(trait_method) => { - visit::walk_trait_item(self, &*trait_method); + visit::walk_trait_item(self, trait_method); } ast_map::NodeImplItem(impl_item) => { match *impl_item { - ast::MethodImplItem(method) => { - visit::walk_block(self, &*method.pe_body()); + ast::MethodImplItem(ref method) => { + visit::walk_block(self, method.pe_body()); } } } @@ -338,7 +338,7 @@ impl<'v> Visitor<'v> for LifeSeeder { ast::ItemImpl(_, Some(ref _trait_ref), _, ref impl_items) => { for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { self.worklist.push(method.id); } } @@ -422,7 +422,7 @@ fn should_warn(item: &ast::Item) -> bool { fn get_struct_ctor_id(item: &ast::Item) -> Option { match item.node { - ast::ItemStruct(struct_def, _) => struct_def.ctor_id, + ast::ItemStruct(ref struct_def, _) => struct_def.ctor_id, _ => None } } @@ -551,8 +551,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for DeadVisitor<'a, 'tcx> { pub fn check_crate(tcx: &ty::ctxt, exported_items: &privacy::ExportedItems, - reachable_symbols: &NodeSet, - krate: &ast::Crate) { + reachable_symbols: &NodeSet) { + let krate = tcx.map.krate(); let live_symbols = find_live(tcx, exported_items, reachable_symbols, krate); let mut visitor = DeadVisitor { tcx: tcx, live_symbols: live_symbols }; diff --git a/src/librustc/middle/effect.rs b/src/librustc/middle/effect.rs index db9eb90b6ec02..b492203b3521e 100644 --- a/src/librustc/middle/effect.rs +++ b/src/librustc/middle/effect.rs @@ -64,7 +64,7 @@ impl<'a, 'tcx> EffectCheckVisitor<'a, 'tcx> { fn check_str_index(&mut self, e: &ast::Expr) { let base_type = match e.node { - ast::ExprIndex(base, _) => ty::node_id_to_type(self.tcx, base.id), + ast::ExprIndex(ref base, _) => ty::node_id_to_type(self.tcx, base.id), _ => return }; debug!("effect: checking 
index with base type {}", @@ -153,7 +153,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> { "invocation of unsafe method") } } - ast::ExprCall(base, _) => { + ast::ExprCall(ref base, _) => { let base_type = ty::node_id_to_type(self.tcx, base.id); debug!("effect: call case, base type is {}", ppaux::ty_to_string(self.tcx, base_type)); @@ -161,7 +161,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> { self.require_unsafe(expr.span, "call to unsafe function") } } - ast::ExprUnary(ast::UnDeref, base) => { + ast::ExprUnary(ast::UnDeref, ref base) => { let base_type = ty::node_id_to_type(self.tcx, base.id); debug!("effect: unary case, base type is {}", ppaux::ty_to_string(self.tcx, base_type)); @@ -197,11 +197,11 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> { } } -pub fn check_crate(tcx: &ty::ctxt, krate: &ast::Crate) { +pub fn check_crate(tcx: &ty::ctxt) { let mut visitor = EffectCheckVisitor { tcx: tcx, unsafe_context: SafeContext, }; - visit::walk_crate(&mut visitor, krate); + visit::walk_crate(&mut visitor, tcx.map.krate()); } diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs index 2b96eb717ea86..01854564c0572 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc/middle/entry.rs @@ -11,7 +11,7 @@ use driver::config; use driver::session::Session; -use syntax::ast::{Crate, Name, NodeId, Item, ItemFn}; +use syntax::ast::{Name, NodeId, Item, ItemFn}; use syntax::ast_map; use syntax::attr; use syntax::codemap::Span; @@ -19,10 +19,10 @@ use syntax::parse::token; use syntax::visit; use syntax::visit::Visitor; -struct EntryContext<'a> { +struct EntryContext<'a, 'ast: 'a> { session: &'a Session, - ast_map: &'a ast_map::Map, + ast_map: &'a ast_map::Map<'ast>, // The interned Name for "main". main_name: Name, @@ -41,13 +41,13 @@ struct EntryContext<'a> { non_main_fns: Vec<(NodeId, Span)> , } -impl<'a, 'v> Visitor<'v> for EntryContext<'a> { +impl<'a, 'ast, 'v> Visitor<'v> for EntryContext<'a, 'ast> { fn visit_item(&mut self, item: &Item) { find_item(item, self); } } -pub fn find_entry_point(session: &Session, krate: &Crate, ast_map: &ast_map::Map) { +pub fn find_entry_point(session: &Session, ast_map: &ast_map::Map) { let any_exe = session.crate_types.borrow().iter().any(|ty| { *ty == config::CrateTypeExecutable }); @@ -57,7 +57,7 @@ pub fn find_entry_point(session: &Session, krate: &Crate, ast_map: &ast_map::Map } // If the user wants no main function at all, then stop here. 
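Several passes above drop their `krate: &Crate` parameter and instead pull the crate out of the AST map already held by the context (`tcx.map.krate()`). A toy sketch of that signature change, with invented `Crate`/`Map`/`Ctxt` stand-ins:

```rust
struct Crate { items: Vec<&'static str> }

struct Map<'ast> { krate: &'ast Crate }

impl<'ast> Map<'ast> {
    fn krate(&self) -> &'ast Crate { self.krate }
}

struct Ctxt<'ast> { map: Map<'ast> }

// Callers no longer thread the crate through; the pass asks the map for it.
fn check_crate(ctxt: &Ctxt) -> usize {
    ctxt.map.krate().items.len()
}

fn main() {
    let krate = Crate { items: vec!["main", "helper"] };
    let ctxt = Ctxt { map: Map { krate: &krate } };
    assert_eq!(check_crate(&ctxt), 2);
}
```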
- if attr::contains_name(krate.attrs.as_slice(), "no_main") { + if attr::contains_name(ast_map.krate().attrs.as_slice(), "no_main") { session.entry_type.set(Some(config::EntryNone)); return } @@ -72,7 +72,7 @@ pub fn find_entry_point(session: &Session, krate: &Crate, ast_map: &ast_map::Map non_main_fns: Vec::new(), }; - visit::walk_crate(&mut ctxt, krate); + visit::walk_crate(&mut ctxt, ast_map.krate()); configure_main(&mut ctxt); } diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index d2362b7e9429b..1e79ea68eebac 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -24,8 +24,8 @@ use middle::typeck::{MethodStatic, MethodStaticUnboxedClosure}; use middle::typeck; use util::ppaux::Repr; -use std::gc::Gc; use syntax::ast; +use syntax::ptr::P; use syntax::codemap::Span; /////////////////////////////////////////////////////////////////////////// @@ -242,7 +242,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { ty::ReScope(body.id), // Args live only as long as the fn body. arg_ty); - self.walk_pat(arg_cmt, arg.pat.clone()); + self.walk_pat(arg_cmt, &*arg.pat); } } @@ -258,7 +258,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { self.delegate.consume(consume_id, consume_span, cmt, mode); } - fn consume_exprs(&mut self, exprs: &Vec>) { + fn consume_exprs(&mut self, exprs: &Vec>) { for expr in exprs.iter() { self.consume_expr(&**expr); } @@ -315,7 +315,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { ast::ExprPath(..) => { } ast::ExprUnary(ast::UnDeref, ref base) => { // *base - if !self.walk_overloaded_operator(expr, &**base, []) { + if !self.walk_overloaded_operator(expr, &**base, None) { self.select_from_expr(&**base); } } @@ -328,8 +328,8 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { self.select_from_expr(&**base); } - ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs] - if !self.walk_overloaded_operator(expr, &**lhs, [rhs.clone()]) { + ast::ExprIndex(ref lhs, ref rhs) => { // lhs[rhs] + if !self.walk_overloaded_operator(expr, &**lhs, Some(&**rhs)) { self.select_from_expr(&**lhs); self.consume_expr(&**rhs); } @@ -345,7 +345,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { } ast::ExprStruct(_, ref fields, ref opt_with) => { - self.walk_struct_expr(expr, fields, opt_with.clone()); + self.walk_struct_expr(expr, fields, opt_with); } ast::ExprTup(ref exprs) => { @@ -423,19 +423,19 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { pat.span, ty::ReScope(blk.id), pattern_type); - self.walk_pat(pat_cmt, pat.clone()); + self.walk_pat(pat_cmt, &**pat); self.walk_block(&**blk); } ast::ExprUnary(_, ref lhs) => { - if !self.walk_overloaded_operator(expr, &**lhs, []) { + if !self.walk_overloaded_operator(expr, &**lhs, None) { self.consume_expr(&**lhs); } } ast::ExprBinary(_, ref lhs, ref rhs) => { - if !self.walk_overloaded_operator(expr, &**lhs, [rhs.clone()]) { + if !self.walk_overloaded_operator(expr, &**lhs, Some(&**rhs)) { self.consume_expr(&**lhs); self.consume_expr(&**rhs); } @@ -554,7 +554,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { ast::StmtDecl(ref decl, _) => { match decl.node { ast::DeclLocal(ref local) => { - self.walk_local(local.clone()); + self.walk_local(&**local); } ast::DeclItem(_) => { @@ -575,7 +575,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { } } - fn walk_local(&mut self, local: Gc) { + fn 
walk_local(&mut self, local: &ast::Local) { match local.init { None => { let delegate = &mut self.delegate; @@ -592,7 +592,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { // `walk_pat`: self.walk_expr(&**expr); let init_cmt = return_if_err!(self.mc.cat_expr(&**expr)); - self.walk_pat(init_cmt, local.pat); + self.walk_pat(init_cmt, &*local.pat); } } } @@ -617,14 +617,14 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { fn walk_struct_expr(&mut self, _expr: &ast::Expr, fields: &Vec, - opt_with: Option>) { + opt_with: &Option>) { // Consume the expressions supplying values for each field. for field in fields.iter() { self.consume_expr(&*field.expr); } - let with_expr = match opt_with { - Some(ref w) => { w.clone() } + let with_expr = match *opt_with { + Some(ref w) => &**w, None => { return; } }; @@ -773,7 +773,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { fn walk_overloaded_operator(&mut self, expr: &ast::Expr, receiver: &ast::Expr, - args: &[Gc]) + rhs: Option<&ast::Expr>) -> bool { if !self.typer.is_method_call(expr.id) { @@ -789,15 +789,15 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { let r = ty::ReScope(expr.id); let bk = ty::ImmBorrow; - for arg in args.iter() { - self.borrow_expr(&**arg, r, bk, OverloadedOperator); + for &arg in rhs.iter() { + self.borrow_expr(arg, r, bk, OverloadedOperator); } return true; } fn walk_arm(&mut self, discr_cmt: mc::cmt, arm: &ast::Arm) { - for &pat in arm.pats.iter() { - self.walk_pat(discr_cmt.clone(), pat); + for pat in arm.pats.iter() { + self.walk_pat(discr_cmt.clone(), &**pat); } for guard in arm.guard.iter() { @@ -807,7 +807,7 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { self.consume_expr(&*arm.body); } - fn walk_pat(&mut self, cmt_discr: mc::cmt, pat: Gc) { + fn walk_pat(&mut self, cmt_discr: mc::cmt, pat: &ast::Pat) { debug!("walk_pat cmt_discr={} pat={}", cmt_discr.repr(self.tcx()), pat.repr(self.tcx())); let mc = &self.mc; @@ -859,14 +859,14 @@ impl<'d,'t,'tcx,TYPER:mc::Typer<'tcx>> ExprUseVisitor<'d,'t,TYPER> { } } else { match pat.node { - ast::PatVec(_, Some(slice_pat), _) => { + ast::PatVec(_, Some(ref slice_pat), _) => { // The `slice_pat` here creates a slice into // the original vector. This is effectively a // borrow of the elements of the vector being // matched. 
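`walk_overloaded_operator` above now takes `Option<&Expr>` for the right-hand operand rather than a slice of `Gc<Expr>`, since an overloaded unary or binary operator has at most one extra argument. A standalone sketch with stand-in types:

```rust
struct Expr { id: u32 }
struct Visitor { borrowed: Vec<u32> }

impl Visitor {
    fn borrow_expr(&mut self, expr: &Expr) {
        self.borrowed.push(expr.id);
    }

    fn walk_overloaded_operator(&mut self, receiver: &Expr, rhs: Option<&Expr>) {
        self.borrow_expr(receiver);
        if let Some(arg) = rhs {
            self.borrow_expr(arg); // at most one extra operand
        }
    }
}

fn main() {
    let (lhs, rhs) = (Expr { id: 1 }, Expr { id: 2 });
    let mut v = Visitor { borrowed: vec![] };
    v.walk_overloaded_operator(&lhs, Some(&rhs)); // binary case
    v.walk_overloaded_operator(&lhs, None);       // unary case
    assert_eq!(v.borrowed, vec![1, 2, 1]);
}
```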
let (slice_cmt, slice_mutbl, slice_r) = { - match mc.cat_slice_pattern(cmt_pat, &*slice_pat) { + match mc.cat_slice_pattern(cmt_pat, &**slice_pat) { Ok(v) => v, Err(()) => { tcx.sess.span_bug(slice_pat.span, diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 76ade1a7504eb..9d0d21d6d2af6 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -148,11 +148,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for IntrinsicCheckingVisitor<'a, 'tcx> { } } -pub fn check_crate(tcx: &ctxt, krate: &ast::Crate) { - let mut visitor = IntrinsicCheckingVisitor { - tcx: tcx, - }; - - visit::walk_crate(&mut visitor, krate); +pub fn check_crate(tcx: &ctxt) { + visit::walk_crate(&mut IntrinsicCheckingVisitor { tcx: tcx }, + tcx.map.krate()); } diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs index 1e398ce210b57..33f7680d873c3 100644 --- a/src/librustc/middle/kind.rs +++ b/src/librustc/middle/kind.rs @@ -83,14 +83,13 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Context<'a, 'tcx> { } } -pub fn check_crate(tcx: &ty::ctxt, - krate: &Crate) { +pub fn check_crate(tcx: &ty::ctxt) { let mut ctx = Context { tcx: tcx, struct_and_enum_bounds_checked: HashSet::new(), parameter_environments: Vec::new(), }; - visit::walk_crate(&mut ctx, krate); + visit::walk_crate(&mut ctx, tcx.map.krate()); tcx.sess.abort_if_errors(); } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 18f3de82280ef..aecfa36c108e0 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -111,7 +111,6 @@ use lint; use util::nodemap::NodeMap; use std::fmt; -use std::gc::Gc; use std::io; use std::mem::transmute; use std::rc::Rc; @@ -122,16 +121,16 @@ use syntax::codemap::{BytePos, original_sp, Span}; use syntax::parse::token::special_idents; use syntax::parse::token; use syntax::print::pprust::{expr_to_string, block_to_string}; +use syntax::ptr::P; use syntax::{visit, ast_util}; use syntax::visit::{Visitor, FnKind}; /// For use with `propagate_through_loop`. -#[deriving(PartialEq, Eq)] -enum LoopKind { +enum LoopKind<'a> { /// An endless `loop` loop. LoopLoop, /// A `while` loop, with the given expression as condition. - WhileLoop(Gc), + WhileLoop(&'a Expr), /// A `for` loop. 
ForLoop, } @@ -189,9 +188,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for IrMaps<'a, 'tcx> { fn visit_arm(&mut self, a: &Arm) { visit_arm(self, a); } } -pub fn check_crate(tcx: &ty::ctxt, - krate: &Crate) { - visit::walk_crate(&mut IrMaps::new(tcx), krate); +pub fn check_crate(tcx: &ty::ctxt) { + visit::walk_crate(&mut IrMaps::new(tcx), tcx.map.krate()); tcx.sess.abort_if_errors(); } @@ -617,25 +615,25 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } fn arm_pats_bindings(&mut self, - pats: &[Gc], + pat: Option<&Pat>, f: |&mut Liveness<'a, 'tcx>, LiveNode, Variable, Span, NodeId|) { - // only consider the first pattern; any later patterns must have - // the same bindings, and we also consider the first pattern to be - // the "authoritative" set of ids - if !pats.is_empty() { - self.pat_bindings(&*pats[0], f) + match pat { + Some(pat) => { + self.pat_bindings(pat, f); + } + None => {} } } - fn define_bindings_in_pat(&mut self, pat: Gc, succ: LiveNode) + fn define_bindings_in_pat(&mut self, pat: &Pat, succ: LiveNode) -> LiveNode { - self.define_bindings_in_arm_pats([pat], succ) + self.define_bindings_in_arm_pats(Some(pat), succ) } - fn define_bindings_in_arm_pats(&mut self, pats: &[Gc], succ: LiveNode) + fn define_bindings_in_arm_pats(&mut self, pat: Option<&Pat>, succ: LiveNode) -> LiveNode { let mut succ = succ; - self.arm_pats_bindings(pats, |this, ln, var, _sp, _id| { + self.arm_pats_bindings(pat, |this, ln, var, _sp, _id| { this.init_from_succ(ln, succ); this.define(ln, var); succ = ln; @@ -882,7 +880,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn propagate_through_block(&mut self, blk: &Block, succ: LiveNode) -> LiveNode { - let succ = self.propagate_through_opt_expr(blk.expr, succ); + let succ = self.propagate_through_opt_expr(blk.expr.as_ref().map(|e| &**e), succ); blk.stmts.iter().rev().fold(succ, |succ, stmt| { self.propagate_through_stmt(&**stmt, succ) }) @@ -931,11 +929,11 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // initialization, which is mildly more complex than checking // once at the func header but otherwise equivalent. 
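The liveness hunks above and below repeatedly convert `Option<P<Expr>>` fields into `Option<&Expr>` with `as_ref().map(|e| &**e)`, borrowing through the owning pointer without moving it. A small sketch of that conversion, with `Box` standing in for `P`:

```rust
struct Expr { id: u32 }

fn propagate_through_opt_expr(opt_expr: Option<&Expr>, succ: u32) -> u32 {
    opt_expr.map_or(succ, |e| e.id + succ)
}

fn main() {
    let init: Option<Box<Expr>> = Some(Box::new(Expr { id: 7 }));
    // `as_ref()` yields Option<&Box<Expr>>, and `&**e` reborrows through the box.
    let succ = propagate_through_opt_expr(init.as_ref().map(|e| &**e), 1);
    assert_eq!(succ, 8);
    // `init` is still owned and usable afterwards; nothing was moved or cloned.
    assert!(init.is_some());
}
```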
- let succ = self.propagate_through_opt_expr(local.init, succ); - self.define_bindings_in_pat(local.pat, succ) + let succ = self.propagate_through_opt_expr(local.init.as_ref().map(|e| &**e), succ); + self.define_bindings_in_pat(&*local.pat, succ) } - fn propagate_through_exprs(&mut self, exprs: &[Gc], succ: LiveNode) + fn propagate_through_exprs(&mut self, exprs: &[P], succ: LiveNode) -> LiveNode { exprs.iter().rev().fold(succ, |succ, expr| { self.propagate_through_expr(&**expr, succ) @@ -943,7 +941,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } fn propagate_through_opt_expr(&mut self, - opt_expr: Option>, + opt_expr: Option<&Expr>, succ: LiveNode) -> LiveNode { opt_expr.iter().fold(succ, |succ, expr| { @@ -1014,7 +1012,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // v v // ( succ ) // - let else_ln = self.propagate_through_opt_expr(els.clone(), succ); + let else_ln = self.propagate_through_opt_expr(els.as_ref().map(|e| &**e), succ); let then_ln = self.propagate_through_block(&**then, succ); let ln = self.live_node(expr.id, expr.span); self.init_from_succ(ln, else_ln); @@ -1023,10 +1021,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } ExprWhile(ref cond, ref blk, _) => { - self.propagate_through_loop(expr, - WhileLoop(cond.clone()), - &**blk, - succ) + self.propagate_through_loop(expr, WhileLoop(&**cond), &**blk, succ) } ExprForLoop(_, ref head, ref blk, _) => { @@ -1062,9 +1057,12 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let body_succ = self.propagate_through_expr(&*arm.body, succ); let guard_succ = - self.propagate_through_opt_expr(arm.guard, body_succ); + self.propagate_through_opt_expr(arm.guard.as_ref().map(|e| &**e), body_succ); + // only consider the first pattern; any later patterns must have + // the same bindings, and we also consider the first pattern to be + // the "authoritative" set of ids let arm_succ = - self.define_bindings_in_arm_pats(arm.pats.as_slice(), + self.define_bindings_in_arm_pats(arm.pats.as_slice().head().map(|p| &**p), guard_succ); self.merge_from_succ(ln, arm_succ, first_merge); first_merge = false; @@ -1072,10 +1070,10 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.propagate_through_expr(&**e, ln) } - ExprRet(o_e) => { + ExprRet(ref o_e) => { // ignore succ and subst exit_ln: let exit_ln = self.s.exit_ln; - self.propagate_through_opt_expr(o_e, exit_ln) + self.propagate_through_opt_expr(o_e.as_ref().map(|e| &**e), exit_ln) } ExprBreak(opt_label) => { @@ -1134,7 +1132,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { } ExprStruct(_, ref fields, ref with_expr) => { - let succ = self.propagate_through_opt_expr(with_expr.clone(), succ); + let succ = self.propagate_through_opt_expr(with_expr.as_ref().map(|e| &**e), succ); fields.iter().rev().fold(succ, |succ, field| { self.propagate_through_expr(&*field.expr, succ) }) @@ -1182,7 +1180,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { ExprIndex(ref l, ref r) | ExprBinary(_, ref l, ref r) | ExprBox(ref l, ref r) => { - self.propagate_through_exprs([l.clone(), r.clone()], succ) + let r_succ = self.propagate_through_expr(&**r, succ); + self.propagate_through_expr(&**l, r_succ) } ExprAddrOf(_, ref e) | @@ -1342,12 +1341,15 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let mut first_merge = true; let ln = self.live_node(expr.id, expr.span); self.init_empty(ln, succ); - if kind != LoopLoop { - // If this is not a `loop` loop, then it's possible we bypass - // the body altogether. Otherwise, the only way is via a `break` - // in the loop body. 
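The `if kind != LoopLoop` test above becomes a `match` because the reworked `LoopKind<'a>` carries a borrowed condition expression and no longer derives `PartialEq`. A sketch of that enum shape and the wildcard-arm test:

```rust
struct Expr;

enum LoopKind<'a> {
    LoopLoop,
    WhileLoop(&'a Expr),
    ForLoop,
}

fn may_skip_body(kind: &LoopKind) -> bool {
    match *kind {
        LoopKind::LoopLoop => false, // only a `break` can leave a bare `loop`
        _ => true,                   // `while`/`for` can bypass the body entirely
    }
}

fn main() {
    let cond = Expr;
    assert!(!may_skip_body(&LoopKind::LoopLoop));
    assert!(may_skip_body(&LoopKind::WhileLoop(&cond)));
    assert!(may_skip_body(&LoopKind::ForLoop));
}
```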
- self.merge_from_succ(ln, succ, first_merge); - first_merge = false; + match kind { + LoopLoop => {} + _ => { + // If this is not a `loop` loop, then it's possible we bypass + // the body altogether. Otherwise, the only way is via a `break` + // in the loop body. + self.merge_from_succ(ln, succ, first_merge); + first_merge = false; + } } debug!("propagate_through_loop: using id for loop body {} {}", expr.id, block_to_string(body)); @@ -1413,7 +1415,10 @@ fn check_local(this: &mut Liveness, local: &Local) { } fn check_arm(this: &mut Liveness, arm: &Arm) { - this.arm_pats_bindings(arm.pats.as_slice(), |this, ln, var, sp, id| { + // only consider the first pattern; any later patterns must have + // the same bindings, and we also consider the first pattern to be + // the "authoritative" set of ids + this.arm_pats_bindings(arm.pats.as_slice().head().map(|p| &**p), |this, ln, var, sp, id| { this.warn_about_unused(sp, id, ln, var); }); visit::walk_arm(this, arm); diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 0d3dd8f91d967..3e42ee9187c42 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -490,7 +490,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { } ast::ExprPath(_) => { - let def = self.tcx().def_map.borrow().get_copy(&expr.id); + let def = *self.tcx().def_map.borrow().get(&expr.id); self.cat_def(expr.id, expr.span, expr_ty, def) } @@ -1154,7 +1154,7 @@ impl<'t,'tcx,TYPER:Typer<'tcx>> MemCategorizationContext<'t,TYPER> { if_ok!(self.cat_pattern(subcmt, &**subpat, op)); } - ast::PatVec(ref before, slice, ref after) => { + ast::PatVec(ref before, ref slice, ref after) => { let elt_cmt = self.cat_index(pat, self.deref_vec(pat, cmt)); for before_pat in before.iter() { if_ok!(self.cat_pattern(elt_cmt.clone(), &**before_pat, diff --git a/src/librustc/middle/pat_util.rs b/src/librustc/middle/pat_util.rs index 727f5ad9385a6..ac1a62b185231 100644 --- a/src/librustc/middle/pat_util.rs +++ b/src/librustc/middle/pat_util.rs @@ -13,7 +13,6 @@ use middle::resolve; use middle::ty; use std::collections::HashMap; -use std::gc::{Gc, GC}; use syntax::ast::*; use syntax::ast_util::{walk_pat}; use syntax::codemap::{Span, DUMMY_SP}; @@ -115,17 +114,6 @@ pub fn simple_identifier<'a>(pat: &'a Pat) -> Option<&'a Ident> { } } -pub fn wild() -> Gc { - box (GC) Pat { id: 0, node: PatWild(PatWildSingle), span: DUMMY_SP } -} - -pub fn raw_pat(p: Gc) -> Gc { - match p.node { - PatIdent(_, _, Some(s)) => { raw_pat(s) } - _ => { p } - } -} - pub fn def_to_path(tcx: &ty::ctxt, id: DefId) -> Path { ty::with_path(tcx, id, |mut path| Path { global: false, diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index da957024b9a2e..feacbf84f6739 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -12,7 +12,6 @@ //! outside their scopes. This pass will also generate a set of exported items //! which are available for use externally when compiled as a library. -use std::gc::Gc; use std::mem::replace; use metadata::csearch; @@ -263,7 +262,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { if public_ty || public_trait { for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { let meth_public = match method.pe_explicit_self().node { ast::SelfStatic => public_ty, @@ -457,11 +456,10 @@ impl<'a, 'tcx> PrivacyVisitor<'a, 'tcx> { // invocation. 
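Note: `arm_pats_bindings` and `check_arm` above now look only at the first pattern of each arm (`arm.pats.as_slice().head()`, today's `first()`), relying on the rule that every `|`-alternative in an arm must bind the same names. A small sketch of that invariant with a hypothetical `Pat` type, not the real AST node:

    // Hypothetical, simplified pattern type: each pattern records the names it binds.
    struct Pat {
        bound_names: Vec<String>,
    }

    // Mirror of the liveness change: treat the first pattern of an arm as the
    // "authoritative" set of bindings, since the language requires every
    // alternative in one arm to bind exactly the same names.
    fn authoritative_bindings(arm_pats: &[Pat]) -> Option<&[String]> {
        arm_pats.first().map(|p| p.bound_names.as_slice())
    }

    fn main() {
        let arm = vec![
            Pat { bound_names: vec!["x".to_string()] },
            Pat { bound_names: vec!["x".to_string()] },
        ];
        assert_eq!(authoritative_bindings(&arm).unwrap()[0], "x");
        assert!(authoritative_bindings(&[]).is_none());
    }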
// FIXME(#10573) is this the right behavior? Why not consider // where the method was defined? - Some(ast_map::NodeImplItem(ref ii)) => { - match **ii { - ast::MethodImplItem(m) => { - let imp = self.tcx - .map + Some(ast_map::NodeImplItem(ii)) => { + match *ii { + ast::MethodImplItem(ref m) => { + let imp = self.tcx.map .get_parent_did(closest_private_id); match ty::impl_trait_ref(self.tcx, imp) { Some(..) => return Allowable, @@ -1108,7 +1106,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { impls"); for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(m) => { + ast::MethodImplItem(ref m) => { check_inherited(m.span, m.pe_vis(), ""); } } @@ -1169,7 +1167,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { tcx.sess.span_err(sp, "visibility has no effect inside functions"); } } - let check_struct = |def: &Gc| { + let check_struct = |def: &ast::StructDef| { for f in def.fields.iter() { match f.node.kind { ast::NamedField(_, p) => check_inherited(tcx, f.span, p), @@ -1182,7 +1180,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { ast::ItemImpl(_, _, _, ref impl_items) => { for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(m) => { + ast::MethodImplItem(ref m) => { check_inherited(tcx, m.span, m.pe_vis()); } } @@ -1198,13 +1196,13 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { check_inherited(tcx, v.span, v.node.vis); match v.node.kind { - ast::StructVariantKind(ref s) => check_struct(s), + ast::StructVariantKind(ref s) => check_struct(&**s), ast::TupleVariantKind(..) => {} } } } - ast::ItemStruct(ref def, _) => check_struct(def), + ast::ItemStruct(ref def, _) => check_struct(&**def), ast::ItemTrait(_, _, _, ref methods) => { for m in methods.iter() { @@ -1305,7 +1303,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { // (i.e. we could just return here to not check them at // all, or some worse estimation of whether an impl is // publicly visible. - ast::ItemImpl(ref g, ref trait_ref, self_, ref impl_items) => { + ast::ItemImpl(ref g, ref trait_ref, ref self_, ref impl_items) => { // `impl [... for] Private` is never visible. let self_contains_private; // impl [... for] Public<...>, but not `impl [... 
for] @@ -1320,7 +1318,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { at_outer_type: true, outer_type_is_public_path: false, }; - visitor.visit_ty(&*self_); + visitor.visit_ty(&**self_); self_contains_private = visitor.contains_private; self_is_public_path = visitor.outer_type_is_public_path; } @@ -1349,7 +1347,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { impl_items.iter() .any(|impl_item| { match *impl_item { - ast::MethodImplItem(m) => { + ast::MethodImplItem(ref m) => { self.exported_items.contains(&m.id) } } @@ -1365,8 +1363,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { None => { for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { - visit::walk_method_helper(self, &*method) + ast::MethodImplItem(ref method) => { + visit::walk_method_helper(self, &**method) } } } @@ -1393,13 +1391,13 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { let mut found_pub_static = false; for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { if method.pe_explicit_self().node == ast::SelfStatic && self.exported_items .contains(&method.id) { found_pub_static = true; - visit::walk_method_helper(self, &*method); + visit::walk_method_helper(self, &**method); } } } @@ -1487,8 +1485,10 @@ impl<'a, 'tcx, 'v> Visitor<'v> for VisiblePrivateTypesVisitor<'a, 'tcx> { pub fn check_crate(tcx: &ty::ctxt, exp_map2: &resolve::ExportMap2, external_exports: resolve::ExternalExports, - last_private_map: resolve::LastPrivateMap, - krate: &ast::Crate) -> (ExportedItems, PublicItems) { + last_private_map: resolve::LastPrivateMap) + -> (ExportedItems, PublicItems) { + let krate = tcx.map.krate(); + // Figure out who everyone's parent is let mut visitor = ParentVisitor { parents: NodeMap::new(), diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index d7cf25e7410e6..630b65f527878 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -201,7 +201,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } Some(ast_map::NodeImplItem(impl_item)) => { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { if generics_require_inlining(method.pe_generics()) || attributes_specify_inlining( method.attrs.as_slice()) { @@ -333,10 +333,10 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { } ast_map::NodeImplItem(impl_item) => { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { let did = self.tcx.map.get_parent_did(search_item); - if method_might_be_inlined(self.tcx, &*method, did) { - visit::walk_block(self, &*method.pe_body()) + if method_might_be_inlined(self.tcx, &**method, did) { + visit::walk_block(self, method.pe_body()) } } } diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index 45107d26f2f89..4f81aac5eb049 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -29,12 +29,12 @@ use util::common::can_reach; use std::cell::RefCell; use std::collections::{HashMap, HashSet}; -use std::gc::Gc; use syntax::codemap::Span; use syntax::{ast, visit}; -use syntax::visit::{Visitor, FnKind}; use syntax::ast::{Block, Item, FnDecl, NodeId, Arm, Pat, Stmt, Expr, Local}; use syntax::ast_util::{stmt_id}; +use syntax::ptr::P; +use syntax::visit::{Visitor, FnKind}; /** The region maps encode information about region relationships. 
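Note: most of the privacy.rs hunks follow one mechanical rule: once an impl item holds an owned `P<Method>` rather than a copyable `Gc<Method>`, match arms must bind it by reference (`ref method`) and reborrow the payload with `&**method`. A compact sketch of the pattern with placeholder types (`Box` stands in for `P`; these are not rustc's definitions):

    // Placeholder AST fragments; the real definitions live in libsyntax.
    struct Method {
        name: String,
    }

    enum ImplItem {
        MethodImplItem(Box<Method>), // was a copyable Gc<Method>, now an owned pointer
    }

    // With Gc the pointer could be copied out by value; with an owned pointer we
    // only need a borrow, so the arm binds with `ref` and reborrows the payload.
    fn method_name(item: &ImplItem) -> &str {
        match *item {
            ImplItem::MethodImplItem(ref method) => &method.name,
        }
    }

    fn main() {
        let item = ImplItem::MethodImplItem(Box::new(Method { name: "len".to_string() }));
        assert_eq!(method_name(&item), "len");
    }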
@@ -422,7 +422,7 @@ fn resolve_arm(visitor: &mut RegionResolutionVisitor, arm: &ast::Arm) { visitor.region_maps.mark_as_terminating_scope(arm.body.id); match arm.guard { - Some(expr) => { + Some(ref expr) => { visitor.region_maps.mark_as_terminating_scope(expr.id); } None => { } @@ -471,28 +471,28 @@ fn resolve_expr(visitor: &mut RegionResolutionVisitor, expr: &ast::Expr) { // scopes, meaning that temporaries cannot outlive them. // This ensures fixed size stacks. - ast::ExprBinary(ast::BiAnd, _, r) | - ast::ExprBinary(ast::BiOr, _, r) => { + ast::ExprBinary(ast::BiAnd, _, ref r) | + ast::ExprBinary(ast::BiOr, _, ref r) => { // For shortcircuiting operators, mark the RHS as a terminating // scope since it only executes conditionally. visitor.region_maps.mark_as_terminating_scope(r.id); } - ast::ExprIf(_, then, Some(otherwise)) => { + ast::ExprIf(_, ref then, Some(ref otherwise)) => { visitor.region_maps.mark_as_terminating_scope(then.id); visitor.region_maps.mark_as_terminating_scope(otherwise.id); } - ast::ExprIf(expr, then, None) => { + ast::ExprIf(ref expr, ref then, None) => { visitor.region_maps.mark_as_terminating_scope(expr.id); visitor.region_maps.mark_as_terminating_scope(then.id); } - ast::ExprLoop(body, _) => { + ast::ExprLoop(ref body, _) => { visitor.region_maps.mark_as_terminating_scope(body.id); } - ast::ExprWhile(expr, body, _) => { + ast::ExprWhile(ref expr, ref body, _) => { visitor.region_maps.mark_as_terminating_scope(expr.id); visitor.region_maps.mark_as_terminating_scope(body.id); } @@ -776,7 +776,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &ast::Local) { ast::ExprTupField(ref subexpr, _, _) | ast::ExprIndex(ref subexpr, _) | ast::ExprParen(ref subexpr) => { - let subexpr: &'a Gc = subexpr; // FIXME(#11586) + let subexpr: &'a P = subexpr; // FIXME(#11586) expr = &**subexpr; } _ => { diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index ed795ff0aacc4..64ae2776ccc34 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -30,7 +30,7 @@ use syntax::ast::{Ident, ImplItem, Item, ItemEnum, ItemFn, ItemForeignMod}; use syntax::ast::{ItemImpl, ItemMac, ItemMod, ItemStatic, ItemStruct}; use syntax::ast::{ItemTrait, ItemTy, LOCAL_CRATE, Local, Method}; use syntax::ast::{MethodImplItem, Mod, Name, NamedField, NodeId}; -use syntax::ast::{P, Pat, PatEnum, PatIdent, PatLit}; +use syntax::ast::{Pat, PatEnum, PatIdent, PatLit}; use syntax::ast::{PatRange, PatStruct, Path, PathListIdent, PathListMod}; use syntax::ast::{PrimTy, Public, SelfExplicit, SelfStatic}; use syntax::ast::{RegionTyParamBound, StmtDecl, StructField}; @@ -43,8 +43,7 @@ use syntax::ast::{UnboxedFnTyParamBound, UnnamedField, UnsafeFn, Variant}; use syntax::ast::{ViewItem, ViewItemExternCrate, ViewItemUse, ViewPathGlob}; use syntax::ast::{ViewPathList, ViewPathSimple, Visibility}; use syntax::ast; -use syntax::ast_util::{PostExpansionMethod, local_def}; -use syntax::ast_util::{trait_item_to_ty_method, walk_pat}; +use syntax::ast_util::{PostExpansionMethod, local_def, walk_pat}; use syntax::attr::AttrMetaMethods; use syntax::ext::mtwt; use syntax::parse::token::special_names; @@ -52,6 +51,7 @@ use syntax::parse::token::special_idents; use syntax::parse::token; use syntax::codemap::{Span, DUMMY_SP, Pos}; use syntax::owned_slice::OwnedSlice; +use syntax::ptr::P; use syntax::visit; use syntax::visit::Visitor; @@ -1164,7 +1164,7 @@ impl<'a> Resolver<'a> { // Check each statement. 
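Note: the `resolve_local` loop just above keeps reassigning `expr = &**subexpr` to walk into nested sub-expressions without recursion (the explicit `&'a P<Expr>` annotation is only there to placate issue #11586). A sketch of the same iterative descent over a borrowed tree, using a simplified `Expr` of my own rather than the real one:

    // Simplified expression tree: parenthesized or field-projected sub-expressions
    // wrap an owned child; everything else is a leaf.
    enum Expr {
        Paren(Box<Expr>),
        Field(Box<Expr>, &'static str),
        Leaf(i32),
    }

    // Iteratively peel wrappers off a borrowed expression, reborrowing the child
    // each time, the way resolve_local walks down to the innermost rvalue.
    fn innermost(mut expr: &Expr) -> &Expr {
        loop {
            expr = match *expr {
                Expr::Paren(ref sub) | Expr::Field(ref sub, _) => &**sub,
                _ => return expr,
            };
        }
    }

    fn main() {
        let e = Expr::Paren(Box::new(Expr::Field(Box::new(Expr::Leaf(3)), "x")));
        match *innermost(&e) {
            Expr::Leaf(n) => assert_eq!(n, 3),
            _ => unreachable!(),
        }
    }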
for statement in block.stmts.iter() { match statement.node { - StmtDecl(declaration, _) => { + StmtDecl(ref declaration, _) => { match declaration.node { DeclItem(_) => { return true; @@ -1277,7 +1277,7 @@ impl<'a> Resolver<'a> { } // These items live in both the type and value namespaces. - ItemStruct(struct_def, _) => { + ItemStruct(ref struct_def, _) => { // Adding to both Type and Value namespaces or just Type? let (forbid, ctor_id) = match struct_def.ctor_id { Some(ctor_id) => (ForbidDuplicateTypesAndValues, Some(ctor_id)), @@ -1309,7 +1309,7 @@ impl<'a> Resolver<'a> { parent } - ItemImpl(_, None, ty, ref impl_items) => { + ItemImpl(_, None, ref ty, ref impl_items) => { // If this implements an anonymous trait, then add all the // methods within to a new module, if the type was defined // within this module. @@ -1364,7 +1364,7 @@ impl<'a> Resolver<'a> { // For each implementation item... for impl_item in impl_items.iter() { match *impl_item { - MethodImplItem(method) => { + MethodImplItem(ref method) => { // Add the method to the module. let ident = method.pe_ident(); let method_name_bindings = @@ -1430,37 +1430,42 @@ impl<'a> Resolver<'a> { // Add the names of all the methods to the trait info. for method in methods.iter() { - let ty_m = trait_item_to_ty_method(method); - - let ident = ty_m.ident; + let (m_id, m_ident, m_fn_style, m_self, m_span) = match *method { + ast::RequiredMethod(ref m) => { + (m.id, m.ident, m.fn_style, &m.explicit_self, m.span) + } + ast::ProvidedMethod(ref m) => { + (m.id, m.pe_ident(), m.pe_fn_style(), m.pe_explicit_self(), m.span) + } + }; // Add it as a name in the trait module. - let (def, static_flag) = match ty_m.explicit_self.node { + let (def, static_flag) = match m_self.node { SelfStatic => { // Static methods become `def_static_method`s. - (DefStaticMethod(local_def(ty_m.id), + (DefStaticMethod(local_def(m_id), FromTrait(local_def(item.id)), - ty_m.fn_style), + m_fn_style), StaticMethodTraitItemKind) } _ => { // Non-static methods become `def_method`s. - (DefMethod(local_def(ty_m.id), + (DefMethod(local_def(m_id), Some(local_def(item.id))), NonstaticMethodTraitItemKind) } }; let method_name_bindings = - self.add_child(ident, + self.add_child(m_ident, module_parent.clone(), ForbidDuplicateValues, - ty_m.span); - method_name_bindings.define_value(def, ty_m.span, true); + m_span); + method_name_bindings.define_value(def, m_span, true); self.trait_item_map .borrow_mut() - .insert((ident.name, def_id), static_flag); + .insert((m_ident.name, def_id), static_flag); } name_bindings.define_type(DefTrait(def_id), sp, is_public); @@ -4068,7 +4073,7 @@ impl<'a> Resolver<'a> { ItemStruct(ref struct_def, ref generics) => { self.resolve_struct(item.id, generics, - struct_def.super_struct, + &struct_def.super_struct, struct_def.fields.as_slice()); } @@ -4100,15 +4105,15 @@ impl<'a> Resolver<'a> { }); } - ItemFn(fn_decl, _, _, ref generics, block) => { + ItemFn(ref fn_decl, _, _, ref generics, ref block) => { self.resolve_function(ItemRibKind, - Some(fn_decl), + Some(&**fn_decl), HasTypeParameters (generics, FnSpace, item.id, ItemRibKind), - block); + &**block); } ItemStatic(..) => { @@ -4179,9 +4184,9 @@ impl<'a> Resolver<'a> { fn resolve_function(&mut self, rib_kind: RibKind, - optional_declaration: Option>, + optional_declaration: Option<&FnDecl>, type_parameters: TypeParameters, - block: P) { + block: &Block) { // Create a value rib for the function. 
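Note: rather than converting every trait method to a `TypeMethod` via `trait_item_to_ty_method` (which had to clone the node), the resolver above destructures `RequiredMethod`/`ProvidedMethod` in place and pulls out just the fields it needs as a tuple. A small sketch of that extraction over made-up method types (names are illustrative only):

    // Made-up simplified trait items: a required method has only a signature,
    // a provided method also carries a default body.
    struct TypeMethod {
        id: u32,
        name: &'static str,
    }

    struct Method {
        id: u32,
        name: &'static str,
        body: &'static str,
    }

    enum TraitItem {
        RequiredMethod(TypeMethod),
        ProvidedMethod(Method),
    }

    // Pull out only the fields the resolver needs, without building a new node.
    fn id_and_name(item: &TraitItem) -> (u32, &'static str) {
        match *item {
            TraitItem::RequiredMethod(ref m) => (m.id, m.name),
            TraitItem::ProvidedMethod(ref m) => (m.id, m.name),
        }
    }

    fn main() {
        let item = TraitItem::ProvidedMethod(Method { id: 3, name: "len", body: "{ 0 }" });
        assert_eq!(id_and_name(&item), (3, "len"));
    }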
let function_value_rib = Rib::new(rib_kind); self.value_ribs.borrow_mut().push(function_value_rib); @@ -4357,7 +4362,7 @@ impl<'a> Resolver<'a> { fn resolve_struct(&mut self, id: NodeId, generics: &Generics, - super_struct: Option>, + super_struct: &Option>, fields: &[StructField]) { // If applicable, create a rib for the type parameters. self.with_type_parameter_rib(HasTypeParameters(generics, @@ -4370,8 +4375,8 @@ impl<'a> Resolver<'a> { this.resolve_where_clause(&generics.where_clause); // Resolve the super struct. - match super_struct { - Some(t) => match t.node { + match *super_struct { + Some(ref t) => match t.node { TyPath(ref path, None, path_id) => { match this.resolve_path(id, path, TypeNS, true) { Some((DefTy(def_id), lp)) if this.structs.contains_key(&def_id) => { @@ -4489,7 +4494,7 @@ impl<'a> Resolver<'a> { this.with_current_self_type(self_type, |this| { for impl_item in impl_items.iter() { match *impl_item { - MethodImplItem(method) => { + MethodImplItem(ref method) => { // If this is a trait impl, ensure the method // exists in trait this.check_trait_item(method.pe_ident(), @@ -4500,7 +4505,7 @@ impl<'a> Resolver<'a> { this.resolve_method( MethodRibKind(id, ProvidedMethod(method.id)), - &*method); + &**method); } } } @@ -4738,7 +4743,7 @@ impl<'a> Resolver<'a> { }); } - TyClosure(c) | TyProc(c) => { + TyClosure(ref c) | TyProc(ref c) => { self.resolve_type_parameter_bounds(ty.id, &c.bounds, TraitBoundingTypeParameter); visit::walk_ty(self, ty); @@ -4775,7 +4780,7 @@ impl<'a> Resolver<'a> { let renamed = mtwt::resolve(ident); match self.resolve_bare_identifier_pattern(ident, pattern.span) { - FoundStructOrEnumVariant(def, lp) + FoundStructOrEnumVariant(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ struct or enum variant", @@ -4785,7 +4790,7 @@ impl<'a> Resolver<'a> { pattern, binding_mode, "an enum variant"); - self.record_def(pattern.id, (def, lp)); + self.record_def(pattern.id, (def.clone(), lp)); } FoundStructOrEnumVariant(..) => { self.resolve_error( @@ -4795,7 +4800,7 @@ impl<'a> Resolver<'a> { scope", token::get_name(renamed)).as_slice()); } - FoundConst(def, lp) if mode == RefutableMode => { + FoundConst(ref def, lp) if mode == RefutableMode => { debug!("(resolving pattern) resolving `{}` to \ constant", token::get_name(renamed)); @@ -4804,7 +4809,7 @@ impl<'a> Resolver<'a> { pattern, binding_mode, "a constant"); - self.record_def(pattern.id, (def, lp)); + self.record_def(pattern.id, (def.clone(), lp)); } FoundConst(..) 
=> { self.resolve_error(pattern.span, @@ -5024,7 +5029,7 @@ impl<'a> Resolver<'a> { if path.segments.len() > 1 { let def = self.resolve_module_relative_path(path, namespace); match (def, unqualified_def) { - (Some((d, _)), Some((ud, _))) if d == ud => { + (Some((ref d, _)), Some((ref ud, _))) if *d == *ud => { self.session .add_lint(lint::builtin::UNNECESSARY_QUALIFICATION, id, @@ -5386,8 +5391,8 @@ impl<'a> Resolver<'a> { -> Option<(Path, NodeId, FallbackChecks)> { match t.node { TyPath(ref path, _, node_id) => Some((path.clone(), node_id, allow)), - TyPtr(mut_ty) => extract_path_and_node_id(&*mut_ty.ty, OnlyTraitAndStatics), - TyRptr(_, mut_ty) => extract_path_and_node_id(&*mut_ty.ty, allow), + TyPtr(ref mut_ty) => extract_path_and_node_id(&*mut_ty.ty, OnlyTraitAndStatics), + TyRptr(_, ref mut_ty) => extract_path_and_node_id(&*mut_ty.ty, allow), // This doesn't handle the remaining `Ty` variants as they are not // that commonly the self_type, it might be interesting to provide // support for those in future. @@ -5647,12 +5652,12 @@ impl<'a> Resolver<'a> { visit::walk_expr(self, expr); } - ExprFnBlock(_, fn_decl, block) | - ExprProc(fn_decl, block) | - ExprUnboxedFn(_, _, fn_decl, block) => { + ExprFnBlock(_, ref fn_decl, ref block) | + ExprProc(ref fn_decl, ref block) | + ExprUnboxedFn(_, _, ref fn_decl, ref block) => { self.resolve_function(FunctionRibKind(expr.id, block.id), - Some(fn_decl), NoTypeParameters, - block); + Some(&**fn_decl), NoTypeParameters, + &**block); } ExprStruct(ref path, _, _) => { diff --git a/src/librustc/middle/save/mod.rs b/src/librustc/middle/save/mod.rs index 1d4050c71b18a..04e04efd93c26 100644 --- a/src/librustc/middle/save/mod.rs +++ b/src/librustc/middle/save/mod.rs @@ -35,7 +35,6 @@ use middle::ty; use middle::typeck; use std::cell::Cell; -use std::gc::Gc; use std::io; use std::io::File; use std::io::fs; @@ -54,6 +53,7 @@ use syntax::owned_slice::OwnedSlice; use syntax::visit; use syntax::visit::Visitor; use syntax::print::pprust::{path_to_string,ty_to_string}; +use syntax::ptr::P; use middle::save::span_utils::SpanUtils; use middle::save::recorder::Recorder; @@ -289,9 +289,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { NodeItem(item) => { scope_id = item.id; match item.node { - ast::ItemImpl(_, _, ty, _) => { + ast::ItemImpl(_, _, ref ty, _) => { let mut result = String::from_str("<"); - result.push_str(ty_to_string(&*ty).as_slice()); + result.push_str(ty_to_string(&**ty).as_slice()); match ty::trait_of_item(&self.analysis.ty_cx, ast_util::local_def(method.id)) { @@ -466,9 +466,9 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn process_fn(&mut self, item: &ast::Item, - decl: ast::P, + decl: &ast::FnDecl, ty_params: &ast::Generics, - body: ast::P) { + body: &ast::Block) { let qualname = self.analysis.ty_cx.map.path_to_string(item.id); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Fn); @@ -494,7 +494,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn process_static(&mut self, item: &ast::Item, - typ: ast::P, + typ: &ast::Ty, mt: ast::Mutability, expr: &ast::Expr) { @@ -611,7 +611,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { item: &ast::Item, type_parameters: &ast::Generics, trait_ref: &Option, - typ: ast::P, + typ: &ast::Ty, impl_items: &Vec) { match typ.node { ast::TyPath(ref path, _, id) => { @@ -643,8 +643,8 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.process_generic_params(type_parameters, item.span, "", item.id); for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { - visit::walk_method_helper(self, 
&*method) + ast::MethodImplItem(ref method) => { + visit::walk_method_helper(self, &**method) } } } @@ -833,7 +833,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { ex: &ast::Expr, path: &ast::Path, fields: &Vec, - base: &Option>) { + base: &Option>) { if generated_code(path.span) { return } @@ -883,7 +883,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { fn process_method_call(&mut self, ex: &ast::Expr, - args: &Vec>) { + args: &Vec>) { let method_map = self.analysis.ty_cx.method_map.borrow(); let method_callee = method_map.get(&typeck::MethodCall::expr(ex.id)); let (def_id, decl_id) = match method_callee.origin { @@ -1010,7 +1010,7 @@ impl <'l, 'tcx> DxrVisitor<'l, 'tcx> { self.collected_paths.push((p.id, path, immut, recorder::VarRef)); match *optional_subpattern { None => {} - Some(subpattern) => self.visit_pat(&*subpattern), + Some(ref subpattern) => self.visit_pat(&**subpattern) } } _ => visit::walk_pat(self, p) @@ -1025,28 +1025,28 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } match item.node { - ast::ItemFn(decl, _, _, ref ty_params, body) => - self.process_fn(item, decl, ty_params, body), - ast::ItemStatic(typ, mt, expr) => - self.process_static(item, typ, mt, &*expr), - ast::ItemStruct(def, ref ty_params) => self.process_struct(item, &*def, ty_params), + ast::ItemFn(ref decl, _, _, ref ty_params, ref body) => + self.process_fn(item, &**decl, ty_params, &**body), + ast::ItemStatic(ref typ, mt, ref expr) => + self.process_static(item, &**typ, mt, &**expr), + ast::ItemStruct(ref def, ref ty_params) => self.process_struct(item, &**def, ty_params), ast::ItemEnum(ref def, ref ty_params) => self.process_enum(item, def, ty_params), ast::ItemImpl(ref ty_params, ref trait_ref, - typ, + ref typ, ref impl_items) => { self.process_impl(item, ty_params, trait_ref, - typ, + &**typ, impl_items) } ast::ItemTrait(ref generics, _, ref trait_refs, ref methods) => self.process_trait(item, generics, trait_refs, methods), ast::ItemMod(ref m) => self.process_mod(item, m), - ast::ItemTy(ty, ref ty_params) => { + ast::ItemTy(ref ty, ref ty_params) => { let qualname = self.analysis.ty_cx.map.path_to_string(item.id); - let value = ty_to_string(&*ty); + let value = ty_to_string(&**ty); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Type); self.fmt.typedef_str(item.span, sub_span, @@ -1054,7 +1054,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { qualname.as_slice(), value.as_slice()); - self.visit_ty(&*ty); + self.visit_ty(&**ty); self.process_generic_params(ty_params, item.span, qualname.as_slice(), item.id); }, ast::ItemMac(_) => (), @@ -1073,8 +1073,8 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } } match param.default { - Some(ty) => self.visit_ty(&*ty), - None => (), + Some(ref ty) => self.visit_ty(&**ty), + None => {} } } } @@ -1139,7 +1139,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { qualname, method_type.id); } - ast::ProvidedMethod(method) => self.process_method(&*method), + ast::ProvidedMethod(ref method) => self.process_method(&**method) } } @@ -1269,7 +1269,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { } match ex.node { - ast::ExprCall(_f, ref _args) => { + ast::ExprCall(ref _f, ref _args) => { // Don't need to do anything for function calls, // because just walking the callee path does what we want. 
visit::walk_expr(self, ex); @@ -1278,14 +1278,14 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { ast::ExprStruct(ref path, ref fields, ref base) => self.process_struct_lit(ex, path, fields, base), ast::ExprMethodCall(_, _, ref args) => self.process_method_call(ex, args), - ast::ExprField(sub_ex, ident, _) => { + ast::ExprField(ref sub_ex, ident, _) => { if generated_code(sub_ex.span) { return } - self.visit_expr(&*sub_ex); + self.visit_expr(&**sub_ex); - let t = ty::expr_ty_adjusted(&self.analysis.ty_cx, &*sub_ex); + let t = ty::expr_ty_adjusted(&self.analysis.ty_cx, &**sub_ex); let t_box = ty::get(t); match t_box.sty { ty::ty_struct(def_id, _) => { @@ -1306,14 +1306,14 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { "Expected struct type, but not ty_struct"), } }, - ast::ExprTupField(sub_ex, idx, _) => { + ast::ExprTupField(ref sub_ex, idx, _) => { if generated_code(sub_ex.span) { return } - self.visit_expr(&*sub_ex); + self.visit_expr(&**sub_ex); - let t = ty::expr_ty_adjusted(&self.analysis.ty_cx, &*sub_ex); + let t = ty::expr_ty_adjusted(&self.analysis.ty_cx, &**sub_ex); let t_box = ty::get(t); match t_box.sty { ty::ty_struct(def_id, _) => { @@ -1334,7 +1334,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { "Expected struct type, but not ty_struct"), } }, - ast::ExprFnBlock(_, decl, body) => { + ast::ExprFnBlock(_, ref decl, ref body) => { if generated_code(body.span) { return } @@ -1349,7 +1349,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DxrVisitor<'l, 'tcx> { self.visit_ty(&*decl.output); // walk the body - self.nest(ex.id, |v| v.visit_block(&*body)); + self.nest(ex.id, |v| v.visit_block(&**body)); }, _ => { visit::walk_expr(self, ex) diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs index dd0668b8fa0e7..755ff991fb60a 100644 --- a/src/librustc/middle/trans/_match.rs +++ b/src/librustc/middle/trans/_match.rs @@ -218,24 +218,20 @@ use util::ppaux::{Repr, vec_map_to_string}; use std; use std::collections::HashMap; -use std::gc::{Gc}; use std::rc::Rc; use syntax::ast; -use syntax::ast::Ident; +use syntax::ast::{DUMMY_NODE_ID, Ident}; use syntax::codemap::Span; use syntax::fold::Folder; +use syntax::ptr::P; -struct ConstantExpr<'a, 'tcx: 'a>(&'a ty::ctxt<'tcx>, Gc); +struct ConstantExpr<'a>(&'a ast::Expr); -impl<'a, 'tcx> Eq for ConstantExpr<'a, 'tcx> { - fn assert_receiver_is_total_eq(&self) {} -} - -impl<'a, 'tcx> PartialEq for ConstantExpr<'a, 'tcx> { - fn eq(&self, other: &ConstantExpr<'a, 'tcx>) -> bool { - let &ConstantExpr(tcx, expr) = self; - let &ConstantExpr(_, other_expr) = other; - match const_eval::compare_lit_exprs(tcx, &*expr, &*other_expr) { +impl<'a> ConstantExpr<'a> { + fn eq(self, other: ConstantExpr<'a>, tcx: &ty::ctxt) -> bool { + let ConstantExpr(expr) = self; + let ConstantExpr(other_expr) = other; + match const_eval::compare_lit_exprs(tcx, expr, other_expr) { Some(val1) => val1 == 0, None => fail!("compare_list_exprs: type mismatch"), } @@ -243,30 +239,44 @@ impl<'a, 'tcx> PartialEq for ConstantExpr<'a, 'tcx> { } // An option identifying a branch (either a literal, an enum variant or a range) -#[deriving(Eq, PartialEq)] -enum Opt<'blk, 'tcx: 'blk> { - ConstantValue(ConstantExpr<'blk, 'tcx>), - ConstantRange(ConstantExpr<'blk, 'tcx>, ConstantExpr<'blk, 'tcx>), +enum Opt<'a> { + ConstantValue(ConstantExpr<'a>), + ConstantRange(ConstantExpr<'a>, ConstantExpr<'a>), Variant(ty::Disr, Rc, ast::DefId), SliceLengthEqual(uint), SliceLengthGreaterOrEqual(/* prefix length */ uint, /* suffix length */ 
uint), } -impl<'blk, 'tcx> Opt<'blk, 'tcx> { - fn trans(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> { +impl<'a> Opt<'a> { + fn eq(&self, other: &Opt<'a>, tcx: &ty::ctxt) -> bool { + match (self, other) { + (&ConstantValue(a), &ConstantValue(b)) => a.eq(b, tcx), + (&ConstantRange(a1, a2), &ConstantRange(b1, b2)) => { + a1.eq(b1, tcx) && a2.eq(b2, tcx) + } + (&Variant(a_disr, ref a_repr, a_def), &Variant(b_disr, ref b_repr, b_def)) => { + a_disr == b_disr && *a_repr == *b_repr && a_def == b_def + } + (&SliceLengthEqual(a), &SliceLengthEqual(b)) => a == b, + (&SliceLengthGreaterOrEqual(a1, a2), &SliceLengthGreaterOrEqual(b1, b2)) => { + a1 == b1 && a2 == b2 + } + _ => false + } + } + + fn trans<'blk, 'tcx>(&self, mut bcx: Block<'blk, 'tcx>) -> OptResult<'blk, 'tcx> { let _icx = push_ctxt("match::trans_opt"); let ccx = bcx.ccx(); match *self { - ConstantValue(ConstantExpr(_, lit_expr)) => { + ConstantValue(ConstantExpr(lit_expr)) => { let lit_ty = ty::node_id_to_type(bcx.tcx(), lit_expr.id); let (llval, _, _) = consts::const_expr(ccx, &*lit_expr, true); let lit_datum = immediate_rvalue(llval, lit_ty); let lit_datum = unpack_datum!(bcx, lit_datum.to_appropriate_datum(bcx)); SingleResult(Result::new(bcx, lit_datum.val)) } - ConstantRange( - ConstantExpr(_, ref l1), - ConstantExpr(_, ref l2)) => { + ConstantRange(ConstantExpr(ref l1), ConstantExpr(ref l2)) => { let (l1, _, _) = consts::const_expr(ccx, &**l1, true); let (l2, _, _) = consts::const_expr(ccx, &**l2, true); RangeResult(Result::new(bcx, l1), Result::new(bcx, l2)) @@ -325,9 +335,9 @@ pub struct BindingInfo { type BindingsMap = HashMap; -struct ArmData<'a, 'blk, 'tcx: 'blk> { +struct ArmData<'p, 'blk, 'tcx: 'blk> { bodycx: Block<'blk, 'tcx>, - arm: &'a ast::Arm, + arm: &'p ast::Arm, bindings_map: BindingsMap } @@ -337,13 +347,13 @@ struct ArmData<'a, 'blk, 'tcx: 'blk> { * As we proceed `bound_ptrs` are filled with pointers to values to be bound, * these pointers are stored in llmatch variables just before executing `data` arm. 
*/ -struct Match<'a, 'blk: 'a, 'tcx: 'blk> { - pats: Vec>, - data: &'a ArmData<'a, 'blk, 'tcx>, +struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> { + pats: Vec<&'p ast::Pat>, + data: &'a ArmData<'p, 'blk, 'tcx>, bound_ptrs: Vec<(Ident, ValueRef)> } -impl<'a, 'blk, 'tcx> Repr for Match<'a, 'blk, 'tcx> { +impl<'a, 'p, 'blk, 'tcx> Repr for Match<'a, 'p, 'blk, 'tcx> { fn repr(&self, tcx: &ty::ctxt) -> String { if tcx.sess.verbose() { // for many programs, this just take too long to serialize @@ -364,11 +374,11 @@ fn has_nested_bindings(m: &[Match], col: uint) -> bool { return false; } -fn expand_nested_bindings<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - m: &'a [Match<'a, 'blk, 'tcx>], - col: uint, - val: ValueRef) - -> Vec> { +fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + m: &[Match<'a, 'p, 'blk, 'tcx>], + col: uint, + val: ValueRef) + -> Vec> { debug!("expand_nested_bindings(bcx={}, m={}, col={}, val={})", bcx.to_str(), m.repr(bcx.tcx()), @@ -381,9 +391,9 @@ fn expand_nested_bindings<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let mut pat = *br.pats.get(col); loop { pat = match pat.node { - ast::PatIdent(_, ref path, Some(inner)) => { + ast::PatIdent(_, ref path, Some(ref inner)) => { bound_ptrs.push((path.node, val)); - inner.clone() + &**inner }, _ => break } @@ -399,15 +409,15 @@ fn expand_nested_bindings<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }).collect() } -type EnterPatterns<'a> = |&[Gc]|: 'a -> Option>>; +type EnterPatterns<'a> = <'p> |&[&'p ast::Pat]|: 'a -> Option>; -fn enter_match<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - dm: &DefMap, - m: &'a [Match<'a, 'blk, 'tcx>], - col: uint, - val: ValueRef, - e: EnterPatterns) - -> Vec> { +fn enter_match<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + dm: &DefMap, + m: &[Match<'a, 'p, 'blk, 'tcx>], + col: uint, + val: ValueRef, + e: EnterPatterns) + -> Vec> { debug!("enter_match(bcx={}, m={}, col={}, val={})", bcx.to_str(), m.repr(bcx.tcx()), @@ -425,7 +435,7 @@ fn enter_match<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bound_ptrs.push((path.node, val)); } } - ast::PatVec(ref before, Some(slice), ref after) => { + ast::PatVec(ref before, Some(ref slice), ref after) => { match slice.node { ast::PatIdent(_, ref path, None) => { let subslice_val = bind_subslice_pat( @@ -438,7 +448,6 @@ fn enter_match<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } _ => {} } - Match { pats: pats, data: br.data, @@ -448,12 +457,12 @@ fn enter_match<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }).collect() } -fn enter_default<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - dm: &DefMap, - m: &'a [Match<'a, 'blk, 'tcx>], - col: uint, - val: ValueRef) - -> Vec> { +fn enter_default<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + dm: &DefMap, + m: &[Match<'a, 'p, 'blk, 'tcx>], + col: uint, + val: ValueRef) + -> Vec> { debug!("enter_default(bcx={}, m={}, col={}, val={})", bcx.to_str(), m.repr(bcx.tcx()), @@ -499,16 +508,16 @@ fn enter_default<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, /// takes the complete row of patterns rather than just the first one. /// Also, most of the enter_() family functions have been unified with /// the check_match specialization step. 
-fn enter_opt<'a, 'blk, 'tcx>( +fn enter_opt<'a, 'p, 'blk, 'tcx>( bcx: Block<'blk, 'tcx>, _: ast::NodeId, dm: &DefMap, - m: &'a [Match<'a, 'blk, 'tcx>], + m: &[Match<'a, 'p, 'blk, 'tcx>], opt: &Opt, col: uint, variant_size: uint, val: ValueRef) - -> Vec> { + -> Vec> { debug!("enter_opt(bcx={}, m={}, opt={:?}, col={}, val={})", bcx.to_str(), m.repr(bcx.tcx()), @@ -518,10 +527,10 @@ fn enter_opt<'a, 'blk, 'tcx>( let _indenter = indenter(); let ctor = match opt { - &ConstantValue(ConstantExpr(_, expr)) => check_match::ConstantValue( + &ConstantValue(ConstantExpr(expr)) => check_match::ConstantValue( const_eval::eval_const_expr(bcx.tcx(), &*expr) ), - &ConstantRange(ConstantExpr(_, lo), ConstantExpr(_, hi)) => check_match::ConstantRange( + &ConstantRange(ConstantExpr(lo), ConstantExpr(hi)) => check_match::ConstantRange( const_eval::eval_const_expr(bcx.tcx(), &*lo), const_eval::eval_const_expr(bcx.tcx(), &*hi) ), @@ -542,51 +551,41 @@ fn enter_opt<'a, 'blk, 'tcx>( // Returns the options in one column of matches. An option is something that // needs to be conditionally matched at runtime; for example, the discriminant // on a set of enum variants or a literal. -fn get_branches<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - m: &[Match], col: uint) - -> Vec> { - let ccx = bcx.ccx(); - - fn add_to_set<'blk, 'tcx>(set: &mut Vec>, opt: Opt<'blk, 'tcx>) { - if !set.contains(&opt) { - set.push(opt); - } - } +fn get_branches<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + m: &[Match<'a, 'p, 'blk, 'tcx>], col: uint) + -> Vec> { + let tcx = bcx.tcx(); - let mut found = Vec::new(); + let mut found: Vec = vec![]; for (i, br) in m.iter().enumerate() { let cur = *br.pats.get(col); - match cur.node { - ast::PatLit(l) => { - add_to_set(&mut found, ConstantValue(ConstantExpr(ccx.tcx(), l))); - } + let opt = match cur.node { + ast::PatLit(ref l) => ConstantValue(ConstantExpr(&**l)), ast::PatIdent(..) | ast::PatEnum(..) | ast::PatStruct(..) => { // This is either an enum variant or a variable binding. 
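Note: since `Opt` equality now needs the `ty::ctxt`, `get_branches` can no longer deduplicate with `Vec::contains` (the removed `add_to_set`); it checks `iter().any(|x| x.eq(&opt, tcx))` instead. A minimal sketch of that push-if-absent loop under a context-aware equality (hypothetical `Ctxt` and `Opt`):

    // Hypothetical context and branch option; equality is only meaningful
    // relative to the context, as with Opt::eq(&self, other, tcx) above.
    struct Ctxt;

    struct Opt(u32);

    impl Opt {
        fn eq(&self, other: &Opt, _cx: &Ctxt) -> bool {
            self.0 == other.0
        }
    }

    // Push `opt` only if no existing element compares equal under the context,
    // mirroring the dedup at the end of get_branches.
    fn add_branch(found: &mut Vec<Opt>, opt: Opt, cx: &Ctxt) {
        if !found.iter().any(|x| x.eq(&opt, cx)) {
            found.push(opt);
        }
    }

    fn main() {
        let cx = Ctxt;
        let mut found = Vec::new();
        add_branch(&mut found, Opt(1), &cx);
        add_branch(&mut found, Opt(1), &cx);
        add_branch(&mut found, Opt(2), &cx);
        assert_eq!(found.len(), 2);
    }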
- let opt_def = ccx.tcx().def_map.borrow().find_copy(&cur.id); + let opt_def = tcx.def_map.borrow().find_copy(&cur.id); match opt_def { Some(def::DefVariant(enum_id, var_id, _)) => { - let variant = ty::enum_variant_with_id(ccx.tcx(), enum_id, var_id); - add_to_set(&mut found, Variant( - variant.disr_val, - adt::represent_node(bcx, cur.id), var_id - )); + let variant = ty::enum_variant_with_id(tcx, enum_id, var_id); + Variant(variant.disr_val, adt::represent_node(bcx, cur.id), var_id) } - _ => {} + _ => continue } } - ast::PatRange(l1, l2) => { - add_to_set(&mut found, ConstantRange( - ConstantExpr(ccx.tcx(), l1), - ConstantExpr(ccx.tcx(), l2) - )); + ast::PatRange(ref l1, ref l2) => { + ConstantRange(ConstantExpr(&**l1), ConstantExpr(&**l2)) } ast::PatVec(ref before, None, ref after) => { - add_to_set(&mut found, SliceLengthEqual(before.len() + after.len())); + SliceLengthEqual(before.len() + after.len()) } ast::PatVec(ref before, Some(_), ref after) => { - add_to_set(&mut found, SliceLengthGreaterOrEqual(before.len(), after.len())); + SliceLengthGreaterOrEqual(before.len(), after.len()) } - _ => {} + _ => continue + }; + + if !found.iter().any(|x| x.eq(&opt, tcx)) { + found.push(opt); } } found @@ -870,14 +869,14 @@ fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, bcx } -fn compile_guard<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - guard_expr: &ast::Expr, - data: &ArmData, - m: &'a [Match<'a, 'blk, 'tcx>], - vals: &[ValueRef], - chk: &FailureHandler, - has_genuine_default: bool) - -> Block<'blk, 'tcx> { +fn compile_guard<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + guard_expr: &ast::Expr, + data: &ArmData, + m: &[Match<'a, 'p, 'blk, 'tcx>], + vals: &[ValueRef], + chk: &FailureHandler, + has_genuine_default: bool) + -> Block<'blk, 'tcx> { debug!("compile_guard(bcx={}, guard_expr={}, m={}, vals={})", bcx.to_str(), bcx.expr_to_string(guard_expr), @@ -918,11 +917,11 @@ fn compile_guard<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }) } -fn compile_submatch<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - m: &'a [Match<'a, 'blk, 'tcx>], - vals: &[ValueRef], - chk: &FailureHandler, - has_genuine_default: bool) { +fn compile_submatch<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + m: &[Match<'a, 'p, 'blk, 'tcx>], + vals: &[ValueRef], + chk: &FailureHandler, + has_genuine_default: bool) { debug!("compile_submatch(bcx={}, m={}, vals={})", bcx.to_str(), m.repr(bcx.tcx()), @@ -978,13 +977,13 @@ fn compile_submatch<'a, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } -fn compile_submatch_continue<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, - m: &'a [Match<'a, 'blk, 'tcx>], - vals: &[ValueRef], - chk: &FailureHandler, - col: uint, - val: ValueRef, - has_genuine_default: bool) { +fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, + m: &[Match<'a, 'p, 'blk, 'tcx>], + vals: &[ValueRef], + chk: &FailureHandler, + col: uint, + val: ValueRef, + has_genuine_default: bool) { let fcx = bcx.fcx; let tcx = bcx.tcx(); let dm = &tcx.def_map; @@ -994,9 +993,11 @@ fn compile_submatch_continue<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, // Find a real id (we're adding placeholder wildcard patterns, but // each column is guaranteed to have at least one real pattern) - let pat_id = m.iter().map(|br| br.pats.get(col).id).find(|&id| id != 0).unwrap_or(0); + let pat_id = m.iter().map(|br| br.pats.get(col).id) + .find(|&id| id != DUMMY_NODE_ID) + .unwrap_or(DUMMY_NODE_ID); - let left_ty = if pat_id == 0 { + let left_ty = if pat_id == DUMMY_NODE_ID { ty::mk_nil() } else { node_id_type(bcx, pat_id) @@ -1264,7 
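Note: `compile_submatch_continue` stops treating a literal `0` as the placeholder node id and spells it `DUMMY_NODE_ID`. A sketch of that find-a-real-id-or-fall-back lookup; the constant value and `Pat` shape below are illustrative, not the ones in `syntax::ast`:

    // Illustrative sentinel; the real DUMMY_NODE_ID lives in syntax::ast.
    const DUMMY_NODE_ID: u32 = u32::MAX;

    struct Pat {
        id: u32,
    }

    // Find a real (non-placeholder) node id in one column of the match matrix,
    // falling back to the sentinel when every pattern there is a wildcard filler.
    fn real_pat_id(column: &[Pat]) -> u32 {
        column
            .iter()
            .map(|p| p.id)
            .find(|&id| id != DUMMY_NODE_ID)
            .unwrap_or(DUMMY_NODE_ID)
    }

    fn main() {
        let col = [Pat { id: DUMMY_NODE_ID }, Pat { id: 42 }];
        assert_eq!(real_pat_id(&col), 42);
        assert_eq!(real_pat_id(&[]), DUMMY_NODE_ID);
    }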
+1265,7 @@ impl euv::Delegate for ReassignmentChecker { } } -fn create_bindings_map(bcx: Block, pat: Gc, +fn create_bindings_map(bcx: Block, pat: &ast::Pat, discr: &ast::Expr, body: &ast::Expr) -> BindingsMap { // Create the bindings map, which is a mapping from each binding name // to an alloca() that will be the value for that local variable. @@ -1345,14 +1346,17 @@ fn trans_match_inner<'blk, 'tcx>(scope_cx: Block<'blk, 'tcx>, let arm_datas: Vec = arms.iter().map(|arm| ArmData { bodycx: fcx.new_id_block("case_body", arm.body.id), arm: arm, - bindings_map: create_bindings_map(bcx, *arm.pats.get(0), discr_expr, &*arm.body) + bindings_map: create_bindings_map(bcx, &**arm.pats.get(0), discr_expr, &*arm.body) }).collect(); let mut static_inliner = StaticInliner::new(scope_cx.tcx()); + let arm_pats: Vec>> = arm_datas.iter().map(|arm_data| { + arm_data.arm.pats.iter().map(|p| static_inliner.fold_pat((*p).clone())).collect() + }).collect(); let mut matches = Vec::new(); - for arm_data in arm_datas.iter() { - matches.extend(arm_data.arm.pats.iter().map(|&p| Match { - pats: vec![static_inliner.fold_pat(p)], + for (arm_data, pats) in arm_datas.iter().zip(arm_pats.iter()) { + matches.extend(pats.iter().map(|p| Match { + pats: vec![&**p], data: arm_data, bound_ptrs: Vec::new(), })); @@ -1404,11 +1408,25 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let _icx = push_ctxt("match::store_local"); let mut bcx = bcx; let tcx = bcx.tcx(); - let pat = local.pat; - let opt_init_expr = local.init; + let pat = &*local.pat; + + fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, + pat: &ast::Pat) + -> Block<'blk, 'tcx> { + // create dummy memory for the variables if we have no + // value to store into them immediately + let tcx = bcx.tcx(); + pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| { + let scope = cleanup::var_scope(tcx, p_id); + bcx = mk_binding_alloca( + bcx, p_id, &path1.node, BindLocal, scope, (), + |(), bcx, llval, ty| { zero_mem(bcx, llval, ty); bcx }); + }); + bcx + } - return match opt_init_expr { - Some(init_expr) => { + match local.init { + Some(ref init_expr) => { // Optimize the "let x = expr" case. This just writes // the result of evaluating `expr` directly into the alloca // for `x`. Often the general path results in similar or the @@ -1424,7 +1442,7 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let var_scope = cleanup::var_scope(tcx, local.id); return mk_binding_alloca( bcx, pat.id, ident, BindLocal, var_scope, (), - |(), bcx, v, _| expr::trans_into(bcx, &*init_expr, + |(), bcx, v, _| expr::trans_into(bcx, &**init_expr, expr::SaveIn(v))); } @@ -1433,8 +1451,8 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // General path. 
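Note: in `trans_match_inner` the statically-inlined patterns are first collected into `arm_pats: Vec<Vec<P<ast::Pat>>>` and only afterwards borrowed into each `Match`; now that `Match` stores `&Pat` rather than `Gc<Pat>`, the owning vector has to be kept alive in an outer binding. A small sketch of that two-step ownership arrangement with toy types:

    // Toy stand-ins: an owned pattern and a Match that only borrows patterns.
    struct Pat(u32);

    struct Match<'p> {
        pats: Vec<&'p Pat>,
    }

    fn main() {
        // Step 1: keep the owned patterns alive in an outer vector...
        let arm_pats: Vec<Vec<Pat>> = vec![vec![Pat(0), Pat(1)], vec![Pat(2)]];

        // Step 2: ...then build matches that merely borrow into that storage.
        let matches: Vec<Match<'_>> = arm_pats
            .iter()
            .map(|pats| Match { pats: pats.iter().collect() })
            .collect();

        assert_eq!(matches.len(), 2);
        assert_eq!(matches[0].pats[1].0, 1);
    }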
let init_datum = - unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &*init_expr, "let")); - if ty::type_is_bot(expr_ty(bcx, &*init_expr)) { + unpack_datum!(bcx, expr::trans_to_lvalue(bcx, &**init_expr, "let")); + if ty::type_is_bot(expr_ty(bcx, &**init_expr)) { create_dummy_locals(bcx, pat) } else { if bcx.sess().asm_comments() { @@ -1447,26 +1465,11 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, None => { create_dummy_locals(bcx, pat) } - }; - - fn create_dummy_locals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, - pat: Gc) - -> Block<'blk, 'tcx> { - // create dummy memory for the variables if we have no - // value to store into them immediately - let tcx = bcx.tcx(); - pat_bindings(&tcx.def_map, &*pat, |_, p_id, _, path1| { - let scope = cleanup::var_scope(tcx, p_id); - bcx = mk_binding_alloca( - bcx, p_id, &path1.node, BindLocal, scope, (), - |(), bcx, llval, ty| { zero_mem(bcx, llval, ty); bcx }); - }); - bcx } } pub fn store_arg<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, - pat: Gc, + pat: &ast::Pat, arg: Datum, arg_scope: cleanup::ScopeId) -> Block<'blk, 'tcx> { @@ -1520,7 +1523,7 @@ pub fn store_arg<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, /// Generates code for the pattern binding in a `for` loop like /// `for in { ... }`. pub fn store_for_loop_binding<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - pat: Gc, + pat: &ast::Pat, llvalue: ValueRef, body_scope: cleanup::ScopeId) -> Block<'blk, 'tcx> { @@ -1573,7 +1576,7 @@ fn mk_binding_alloca<'blk, 'tcx, A>(bcx: Block<'blk, 'tcx>, } fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - pat: Gc, + pat: &ast::Pat, val: ValueRef, binding_mode: IrrefutablePatternBindingMode, cleanup_scope: cleanup::ScopeId) @@ -1611,7 +1614,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let tcx = bcx.tcx(); let ccx = bcx.ccx(); match pat.node { - ast::PatIdent(pat_binding_mode, ref path1, inner) => { + ast::PatIdent(pat_binding_mode, ref path1, ref inner) => { if pat_is_binding(&tcx.def_map, &*pat) { // Allocate the stack slot where the value of this // binding will live and place it into the appropriate @@ -1637,8 +1640,8 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, }); } - for &inner_pat in inner.iter() { - bcx = bind_irrefutable_pat(bcx, inner_pat, val, + for inner_pat in inner.iter() { + bcx = bind_irrefutable_pat(bcx, &**inner_pat, val, binding_mode, cleanup_scope); } } @@ -1655,9 +1658,9 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, vinfo.disr_val, val); for sub_pat in sub_pats.iter() { - for (i, argval) in args.vals.iter().enumerate() { - bcx = bind_irrefutable_pat(bcx, *sub_pat.get(i), - *argval, binding_mode, + for (i, &argval) in args.vals.iter().enumerate() { + bcx = bind_irrefutable_pat(bcx, &**sub_pat.get(i), + argval, binding_mode, cleanup_scope); } } @@ -1674,7 +1677,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, for (i, elem) in elems.iter().enumerate() { let fldptr = adt::trans_field_ptr(bcx, &*repr, val, 0, i); - bcx = bind_irrefutable_pat(bcx, *elem, + bcx = bind_irrefutable_pat(bcx, &**elem, fldptr, binding_mode, cleanup_scope); } @@ -1695,7 +1698,7 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let ix = ty::field_idx_strict(tcx, f.ident.name, field_tys); let fldptr = adt::trans_field_ptr(bcx, &*pat_repr, val, discr, ix); - bcx = bind_irrefutable_pat(bcx, f.pat, fldptr, + bcx = bind_irrefutable_pat(bcx, &*f.pat, fldptr, binding_mode, cleanup_scope); } }) @@ -1704,17 +1707,17 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let repr = 
adt::represent_node(bcx, pat.id); for (i, elem) in elems.iter().enumerate() { let fldptr = adt::trans_field_ptr(bcx, &*repr, val, 0, i); - bcx = bind_irrefutable_pat(bcx, *elem, fldptr, + bcx = bind_irrefutable_pat(bcx, &**elem, fldptr, binding_mode, cleanup_scope); } } - ast::PatBox(inner) => { + ast::PatBox(ref inner) => { let llbox = Load(bcx, val); - bcx = bind_irrefutable_pat(bcx, inner, llbox, binding_mode, cleanup_scope); + bcx = bind_irrefutable_pat(bcx, &**inner, llbox, binding_mode, cleanup_scope); } - ast::PatRegion(inner) => { + ast::PatRegion(ref inner) => { let loaded_val = Load(bcx, val); - bcx = bind_irrefutable_pat(bcx, inner, loaded_val, binding_mode, cleanup_scope); + bcx = bind_irrefutable_pat(bcx, &**inner, loaded_val, binding_mode, cleanup_scope); } ast::PatVec(ref before, ref slice, ref after) => { let pat_ty = node_id_type(bcx, pat.id); @@ -1733,8 +1736,8 @@ fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, .chain(slice.iter()) .chain(after.iter()) .zip(extracted.vals.move_iter()) - .fold(bcx, |bcx, (&inner, elem)| - bind_irrefutable_pat(bcx, inner, elem, binding_mode, cleanup_scope) + .fold(bcx, |bcx, (inner, elem)| + bind_irrefutable_pat(bcx, &**inner, elem, binding_mode, cleanup_scope) ); } ast::PatMac(..) => { diff --git a/src/librustc/middle/trans/asm.rs b/src/librustc/middle/trans/asm.rs index 7fb692c270ee3..c474aab9b2647 100644 --- a/src/librustc/middle/trans/asm.rs +++ b/src/librustc/middle/trans/asm.rs @@ -67,10 +67,10 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) let in_datum = unpack_datum!(bcx, expr::trans(bcx, &**input)); unpack_result!(bcx, { callee::trans_arg_datum(bcx, - expr_ty(bcx, &**input), - in_datum, - cleanup::CustomScope(temp_scope), - callee::DontAutorefArg) + expr_ty(bcx, &**input), + in_datum, + cleanup::CustomScope(temp_scope), + callee::DontAutorefArg) }) }).collect::>().append(ext_inputs.as_slice()); diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index c023b7a953493..e86df86511870 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -1356,7 +1356,7 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { match tcx.map.find(id) { Some(ast_map::NodeItem(i)) => { match i.node { - ast::ItemFn(_, _, _, _, blk) => { + ast::ItemFn(_, _, _, _, ref blk) => { let mut explicit = CheckForNestedReturnsVisitor::explicit(); let mut implicit = CheckForNestedReturnsVisitor::implicit(); visit::walk_item(&mut explicit, &*i); @@ -1368,12 +1368,12 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { } Some(ast_map::NodeTraitItem(trait_method)) => { match *trait_method { - ast::ProvidedMethod(m) => { + ast::ProvidedMethod(ref m) => { match m.node { - ast::MethDecl(_, _, _, _, _, _, blk, _) => { + ast::MethDecl(_, _, _, _, _, _, ref blk, _) => { let mut explicit = CheckForNestedReturnsVisitor::explicit(); let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_method_helper(&mut explicit, &*m); + visit::walk_method_helper(&mut explicit, &**m); visit::walk_expr_opt(&mut implicit, &blk.expr); explicit.found || implicit.found } @@ -1386,11 +1386,11 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { } } } - Some(ast_map::NodeImplItem(ref ii)) => { - match **ii { + Some(ast_map::NodeImplItem(ii)) => { + match *ii { ast::MethodImplItem(ref m) => { match m.node { - ast::MethDecl(_, _, _, _, _, _, blk, _) => { + ast::MethDecl(_, _, _, _, _, _, ref blk, _) => { let mut explicit = 
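Note: `bind_irrefutable_pat` above walks the optional sub-pattern with `for inner_pat in inner.iter()`, treating the `Option` as a zero-or-one element iterator and reborrowing each element with `&**inner_pat`. A tiny sketch of iterating an `Option<Box<_>>` by reference (toy `Pat`, not the AST node):

    struct Pat {
        name: &'static str,
    }

    // Visit the optional sub-pattern, if any, without moving it out of the Option:
    // Option::iter yields zero or one shared references.
    fn visit_inner(inner: &Option<Box<Pat>>, seen: &mut Vec<&'static str>) {
        for inner_pat in inner.iter() {
            seen.push((**inner_pat).name);
        }
    }

    fn main() {
        let mut seen = Vec::new();
        visit_inner(&Some(Box::new(Pat { name: "binding" })), &mut seen);
        visit_inner(&None, &mut seen);
        assert_eq!(seen, ["binding"]);
    }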
CheckForNestedReturnsVisitor::explicit(); let mut implicit = CheckForNestedReturnsVisitor::implicit(); visit::walk_method_helper(&mut explicit, &**m); @@ -1404,12 +1404,12 @@ fn has_nested_returns(tcx: &ty::ctxt, id: ast::NodeId) -> bool { } Some(ast_map::NodeExpr(e)) => { match e.node { - ast::ExprFnBlock(_, _, blk) | - ast::ExprProc(_, blk) | - ast::ExprUnboxedFn(_, _, _, blk) => { + ast::ExprFnBlock(_, _, ref blk) | + ast::ExprProc(_, ref blk) | + ast::ExprUnboxedFn(_, _, _, ref blk) => { let mut explicit = CheckForNestedReturnsVisitor::explicit(); let mut implicit = CheckForNestedReturnsVisitor::implicit(); - visit::walk_expr(&mut explicit, &*e); + visit::walk_expr(&mut explicit, e); visit::walk_expr_opt(&mut implicit, &blk.expr); explicit.found || implicit.found } @@ -1649,7 +1649,7 @@ fn copy_args_to_allocas<'blk, 'tcx>(fcx: &FunctionContext<'blk, 'tcx>, // This alloca should be optimized away by LLVM's mem-to-reg pass in // the event it's not truly needed. - bcx = _match::store_arg(bcx, args[i].pat, arg_datum, arg_scope_id); + bcx = _match::store_arg(bcx, &*args[i].pat, arg_datum, arg_scope_id); if fcx.ccx.sess().opts.debuginfo == FullDebugInfo { debuginfo::create_argument_metadata(bcx, &args[i]); @@ -1701,7 +1701,7 @@ fn copy_unboxed_closure_args_to_allocas<'blk, 'tcx>( tuple_element_datum.to_rvalue_datum(bcx, "arg")); bcx = _match::store_arg(bcx, - args[j].pat, + &*args[j].pat, tuple_element_datum, arg_scope_id); @@ -2008,7 +2008,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, if !type_is_zero_size(ccx, result_ty) { match args { callee::ArgExprs(exprs) => { - let fields = exprs.iter().map(|x| *x).enumerate().collect::>(); + let fields = exprs.iter().map(|x| &**x).enumerate().collect::>(); bcx = expr::trans_adt(bcx, result_ty, disr, fields.as_slice(), None, expr::SaveIn(llresult)); } @@ -2792,15 +2792,15 @@ pub fn get_item_val(ccx: &CrateContext, id: ast::NodeId) -> ValueRef { ccx.sess().bug("unexpected variant: required trait method in \ get_item_val()"); } - ast::ProvidedMethod(m) => { - register_method(ccx, id, &*m) + ast::ProvidedMethod(ref m) => { + register_method(ccx, id, &**m) } } } ast_map::NodeImplItem(ii) => { match *ii { - ast::MethodImplItem(m) => register_method(ccx, id, &*m), + ast::MethodImplItem(ref m) => register_method(ccx, id, &**m), } } @@ -3042,9 +3042,10 @@ fn internalize_symbols(cx: &SharedCrateContext, reachable: &HashSet) { } } -pub fn trans_crate(krate: ast::Crate, - analysis: CrateAnalysis) -> (ty::ctxt, CrateTranslation) { +pub fn trans_crate<'tcx>(analysis: CrateAnalysis<'tcx>) + -> (ty::ctxt<'tcx>, CrateTranslation) { let CrateAnalysis { ty_cx: tcx, exp_map2, reachable, name, .. } = analysis; + let krate = tcx.map.krate(); // Before we touch LLVM, make sure that multithreading is enabled. unsafe { @@ -3064,7 +3065,7 @@ pub fn trans_crate(krate: ast::Crate, } } - let link_meta = link::build_link_meta(&tcx.sess, &krate, name); + let link_meta = link::build_link_meta(&tcx.sess, krate, name); let codegen_units = tcx.sess.opts.cg.codegen_units; let shared_ccx = SharedCrateContext::new(link_meta.crate_name.as_slice(), @@ -3096,7 +3097,7 @@ pub fn trans_crate(krate: ast::Crate, } // Translate the metadata. 
- let metadata = write_metadata(&shared_ccx, &krate); + let metadata = write_metadata(&shared_ccx, krate); if shared_ccx.sess().trans_stats() { let stats = shared_ccx.stats(); diff --git a/src/librustc/middle/trans/callee.rs b/src/librustc/middle/trans/callee.rs index 5d66ec0a4b936..878d95773ada5 100644 --- a/src/librustc/middle/trans/callee.rs +++ b/src/librustc/middle/trans/callee.rs @@ -53,10 +53,10 @@ use middle::typeck::MethodCall; use util::ppaux::Repr; use util::ppaux::ty_to_string; -use std::gc::Gc; use syntax::abi as synabi; use syntax::ast; use syntax::ast_map; +use syntax::ptr::P; pub struct MethodData { pub llfn: ValueRef, @@ -902,7 +902,7 @@ pub fn trans_call_inner<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pub enum CallArgs<'a> { // Supply value of arguments as a list of expressions that must be // translated. This is used in the common case of `foo(bar, qux)`. - ArgExprs(&'a [Gc]), + ArgExprs(&'a [P]), // Supply value of arguments as a list of LLVM value refs; frequently // used with lang items and so forth, when the argument is an internal @@ -916,12 +916,12 @@ pub enum CallArgs<'a> { // Supply value of arguments as a list of expressions that must be // translated, for overloaded call operators. - ArgOverloadedCall(&'a [Gc]), + ArgOverloadedCall(Vec<&'a ast::Expr>), } fn trans_args_under_call_abi<'blk, 'tcx>( mut bcx: Block<'blk, 'tcx>, - arg_exprs: &[Gc], + arg_exprs: &[P], fn_ty: ty::t, llargs: &mut Vec, arg_cleanup_scope: cleanup::ScopeId, @@ -941,13 +941,13 @@ fn trans_args_under_call_abi<'blk, 'tcx>( } // Now untuple the rest of the arguments. - let tuple_expr = arg_exprs[1]; + let tuple_expr = &arg_exprs[1]; let tuple_type = node_id_type(bcx, tuple_expr.id); match ty::get(tuple_type).sty { ty::ty_tup(ref field_types) => { let tuple_datum = unpack_datum!(bcx, - expr::trans(bcx, &*tuple_expr)); + expr::trans(bcx, &**tuple_expr)); let tuple_lvalue_datum = unpack_datum!(bcx, tuple_datum.to_lvalue_datum(bcx, @@ -982,7 +982,7 @@ fn trans_args_under_call_abi<'blk, 'tcx>( fn trans_overloaded_call_args<'blk, 'tcx>( mut bcx: Block<'blk, 'tcx>, - arg_exprs: &[Gc], + arg_exprs: Vec<&ast::Expr>, fn_ty: ty::t, llargs: &mut Vec, arg_cleanup_scope: cleanup::ScopeId, @@ -991,7 +991,7 @@ fn trans_overloaded_call_args<'blk, 'tcx>( // Translate the `self` argument first. 
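Note: `ArgExprs` now carries `&[P<ast::Expr>]`, and callers that want plain `&Expr` values map over it with `|x| &**x`, as the earlier `trans_named_tuple_constructor` hunk does when building its `fields` list. A tiny sketch of turning a borrowed slice of owned pointers into enumerated references (illustrative `Expr`; `Box` stands in for `P`):

    // Illustrative expression node; Box plays the role of syntax::ptr::P here.
    struct Expr {
        id: u32,
    }

    // Turn a borrowed slice of owned pointers into (index, &Expr) pairs,
    // the way the patch builds `fields` before calling trans_adt.
    fn enumerate_args(exprs: &[Box<Expr>]) -> Vec<(usize, &Expr)> {
        exprs.iter().map(|x| &**x).enumerate().collect()
    }

    fn main() {
        let args = vec![Box::new(Expr { id: 10 }), Box::new(Expr { id: 11 })];
        let fields = enumerate_args(&args);
        assert_eq!(fields[1].0, 1);
        assert_eq!(fields[1].1.id, 11);
    }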
let arg_tys = ty::ty_fn_args(fn_ty); if !ignore_self { - let arg_datum = unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[0])); + let arg_datum = unpack_datum!(bcx, expr::trans(bcx, arg_exprs[0])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, *arg_tys.get(0), @@ -1007,7 +1007,7 @@ fn trans_overloaded_call_args<'blk, 'tcx>( ty::ty_tup(ref field_types) => { for (i, &field_type) in field_types.iter().enumerate() { let arg_datum = - unpack_datum!(bcx, expr::trans(bcx, &*arg_exprs[i + 1])); + unpack_datum!(bcx, expr::trans(bcx, arg_exprs[i + 1])); llargs.push(unpack_result!(bcx, { trans_arg_datum(bcx, field_type, diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index 4ac9ae64d5a08..bbb9ba4bbb685 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -472,7 +472,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { pub fn def(&self, nid: ast::NodeId) -> def::Def { match self.tcx().def_map.borrow().find(&nid) { - Some(&v) => v, + Some(v) => v.clone(), None => { self.tcx().sess.bug(format!( "no def associated with node id {:?}", nid).as_slice()); diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs index 8f6a3864b37ea..576031500b958 100644 --- a/src/librustc/middle/trans/consts.rs +++ b/src/librustc/middle/trans/consts.rs @@ -33,12 +33,12 @@ use middle::ty; use util::ppaux::{Repr, ty_to_string}; use std::c_str::ToCStr; -use std::gc::Gc; use std::vec; use libc::c_uint; use syntax::{ast, ast_util}; +use syntax::ptr::P; -pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit) +pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: &ast::Lit) -> ValueRef { let _icx = push_ctxt("trans_lit"); debug!("const_lit: {}", lit); @@ -102,7 +102,7 @@ fn first_two((a, b, _): (R, S, T)) -> (R, S) { } fn const_vec(cx: &CrateContext, e: &ast::Expr, - es: &[Gc], is_local: bool) -> (ValueRef, Type, bool) { + es: &[P], is_local: bool) -> (ValueRef, Type, bool) { let vec_ty = ty::expr_ty(cx.tcx(), e); let unit_ty = ty::sequence_element_type(cx.tcx(), vec_ty); let llunitty = type_of::type_of(cx, unit_ty); @@ -321,7 +321,7 @@ pub fn const_expr(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef // if it's assigned to a static. 
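Note: `BlockS::def` changes `Some(&v) => v` into `Some(v) => v.clone()` because `def::Def` is no longer a `Copy` value it can pull straight out of the borrowed map entry. A generic sketch of that lookup-then-clone shape over a plain `HashMap` (the `Def` enum here is invented):

    use std::collections::HashMap;

    // Once a definition can own data (e.g. a path), it is Clone but no longer Copy.
    #[derive(Clone, PartialEq, Debug)]
    enum Def {
        Local(u32),
        Static(String),
    }

    // Clone the value out of the borrowed map entry instead of copying it,
    // mirroring `Some(v) => v.clone()` in BlockS::def.
    fn lookup_def(def_map: &HashMap<u32, Def>, node_id: u32) -> Def {
        match def_map.get(&node_id) {
            Some(v) => v.clone(),
            None => panic!("no def associated with node id {}", node_id),
        }
    }

    fn main() {
        let mut map = HashMap::new();
        map.insert(1, Def::Static("FOO".to_string()));
        map.insert(2, Def::Local(0));
        assert_eq!(lookup_def(&map, 1), Def::Static("FOO".to_string()));
        assert_eq!(lookup_def(&map, 2), Def::Local(0));
    }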
fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr, is_local: bool) -> (ValueRef, bool) { - let map_list = |exprs: &[Gc]| { + let map_list = |exprs: &[P]| { exprs.iter().map(|e| first_two(const_expr(cx, &**e, is_local))) .fold((Vec::new(), true), |(l, all_inlineable), (val, inlineable)| { @@ -332,7 +332,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr, let _icx = push_ctxt("const_expr"); return match e.node { ast::ExprLit(ref lit) => { - (consts::const_lit(cx, e, (**lit).clone()), true) + (consts::const_lit(cx, e, &**lit), true) } ast::ExprBinary(b, ref e1, ref e2) => { let (te1, _, _) = const_expr(cx, &**e1, is_local); @@ -653,7 +653,7 @@ fn const_expr_unadjusted(cx: &CrateContext, e: &ast::Expr, } } } - ast::ExprCall(callee, ref args) => { + ast::ExprCall(ref callee, ref args) => { let opt_def = cx.tcx().def_map.borrow().find_copy(&callee.id); match opt_def { Some(def::DefStruct(_)) => { diff --git a/src/librustc/middle/trans/controlflow.rs b/src/librustc/middle/trans/controlflow.rs index 164ddd65f3540..fb12520741bcf 100644 --- a/src/librustc/middle/trans/controlflow.rs +++ b/src/librustc/middle/trans/controlflow.rs @@ -39,8 +39,6 @@ use syntax::parse::token::InternedString; use syntax::parse::token; use syntax::visit::Visitor; -use std::gc::Gc; - pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, s: &ast::Stmt) -> Block<'blk, 'tcx> { @@ -61,7 +59,7 @@ pub fn trans_stmt<'blk, 'tcx>(cx: Block<'blk, 'tcx>, ast::StmtExpr(ref e, _) | ast::StmtSemi(ref e, _) => { bcx = trans_stmt_semi(bcx, &**e); } - ast::StmtDecl(d, _) => { + ast::StmtDecl(ref d, _) => { match d.node { ast::DeclLocal(ref local) => { bcx = init_local(bcx, &**local); @@ -132,8 +130,8 @@ pub fn trans_block<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if_id: ast::NodeId, cond: &ast::Expr, - thn: ast::P, - els: Option>, + thn: &ast::Block, + els: Option<&ast::Expr>, dest: expr::Dest) -> Block<'blk, 'tcx> { debug!("trans_if(bcx={}, if_id={}, cond={}, thn={:?}, dest={})", @@ -251,7 +249,7 @@ pub fn trans_while<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, /// Translates a `for` loop. 
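Most of the churn from here on is mechanical: where the old code copied a `Gc` payload out of an enum in a match (`ast::ExprCall(callee, ...)`, `ast::StmtDecl(d, _)`), the new code binds the payload with `ref` and hands helpers such as `const_lit` a plain borrow via `&**`. Compressed into a self-contained example with toy node types (not the real AST):

    struct Lit { value: i64 }
    struct Expr { node: ExprKind }

    enum ExprKind {
        Lit(Box<Lit>),              // stand-in for ExprLit(P<Lit>)
        Call(Box<Expr>, Vec<Expr>), // stand-in for ExprCall(P<Expr>, Vec<P<Expr>>)
    }

    // Helpers now accept borrows rather than Gc pointers.
    fn const_lit(lit: &Lit) -> i64 { lit.value }

    fn const_expr(e: &Expr) -> i64 {
        match e.node {
            // `ref` borrows the boxed payload instead of moving it out;
            // `&**lit` turns &Box<Lit> into the &Lit the helper wants.
            ExprKind::Lit(ref lit) => const_lit(&**lit),
            ExprKind::Call(ref callee, ref args) => {
                const_expr(&**callee) + args.iter().map(const_expr).sum::<i64>()
            }
        }
    }

    fn main() {
        let four = Expr { node: ExprKind::Lit(Box::new(Lit { value: 4 })) };
        let call = Expr {
            node: ExprKind::Call(
                Box::new(Expr { node: ExprKind::Lit(Box::new(Lit { value: 1 })) }),
                vec![Expr { node: ExprKind::Lit(Box::new(Lit { value: 2 })) }],
            ),
        };
        assert_eq!(const_expr(&four), 4);
        assert_eq!(const_expr(&call), 3);
    }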
pub fn trans_for<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, loop_info: NodeInfo, - pat: Gc, + pat: &ast::Pat, head: &ast::Expr, body: &ast::Block) -> Block<'blk, 'tcx> { @@ -453,7 +451,7 @@ pub fn trans_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } pub fn trans_ret<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - e: Option>) + e: Option<&ast::Expr>) -> Block<'blk, 'tcx> { let _icx = push_ctxt("trans_ret"); let fcx = bcx.fcx; diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index f0b0507afbb13..21ba2189414c2 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -207,7 +207,6 @@ use std::c_str::{CString, ToCStr}; use std::cell::{Cell, RefCell}; use std::collections::HashMap; use std::collections::HashSet; -use std::gc::Gc; use std::ptr; use std::rc::{Rc, Weak}; use syntax::util::interner::Interner; @@ -1129,8 +1128,8 @@ pub fn create_function_debug_context(cx: &CrateContext, } match item.node { - ast::ItemFn(fn_decl, _, _, ref generics, top_level_block) => { - (item.ident, fn_decl, generics, top_level_block, item.span, true) + ast::ItemFn(ref fn_decl, _, _, ref generics, ref top_level_block) => { + (item.ident, &**fn_decl, generics, &**top_level_block, item.span, true) } _ => { cx.sess().span_bug(item.span, @@ -1158,16 +1157,16 @@ pub fn create_function_debug_context(cx: &CrateContext, } ast_map::NodeExpr(ref expr) => { match expr.node { - ast::ExprFnBlock(_, fn_decl, top_level_block) | - ast::ExprProc(fn_decl, top_level_block) | - ast::ExprUnboxedFn(_, _, fn_decl, top_level_block) => { + ast::ExprFnBlock(_, ref fn_decl, ref top_level_block) | + ast::ExprProc(ref fn_decl, ref top_level_block) | + ast::ExprUnboxedFn(_, _, ref fn_decl, ref top_level_block) => { let name = format!("fn{}", token::gensym("fn")); let name = token::str_to_ident(name.as_slice()); - (name, fn_decl, + (name, &**fn_decl, // This is not quite right. It should actually inherit // the generics of the enclosing function. &empty_generics, - top_level_block, + &**top_level_block, expr.span, // Don't try to lookup the item path: false) @@ -1287,9 +1286,8 @@ pub fn create_function_debug_context(cx: &CrateContext, source_locations_enabled: Cell::new(false), }; - let arg_pats = fn_decl.inputs.iter().map(|arg_ref| arg_ref.pat).collect::>(); populate_scope_map(cx, - arg_pats.as_slice(), + fn_decl.inputs.as_slice(), &*top_level_block, fn_metadata, &mut *fn_debug_context.scope_map.borrow_mut()); @@ -3169,7 +3167,7 @@ fn get_namespace_and_span_for_item(cx: &CrateContext, def_id: ast::DefId) // introducing *artificial* lexical scope descriptors where necessary. These // artificial scopes allow GDB to correctly handle name shadowing. fn populate_scope_map(cx: &CrateContext, - arg_pats: &[Gc], + args: &[ast::Arg], fn_entry_block: &ast::Block, fn_metadata: DISubprogram, scope_map: &mut HashMap) { @@ -3185,8 +3183,8 @@ fn populate_scope_map(cx: &CrateContext, // Push argument identifiers onto the stack so arguments integrate nicely // with variable shadowing. - for &arg_pat in arg_pats.iter() { - pat_util::pat_bindings(def_map, &*arg_pat, |_, _, _, path1| { + for arg in args.iter() { + pat_util::pat_bindings(def_map, &*arg.pat, |_, _, _, path1| { scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, ident: Some(path1.node) }); }) @@ -3272,10 +3270,10 @@ fn populate_scope_map(cx: &CrateContext, scope_stack: &mut Vec , scope_map: &mut HashMap) { match *decl { - codemap::Spanned { node: ast::DeclLocal(local), .. 
} => { + codemap::Spanned { node: ast::DeclLocal(ref local), .. } => { scope_map.insert(local.id, scope_stack.last().unwrap().scope_metadata); - walk_pattern(cx, local.pat, scope_stack, scope_map); + walk_pattern(cx, &*local.pat, scope_stack, scope_map); for exp in local.init.iter() { walk_expr(cx, &**exp, scope_stack, scope_map); @@ -3286,7 +3284,7 @@ fn populate_scope_map(cx: &CrateContext, } fn walk_pattern(cx: &CrateContext, - pat: Gc, + pat: &ast::Pat, scope_stack: &mut Vec , scope_map: &mut HashMap) { @@ -3367,8 +3365,8 @@ fn populate_scope_map(cx: &CrateContext, scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for &sub_pat in sub_pat_opt.iter() { - walk_pattern(cx, sub_pat, scope_stack, scope_map); + for sub_pat in sub_pat_opt.iter() { + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3379,9 +3377,9 @@ fn populate_scope_map(cx: &CrateContext, ast::PatEnum(_, ref sub_pats_opt) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for ref sub_pats in sub_pats_opt.iter() { - for &p in sub_pats.iter() { - walk_pattern(cx, p, scope_stack, scope_map); + for sub_pats in sub_pats_opt.iter() { + for p in sub_pats.iter() { + walk_pattern(cx, &**p, scope_stack, scope_map); } } } @@ -3389,8 +3387,8 @@ fn populate_scope_map(cx: &CrateContext, ast::PatStruct(_, ref field_pats, _) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for &ast::FieldPat { pat: sub_pat, .. } in field_pats.iter() { - walk_pattern(cx, sub_pat, scope_stack, scope_map); + for &ast::FieldPat { pat: ref sub_pat, .. } in field_pats.iter() { + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3398,13 +3396,13 @@ fn populate_scope_map(cx: &CrateContext, scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); for sub_pat in sub_pats.iter() { - walk_pattern(cx, sub_pat.clone(), scope_stack, scope_map); + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } ast::PatBox(ref sub_pat) | ast::PatRegion(ref sub_pat) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - walk_pattern(cx, sub_pat.clone(), scope_stack, scope_map); + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } ast::PatLit(ref exp) => { @@ -3421,16 +3419,16 @@ fn populate_scope_map(cx: &CrateContext, ast::PatVec(ref front_sub_pats, ref middle_sub_pats, ref back_sub_pats) => { scope_map.insert(pat.id, scope_stack.last().unwrap().scope_metadata); - for &sub_pat in front_sub_pats.iter() { - walk_pattern(cx, sub_pat, scope_stack, scope_map); + for sub_pat in front_sub_pats.iter() { + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } - for &sub_pat in middle_sub_pats.iter() { - walk_pattern(cx, sub_pat, scope_stack, scope_map); + for sub_pat in middle_sub_pats.iter() { + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } - for &sub_pat in back_sub_pats.iter() { - walk_pattern(cx, sub_pat, scope_stack, scope_map); + for sub_pat in back_sub_pats.iter() { + walk_pattern(cx, &**sub_pat, scope_stack, scope_map); } } @@ -3466,8 +3464,8 @@ fn populate_scope_map(cx: &CrateContext, walk_expr(cx, &**sub_expr, scope_stack, scope_map); } - ast::ExprRet(exp_opt) => match exp_opt { - Some(sub_exp) => walk_expr(cx, &*sub_exp, scope_stack, scope_map), + ast::ExprRet(ref exp_opt) => match *exp_opt { + Some(ref sub_exp) => walk_expr(cx, &**sub_exp, scope_stack, scope_map), None => () }, @@ -3538,7 +3536,7 @@ fn populate_scope_map(cx: &CrateContext, .unwrap() .scope_metadata); walk_pattern(cx, - *pattern, + &**pattern, scope_stack, 
scope_map); walk_block(cx, &**body, scope_stack, scope_map); @@ -3570,7 +3568,7 @@ fn populate_scope_map(cx: &CrateContext, scope_map, |cx, scope_stack, scope_map| { for &ast::Arg { pat: ref pattern, .. } in decl.inputs.iter() { - walk_pattern(cx, pattern.clone(), scope_stack, scope_map); + walk_pattern(cx, &**pattern, scope_stack, scope_map); } walk_block(cx, &**block, scope_stack, scope_map); @@ -3607,8 +3605,8 @@ fn populate_scope_map(cx: &CrateContext, scope_stack, scope_map, |cx, scope_stack, scope_map| { - for &pat in arm_ref.pats.iter() { - walk_pattern(cx, pat, scope_stack, scope_map); + for pat in arm_ref.pats.iter() { + walk_pattern(cx, &**pat, scope_stack, scope_map); } for guard_exp in arm_ref.guard.iter() { diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 54394b8d74169..8a6f3dd6ffab6 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -77,8 +77,7 @@ use middle::trans::type_::Type; use syntax::ast; use syntax::codemap; use syntax::print::pprust::{expr_to_string}; - -use std::gc::Gc; +use syntax::ptr::P; // Destinations @@ -597,7 +596,7 @@ fn trans_datum_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } - ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, (**lit).clone()), + ast::ExprLit(ref lit) => trans_immediate_lit(bcx, expr, &**lit), ast::ExprBinary(op, ref lhs, ref rhs) => { trans_binary(bcx, expr, op, &**lhs, &**rhs) } @@ -882,8 +881,8 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ast::ExprAgain(label_opt) => { controlflow::trans_cont(bcx, expr.id, label_opt) } - ast::ExprRet(ex) => { - controlflow::trans_ret(bcx, ex) + ast::ExprRet(ref ex) => { + controlflow::trans_ret(bcx, ex.as_ref().map(|e| &**e)) } ast::ExprWhile(ref cond, ref body, _) => { controlflow::trans_while(bcx, expr.id, &**cond, &**body) @@ -891,7 +890,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ast::ExprForLoop(ref pat, ref head, ref body, _) => { controlflow::trans_for(bcx, expr_info(expr), - *pat, + &**pat, &**head, &**body) } @@ -928,7 +927,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } ast::ExprAssignOp(op, ref dst, ref src) => { - trans_assign_op(bcx, expr, op, &**dst, src.clone()) + trans_assign_op(bcx, expr, op, &**dst, &**src) } ast::ExprInlineAsm(ref a) => { asm::trans_inline_asm(bcx, a) @@ -958,8 +957,8 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ast::ExprPath(_) => { trans_def_dps_unadjusted(bcx, expr, bcx.def(expr.id), dest) } - ast::ExprIf(ref cond, ref thn, els) => { - controlflow::trans_if(bcx, expr.id, &**cond, thn.clone(), els, dest) + ast::ExprIf(ref cond, ref thn, ref els) => { + controlflow::trans_if(bcx, expr.id, &**cond, &**thn, els.as_ref().map(|e| &**e), dest) } ast::ExprMatch(ref discr, ref arms) => { _match::trans_match(bcx, expr, &**discr, arms.as_slice(), dest) @@ -967,20 +966,20 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ast::ExprBlock(ref blk) => { controlflow::trans_block(bcx, &**blk, dest) } - ast::ExprStruct(_, ref fields, base) => { + ast::ExprStruct(_, ref fields, ref base) => { trans_struct(bcx, fields.as_slice(), - base, + base.as_ref().map(|e| &**e), expr.span, expr.id, dest) } ast::ExprTup(ref args) => { - let numbered_fields: Vec<(uint, Gc)> = - args.iter().enumerate().map(|(i, arg)| (i, *arg)).collect(); + let numbered_fields: Vec<(uint, &ast::Expr)> = + args.iter().enumerate().map(|(i, arg)| (i, &**arg)).collect(); trans_adt(bcx, expr_ty(bcx, expr), 
0, numbered_fields.as_slice(), None, dest) } - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => { tvec::trans_lit_str(bcx, expr, (*s).clone(), dest) @@ -1005,14 +1004,14 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr_to_string(expr), expr_ty.repr(tcx)); closure::trans_expr_fn(bcx, store, &**decl, &**body, expr.id, dest) } - ast::ExprUnboxedFn(_, _, decl, body) => { - closure::trans_unboxed_closure(bcx, &*decl, &*body, expr.id, dest) + ast::ExprUnboxedFn(_, _, ref decl, ref body) => { + closure::trans_unboxed_closure(bcx, &**decl, &**body, expr.id, dest) } ast::ExprCall(ref f, ref args) => { if bcx.tcx().is_method_call(expr.id) { trans_overloaded_call(bcx, expr, - *f, + &**f, args.as_slice(), Some(dest)) } else { @@ -1061,7 +1060,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } ast::ExprAssignOp(op, ref dst, ref src) => { - trans_assign_op(bcx, expr, op, &**dst, src.clone()) + trans_assign_op(bcx, expr, op, &**dst, &**src) } _ => { bcx.tcx().sess.span_bug( @@ -1263,7 +1262,7 @@ pub fn with_field_tys(tcx: &ty::ctxt, fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, fields: &[ast::Field], - base: Option>, + base: Option<&ast::Expr>, expr_span: codemap::Span, id: ast::NodeId, dest: Dest) -> Block<'blk, 'tcx> { @@ -1281,7 +1280,7 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, match opt_pos { Some(i) => { *need_base.get_mut(i) = false; - (i, field.expr) + (i, &*field.expr) } None => { tcx.sess.span_bug(field.span, @@ -1320,11 +1319,12 @@ fn trans_struct<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, * Note that `fields` may be empty; the base expression must always be * evaluated for side-effects. */ -pub struct StructBaseInfo { +pub struct StructBaseInfo<'a> { /// The base expression; will be evaluated after all explicit fields. - expr: Gc, + expr: &'a ast::Expr, /// The indices of fields to copy paired with their types. 
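`ex.as_ref().map(|e| &**e)`, seen here for `ExprRet`, for `ExprIf`'s else branch, and for `ExprStruct`'s base expression, is the patch's standard conversion from an `Option<P<ast::Expr>>` stored in the AST to the `Option<&ast::Expr>` that the translation helpers now accept. The idiom in isolation, with `Box` standing in for `P`:

    struct Expr { id: u32 }

    // Helpers now take Option<&Expr> instead of Option<Gc<Expr>>.
    fn trans_ret(e: Option<&Expr>) -> u32 {
        e.map(|e| e.id).unwrap_or(0)
    }

    fn main() {
        let ret_value: Option<Box<Expr>> = Some(Box::new(Expr { id: 42 }));
        // as_ref() yields Option<&Box<Expr>>; &**e strips the Box to &Expr.
        let borrowed: Option<&Expr> = ret_value.as_ref().map(|e| &**e);
        assert_eq!(trans_ret(borrowed), 42);
        assert_eq!(trans_ret(None), 0);
    }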
- fields: Vec<(uint, ty::t)> } + fields: Vec<(uint, ty::t)> +} /** * Constructs an ADT instance: @@ -1339,7 +1339,7 @@ pub struct StructBaseInfo { pub fn trans_adt<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, ty: ty::t, discr: ty::Disr, - fields: &[(uint, Gc)], + fields: &[(uint, &ast::Expr)], optbase: Option, dest: Dest) -> Block<'blk, 'tcx> { let _icx = push_ctxt("trans_adt"); @@ -1407,7 +1407,7 @@ pub fn trans_adt<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, fn trans_immediate_lit<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr, - lit: ast::Lit) + lit: &ast::Lit) -> DatumBlock<'blk, 'tcx, Expr> { // must not be a string constant, that is a RvalueDpsExpr let _icx = push_ctxt("trans_immediate_lit"); @@ -1750,12 +1750,12 @@ fn trans_overloaded_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, dest) } -fn trans_overloaded_call<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, - expr: &ast::Expr, - callee: Gc, - args: &[Gc], - dest: Option) - -> Block<'blk, 'tcx> { +fn trans_overloaded_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, + expr: &ast::Expr, + callee: &'a ast::Expr, + args: &'a [P], + dest: Option) + -> Block<'blk, 'tcx> { let method_call = MethodCall::expr(expr.id); let method_type = bcx.tcx() .method_map @@ -1763,7 +1763,7 @@ fn trans_overloaded_call<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, .get(&method_call) .ty; let mut all_args = vec!(callee); - all_args.push_all(args); + all_args.extend(args.iter().map(|e| &**e)); unpack_result!(bcx, callee::trans_call_inner(bcx, Some(expr_info(expr)), @@ -1776,8 +1776,7 @@ fn trans_overloaded_call<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, None, arg_cleanup_scope) }, - callee::ArgOverloadedCall( - all_args.as_slice()), + callee::ArgOverloadedCall(all_args), dest)); bcx } @@ -1957,7 +1956,7 @@ fn trans_assign_op<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, expr: &ast::Expr, op: ast::BinOp, dst: &ast::Expr, - src: Gc) + src: &ast::Expr) -> Block<'blk, 'tcx> { let _icx = push_ctxt("trans_assign_op"); let mut bcx = bcx; diff --git a/src/librustc/middle/trans/inline.rs b/src/librustc/middle/trans/inline.rs index af271d039bde9..e1e728fbea018 100644 --- a/src/librustc/middle/trans/inline.rs +++ b/src/librustc/middle/trans/inline.rs @@ -47,12 +47,12 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) ccx.external().borrow_mut().insert(fn_id, None); return None; } - csearch::found(ast::IIItem(item)) => { + csearch::found(&ast::IIItem(ref item)) => { ccx.external().borrow_mut().insert(fn_id, Some(item.id)); ccx.external_srcs().borrow_mut().insert(item.id, fn_id); ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1); - trans_item(ccx, &*item); + trans_item(ccx, &**item); let linkage = match item.node { ast::ItemFn(_, _, _, ref generics, _) => { @@ -104,12 +104,12 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) local_def(item.id) } - csearch::found(ast::IIForeign(item)) => { + csearch::found(&ast::IIForeign(ref item)) => { ccx.external().borrow_mut().insert(fn_id, Some(item.id)); ccx.external_srcs().borrow_mut().insert(item.id, fn_id); local_def(item.id) } - csearch::found_parent(parent_id, ast::IIItem(item)) => { + csearch::found_parent(parent_id, &ast::IIItem(ref item)) => { ccx.external().borrow_mut().insert(parent_id, Some(item.id)); ccx.external_srcs().borrow_mut().insert(item.id, parent_id); @@ -135,32 +135,37 @@ fn instantiate_inline(ccx: &CrateContext, fn_id: ast::DefId) _ => ccx.sess().bug("maybe_instantiate_inline: item has a \ non-enum, non-struct parent") } - trans_item(ccx, &*item); + trans_item(ccx, &**item); local_def(my_id) } 
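`StructBaseInfo` just above shows the type-level cost of swapping a `Gc` field for a borrow: the struct acquires a lifetime parameter, and `trans_adt`'s `optbase` argument has to carry it. A minimal sketch of that shape, using a placeholder `Expr` and simplified fields:

    struct Expr { id: u32 }

    // Before: `expr: Gc<Expr>` needed no lifetime. After: holding a borrow
    // ties the struct to the AST it points into, so a parameter appears.
    struct StructBaseInfo<'a> {
        /// The base expression; evaluated after all explicit fields.
        expr: &'a Expr,
        /// Indices of the fields to copy from the base.
        fields: Vec<usize>,
    }

    fn trans_adt(fields: &[(usize, &Expr)], optbase: Option<StructBaseInfo<'_>>) -> usize {
        // Count the field slots written: explicit ones plus copied-from-base ones.
        fields.len() + optbase.map(|b| { let _ = b.expr.id; b.fields.len() }).unwrap_or(0)
    }

    fn main() {
        let base = Expr { id: 1 };
        let explicit = Expr { id: 2 };
        let info = StructBaseInfo { expr: &base, fields: vec![1, 2] };
        assert_eq!(trans_adt(&[(0, &explicit)], Some(info)), 3);
    }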
csearch::found_parent(_, _) => { ccx.sess().bug("maybe_get_item_ast returned a found_parent \ with a non-item parent"); } - csearch::found(ast::IITraitItem(impl_did, impl_item)) => { - match impl_item { - ast::ProvidedInlinedTraitItem(mth) | - ast::RequiredInlinedTraitItem(mth) => { + csearch::found(&ast::IITraitItem(_, ref trait_item)) => { + match *trait_item { + ast::RequiredMethod(_) => ccx.sess().bug("found RequiredMethod IITraitItem"), + ast::ProvidedMethod(ref mth) => { ccx.external().borrow_mut().insert(fn_id, Some(mth.id)); ccx.external_srcs().borrow_mut().insert(mth.id, fn_id); ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1); - } - } - match impl_item { - ast::ProvidedInlinedTraitItem(mth) => { // If this is a default method, we can't look up the // impl type. But we aren't going to translate anyways, so // don't. local_def(mth.id) } - ast::RequiredInlinedTraitItem(mth) => { + } + } + csearch::found(&ast::IIImplItem(impl_did, ref impl_item)) => { + match *impl_item { + ast::MethodImplItem(ref mth) => { + ccx.external().borrow_mut().insert(fn_id, Some(mth.id)); + ccx.external_srcs().borrow_mut().insert(mth.id, fn_id); + + ccx.stats().n_inlines.set(ccx.stats().n_inlines.get() + 1); + let impl_tpt = ty::lookup_item_type(ccx.tcx(), impl_did); let unparameterized = impl_tpt.generics.types.is_empty() && mth.pe_generics().ty_params.is_empty(); diff --git a/src/librustc/middle/trans/meth.rs b/src/librustc/middle/trans/meth.rs index 384502025cb1f..4202c2deff6fc 100644 --- a/src/librustc/middle/trans/meth.rs +++ b/src/librustc/middle/trans/meth.rs @@ -66,8 +66,8 @@ pub fn trans_impl(ccx: &CrateContext, let mut v = TransItemVisitor{ ccx: ccx }; for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { - visit::walk_method_helper(&mut v, &*method); + ast::MethodImplItem(ref method) => { + visit::walk_method_helper(&mut v, &**method); } } } @@ -75,14 +75,14 @@ pub fn trans_impl(ccx: &CrateContext, } for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(method) => { + ast::MethodImplItem(ref method) => { if method.pe_generics().ty_params.len() == 0u { let trans_everywhere = attr::requests_inline(method.attrs.as_slice()); for (ref ccx, is_origin) in ccx.maybe_iter(trans_everywhere) { let llfn = get_item_val(ccx, method.id); trans_fn(ccx, - &*method.pe_fn_decl(), - &*method.pe_body(), + method.pe_fn_decl(), + method.pe_body(), llfn, ¶m_substs::empty(), method.id, @@ -96,7 +96,7 @@ pub fn trans_impl(ccx: &CrateContext, let mut v = TransItemVisitor { ccx: ccx, }; - visit::walk_method_helper(&mut v, &*method); + visit::walk_method_helper(&mut v, &**method); } } } diff --git a/src/librustc/middle/trans/monomorphize.rs b/src/librustc/middle/trans/monomorphize.rs index 1cf3e55967d4e..57004922ef7d1 100644 --- a/src/librustc/middle/trans/monomorphize.rs +++ b/src/librustc/middle/trans/monomorphize.rs @@ -221,13 +221,13 @@ pub fn monomorphic_fn(ccx: &CrateContext, } ast_map::NodeImplItem(ii) => { match *ii { - ast::MethodImplItem(mth) => { + ast::MethodImplItem(ref mth) => { let d = mk_lldecl(abi::Rust); let needs_body = setup_lldecl(d, mth.attrs.as_slice()); if needs_body { trans_fn(ccx, - &*mth.pe_fn_decl(), - &*mth.pe_body(), + mth.pe_fn_decl(), + mth.pe_body(), d, &psubsts, mth.id, @@ -239,11 +239,11 @@ pub fn monomorphic_fn(ccx: &CrateContext, } ast_map::NodeTraitItem(method) => { match *method { - ast::ProvidedMethod(mth) => { + ast::ProvidedMethod(ref mth) => { let d = mk_lldecl(abi::Rust); let needs_body = setup_lldecl(d, 
mth.attrs.as_slice()); if needs_body { - trans_fn(ccx, &*mth.pe_fn_decl(), &*mth.pe_body(), d, + trans_fn(ccx, mth.pe_fn_decl(), mth.pe_body(), d, &psubsts, mth.id, []); } d diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs index 8a915bbfff454..f5c3ed388b7f9 100644 --- a/src/librustc/middle/trans/tvec.rs +++ b/src/librustc/middle/trans/tvec.rs @@ -156,7 +156,7 @@ pub fn trans_slice_vec<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Handle the "..." case (returns a slice since strings are always unsized): match content_expr.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => { let scratch = rvalue_scratch_datum(bcx, vec_ty, ""); @@ -255,7 +255,7 @@ pub fn write_content<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, bcx.expr_to_string(vstore_expr)); match content_expr.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => { match dest { @@ -363,7 +363,7 @@ pub fn elements_required(bcx: Block, content_expr: &ast::Expr) -> uint { //! Figure out the number of elements we need to store this content match content_expr.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) => s.get().len(), _ => { diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 48173cc680428..bf35e25635aa2 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -44,7 +44,6 @@ use std::cmp; use std::fmt::Show; use std::fmt; use std::hash::{Hash, sip, Writer}; -use std::gc::Gc; use std::iter::AdditiveIterator; use std::mem; use std::ops; @@ -459,7 +458,7 @@ pub struct ctxt<'tcx> { pub trait_refs: RefCell>>, pub trait_defs: RefCell>>, - pub map: ast_map::Map, + pub map: ast_map::Map<'tcx>, pub intrinsic_defs: RefCell>, pub freevars: RefCell, pub tcache: type_cache, @@ -533,8 +532,8 @@ pub struct ctxt<'tcx> { /// These two caches are used by const_eval when decoding external statics /// and variants that are found. 
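Giving the AST map a lifetime (`ast_map::Map<'tcx>`, stored in `ty::ctxt<'tcx>` and threaded through `mk_ctxt`) is what makes the rest of the patch possible: once the type context can name the lifetime of the arena-allocated AST, caches such as the two `extern_const_*` tables that follow can hold borrows into that arena rather than `Gc` pointers. A much-reduced sketch of the ownership structure; the `Arena`, `Map`, and `Ctxt` types here are illustrative stand-ins, not the real rustc ones, and the cache value types are simplified:

    use std::cell::RefCell;
    use std::collections::HashMap;

    struct Expr { id: u32 }

    // The arena owns the nodes; everything else borrows them for 'tcx.
    struct Arena { exprs: Vec<Box<Expr>> }

    struct Map<'tcx> { by_id: HashMap<u32, &'tcx Expr> }

    struct Ctxt<'tcx> {
        map: Map<'tcx>,
        // Caches can store plain borrows instead of Gc pointers.
        const_cache: RefCell<HashMap<u32, Option<&'tcx Expr>>>,
    }

    fn main() {
        let arena = Arena { exprs: vec![Box::new(Expr { id: 7 })] };
        let mut by_id = HashMap::new();
        for e in &arena.exprs {
            by_id.insert(e.id, &**e);
        }
        let ctxt = Ctxt { map: Map { by_id }, const_cache: RefCell::new(HashMap::new()) };
        let found = ctxt.map.by_id.get(&7).copied();
        ctxt.const_cache.borrow_mut().insert(7, found);
        assert!(ctxt.const_cache.borrow()[&7].is_some());
    }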
- pub extern_const_statics: RefCell>>>, - pub extern_const_variants: RefCell>>>, + pub extern_const_statics: RefCell>, + pub extern_const_variants: RefCell>, pub method_map: typeck::MethodMap, pub vtable_map: typeck::vtable_map, @@ -1382,7 +1381,7 @@ pub fn mk_ctxt<'tcx>(s: Session, type_arena: &'tcx TypedArena, dm: resolve::DefMap, named_region_map: resolve_lifetime::NamedRegionMap, - map: ast_map::Map, + map: ast_map::Map<'tcx>, freevars: freevars::freevar_map, capture_modes: freevars::CaptureModeMap, region_maps: middle::region::RegionMaps, @@ -3619,7 +3618,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { RvalueDpsExpr } - ast::ExprLit(lit) if lit_is_str(lit) => { + ast::ExprLit(ref lit) if lit_is_str(&**lit) => { RvalueDpsExpr } @@ -3668,7 +3667,7 @@ pub fn expr_kind(tcx: &ctxt, expr: &ast::Expr) -> ExprKind { RvalueDatumExpr } - ast::ExprBox(place, _) => { + ast::ExprBox(ref place, _) => { // Special case `Box`/`Gc` for now: let definition = match tcx.def_map.borrow().find(&place.id) { Some(&def) => def, @@ -3959,16 +3958,15 @@ pub fn provided_trait_methods(cx: &ctxt, id: ast::DefId) -> Vec> { Some(ast_map::NodeItem(item)) => { match item.node { ItemTrait(_, _, _, ref ms) => { - let (_, p) = ast_util::split_trait_methods(ms.as_slice()); - p.iter() - .map(|m| { - match impl_or_trait_item( - cx, - ast_util::local_def(m.id)) { - MethodTraitItem(m) => m, + ms.iter().filter_map(|m| match *m { + ast::RequiredMethod(_) => None, + ast::ProvidedMethod(ref m) => { + match impl_or_trait_item(cx, + ast_util::local_def(m.id)) { + MethodTraitItem(m) => Some(m), + } } - }) - .collect() + }).collect() } _ => { cx.sess.bug(format!("provided_trait_methods: `{}` is \ @@ -4289,11 +4287,11 @@ pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc>> { expr, since check_enum_variants also updates the enum_var_cache */ match cx.map.get(id.node) { - ast_map::NodeItem(item) => { + ast_map::NodeItem(ref item) => { match item.node { ast::ItemEnum(ref enum_definition, _) => { let mut last_discriminant: Option = None; - Rc::new(enum_definition.variants.iter().map(|&variant| { + Rc::new(enum_definition.variants.iter().map(|variant| { let mut discriminant = match last_discriminant { Some(val) => val + 1, @@ -4324,7 +4322,7 @@ pub fn enum_variants(cx: &ctxt, id: ast::DefId) -> Rc>> { }; last_discriminant = Some(discriminant); - Rc::new(VariantInfo::from_ast_variant(cx, &*variant, + Rc::new(VariantInfo::from_ast_variant(cx, &**variant, discriminant)) }).collect()) } diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index f5fa6168a415c..f2fe7fe628221 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -451,13 +451,10 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( for inner_ast_type in path.segments .iter() .flat_map(|s| s.types.iter()) { - let mt = ast::MutTy { - ty: *inner_ast_type, - mutbl: ast::MutImmutable, - }; return Some(mk_pointer(this, rscope, - &mt, + ast::MutImmutable, + &**inner_ast_type, Uniq, |typ| ty::mk_uniq(this.tcx(), typ))); } @@ -478,13 +475,10 @@ pub fn ast_ty_to_builtin_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( for inner_ast_type in path.segments .iter() .flat_map(|s| s.types.iter()) { - let mt = ast::MutTy { - ty: *inner_ast_type, - mutbl: ast::MutImmutable, - }; return Some(mk_pointer(this, rscope, - &mt, + ast::MutImmutable, + &**inner_ast_type, Box, |typ| { match ty::get(typ).sty { @@ -578,14 +572,15 @@ pub fn trait_ref_for_unboxed_function<'tcx, AC: 
AstConv<'tcx>, fn mk_pointer<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( this: &AC, rscope: &RS, - a_seq_ty: &ast::MutTy, + a_seq_mutbl: ast::Mutability, + a_seq_ty: &ast::Ty, ptr_ty: PointerTy, constr: |ty::t| -> ty::t) -> ty::t { let tcx = this.tcx(); debug!("mk_pointer(ptr_ty={})", ptr_ty); - match a_seq_ty.ty.node { + match a_seq_ty.node { ast::TyVec(ref ty) => { let ty = ast_ty_to_ty(this, rscope, &**ty); return constr(ty::mk_vec(tcx, ty, None)); @@ -610,11 +605,11 @@ fn mk_pointer<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( RPtr(r) => { return ty::mk_rptr(this.tcx(), r, - ty::mt {mutbl: a_seq_ty.mutbl, ty: tr}); + ty::mt {mutbl: a_seq_mutbl, ty: tr}); } _ => { tcx.sess.span_err( - a_seq_ty.ty.span, + a_seq_ty.span, "~trait or &trait are the only supported \ forms of casting-to-trait"); return ty::mk_err(); @@ -671,7 +666,7 @@ fn mk_pointer<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( return ty::mk_uniq(tcx, tr); } RPtr(r) => { - return ty::mk_rptr(tcx, r, ty::mt{mutbl: a_seq_ty.mutbl, ty: tr}); + return ty::mk_rptr(tcx, r, ty::mt{mutbl: a_seq_mutbl, ty: tr}); } _ => { tcx.sess.span_err( @@ -688,7 +683,7 @@ fn mk_pointer<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( _ => {} } - constr(ast_ty_to_ty(this, rscope, &*a_seq_ty.ty)) + constr(ast_ty_to_ty(this, rscope, a_seq_ty)) } // Parses the programmer's textual representation of a type into our @@ -716,17 +711,16 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( match ast_ty.node { ast::TyNil => ty::mk_nil(), ast::TyBot => ty::mk_bot(), - ast::TyBox(ty) => { - let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable }; - mk_pointer(this, rscope, &mt, Box, |ty| ty::mk_box(tcx, ty)) + ast::TyBox(ref ty) => { + mk_pointer(this, rscope, ast::MutImmutable, &**ty, Box, + |ty| ty::mk_box(tcx, ty)) } - ast::TyUniq(ty) => { - let mt = ast::MutTy { ty: ty, mutbl: ast::MutImmutable }; - mk_pointer(this, rscope, &mt, Uniq, + ast::TyUniq(ref ty) => { + mk_pointer(this, rscope, ast::MutImmutable, &**ty, Uniq, |ty| ty::mk_uniq(tcx, ty)) } - ast::TyVec(ty) => { - ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &*ty), None) + ast::TyVec(ref ty) => { + ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), None) } ast::TyPtr(ref mt) => { ty::mk_ptr(tcx, ty::mt { @@ -737,7 +731,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( ast::TyRptr(ref region, ref mt) => { let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region); debug!("ty_rptr r={}", r.repr(this.tcx())); - mk_pointer(this, rscope, mt, RPtr(r), + mk_pointer(this, rscope, mt.mutbl, &*mt.ty, RPtr(r), |ty| ty::mk_rptr(tcx, r, ty::mt {ty: ty, mutbl: mt.mutbl})) } ast::TyTup(ref fields) => { @@ -870,15 +864,15 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( } } } - ast::TyFixedLengthVec(ty, e) => { - match const_eval::eval_const_expr_partial(tcx, &*e) { + ast::TyFixedLengthVec(ref ty, ref e) => { + match const_eval::eval_const_expr_partial(tcx, &**e) { Ok(ref r) => { match *r { const_eval::const_int(i) => - ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &*ty), + ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), Some(i as uint)), const_eval::const_uint(i) => - ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &*ty), + ty::mk_vec(tcx, ast_ty_to_ty(this, rscope, &**ty), Some(i as uint)), _ => { tcx.sess.span_fatal( @@ -895,7 +889,7 @@ pub fn ast_ty_to_ty<'tcx, AC: AstConv<'tcx>, RS: RegionScope>( } } } - ast::TyTypeof(_e) => { + ast::TyTypeof(ref _e) => { tcx.sess.span_bug(ast_ty.span, "typeof is reserved but unimplemented"); } ast::TyInfer => { @@ -925,7 +919,7 @@ pub 
fn ty_of_arg<'tcx, AC: AstConv<'tcx>, RS: RegionScope>(this: &AC, rscope: &R struct SelfInfo<'a> { untransformed_self_ty: ty::t, - explicit_self: ast::ExplicitSelf, + explicit_self: &'a ast::ExplicitSelf, } pub fn ty_of_method<'tcx, AC: AstConv<'tcx>>( @@ -933,7 +927,7 @@ pub fn ty_of_method<'tcx, AC: AstConv<'tcx>>( id: ast::NodeId, fn_style: ast::FnStyle, untransformed_self_ty: ty::t, - explicit_self: ast::ExplicitSelf, + explicit_self: &ast::ExplicitSelf, decl: &ast::FnDecl, abi: abi::Abi) -> (ty::BareFnTy, ty::ExplicitSelfCategory) { @@ -1087,8 +1081,8 @@ fn determine_explicit_self_category<'tcx, AC: AstConv<'tcx>, lifetime); ty::ByReferenceExplicitSelfCategory(region, mutability) } - ast::SelfExplicit(ast_type, _) => { - let explicit_type = ast_ty_to_ty(this, rscope, &*ast_type); + ast::SelfExplicit(ref ast_type, _) => { + let explicit_type = ast_ty_to_ty(this, rscope, &**ast_type); { let inference_context = infer::new_infer_ctxt(this.tcx()); diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 247178770d21a..1602dfeaa280a 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -24,12 +24,12 @@ use middle::typeck::require_same_types; use util::ppaux; use std::collections::{HashMap, HashSet}; -use std::gc::Gc; use syntax::ast; use syntax::ast_util; use syntax::parse::token; use syntax::codemap::Span; use syntax::print::pprust; +use syntax::ptr::P; pub fn check_match(fcx: &FnCtxt, expr: &ast::Expr, @@ -66,17 +66,17 @@ pub fn check_match(fcx: &FnCtxt, let mut guard_err = false; let mut guard_bot = false; match arm.guard { - Some(ref e) => { - check_expr_has_type(fcx, &**e, ty::mk_bool()); - let e_ty = fcx.expr_ty(&**e); - if ty::type_is_error(e_ty) { - guard_err = true; - } - else if ty::type_is_bot(e_ty) { - guard_bot = true; - } - }, - None => () + Some(ref e) => { + check_expr_has_type(fcx, &**e, ty::mk_bool()); + let e_ty = fcx.expr_ty(&**e); + if ty::type_is_error(e_ty) { + guard_err = true; + } + else if ty::type_is_bot(e_ty) { + guard_bot = true; + } + }, + None => () } check_expr(fcx, &*arm.body); let bty = fcx.node_ty(arm.body.id); @@ -113,7 +113,7 @@ pub struct pat_ctxt<'a, 'tcx: 'a> { } pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, - subpats: &Option>>, expected: ty::t) { + subpats: &Option>>, expected: ty::t) { // Typecheck the path. 
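`check_pat_variant` now borrows the sub-pattern list as a reference to the whole `Option` and destructures it with `match *subpats { Some(ref pats) => ... }`; other call sites in the patch instead convert to an `Option` of a borrow up front. Both styles avoid cloning and only differ in where the dereference happens. A side-by-side sketch with `Box` standing in for `P`:

    struct Pat;

    // Style 1: borrow the whole Option and destructure through it.
    fn count_subpats(subpats: &Option<Vec<Box<Pat>>>) -> usize {
        match *subpats {
            Some(ref pats) => pats.len(),
            None => 0,
        }
    }

    // Style 2: convert to Option<&[Box<Pat>]> at the call boundary instead.
    fn count_subpats_ref(subpats: Option<&[Box<Pat>]>) -> usize {
        subpats.map(|pats| pats.len()).unwrap_or(0)
    }

    fn main() {
        let subpats = Some(vec![Box::new(Pat), Box::new(Pat)]);
        assert_eq!(count_subpats(&subpats), 2);
        assert_eq!(count_subpats_ref(subpats.as_ref().map(|v| &v[..])), 2);
        assert_eq!(count_subpats(&None), 0);
    }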
let fcx = pcx.fcx; @@ -457,7 +457,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { demand::suptype(fcx, pat.span, expected, const_pty.ty); fcx.write_ty(pat.id, const_pty.ty); } - ast::PatIdent(bm, ref path1, sub) if pat_is_binding(&tcx.def_map, pat) => { + ast::PatIdent(bm, ref path1, ref sub) if pat_is_binding(&tcx.def_map, pat) => { let typ = fcx.local_ty(pat.span, pat.id); match bm { @@ -491,9 +491,9 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { ppaux::ty_to_string(tcx, expected), pat.id); - match sub { - Some(ref p) => check_pat(pcx, &**p, expected), - _ => () + match *sub { + Some(ref p) => check_pat(pcx, &**p, expected), + _ => () } } // it's not a binding, it's an enum in disguise: @@ -624,14 +624,14 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { ast::PatRegion(ref inner) => { check_pointer_pat(pcx, Borrowed, &**inner, pat.id, pat.span, expected); } - ast::PatVec(ref before, slice, ref after) => { + ast::PatVec(ref before, ref slice, ref after) => { let default_region_var = fcx.infcx().next_region_var( infer::PatternRegion(pat.span)); let check_err = |found: String| { - for &elt in before.iter() { - check_pat(pcx, &*elt, ty::mk_err()); + for elt in before.iter() { + check_pat(pcx, &**elt, ty::mk_err()); } for elt in slice.iter() { check_pat(pcx, &**elt, ty::mk_err()); @@ -690,7 +690,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { }; let min_len = before.len() + after.len(); - fixed.and_then(|count| match slice { + fixed.and_then(|count| match *slice { Some(_) if count < min_len => Some(format!("a fixed vector pattern of size at least {}", min_len)), @@ -703,7 +703,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { for elt in before.iter() { check_pat(pcx, &**elt, elt_type); } - match slice { + match *slice { Some(ref slice_pat) => { let slice_ty = ty::mk_slice(tcx, region_var, diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 47a4b6f86229e..8ac9f072a0b32 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -124,7 +124,7 @@ use std::cell::{Cell, RefCell}; use std::collections::HashMap; use std::mem::replace; use std::rc::Rc; -use std::gc::Gc; +use std::slice; use syntax::abi; use syntax::ast::{ProvidedMethod, RequiredMethod}; use syntax::ast; @@ -137,6 +137,7 @@ use syntax::codemap; use syntax::owned_slice::OwnedSlice; use syntax::parse::token; use syntax::print::pprust; +use syntax::ptr::P; use syntax::visit; use syntax::visit::Visitor; use syntax; @@ -401,7 +402,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckItemSizedTypesVisitor<'a, 'tcx> { } } -pub fn check_item_types(ccx: &CrateCtxt, krate: &ast::Crate) { +pub fn check_item_types(ccx: &CrateCtxt) { + let krate = ccx.tcx.map.krate(); + let mut visit = CheckTypeWellFormedVisitor { ccx: ccx }; visit::walk_crate(&mut visit, krate); @@ -624,7 +627,7 @@ fn span_for_field(tcx: &ty::ctxt, field: &ty::field_ty, struct_id: ast::DefId) - }; match item.node { - ast::ItemStruct(struct_def, _) => { + ast::ItemStruct(ref struct_def, _) => { match struct_def.fields.iter().find(|f| match f.node.kind { ast::NamedField(ident, _) => ident.name == field.name, _ => false, @@ -818,8 +821,8 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { let impl_pty = ty::lookup_item_type(ccx.tcx, ast_util::local_def(it.id)); for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(m) => { - check_method_body(ccx, 
&impl_pty.generics, &*m); + ast::MethodImplItem(ref m) => { + check_method_body(ccx, &impl_pty.generics, &**m); } } } @@ -841,14 +844,14 @@ pub fn check_item(ccx: &CrateCtxt, it: &ast::Item) { } ast::ItemTrait(_, _, _, ref trait_methods) => { let trait_def = ty::lookup_trait_def(ccx.tcx, local_def(it.id)); - for trait_method in (*trait_methods).iter() { + for trait_method in trait_methods.iter() { match *trait_method { RequiredMethod(..) => { // Nothing to do, since required methods don't have // bodies to check. } - ProvidedMethod(m) => { - check_method_body(ccx, &trait_def.generics, &*m); + ProvidedMethod(ref m) => { + check_method_body(ccx, &trait_def.generics, &**m); } } } @@ -930,7 +933,7 @@ fn check_impl_items_against_trait(ccx: &CrateCtxt, // and compatible with trait signature for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(impl_method) => { + ast::MethodImplItem(ref impl_method) => { let impl_method_def_id = local_def(impl_method.id); let impl_item_ty = ty::impl_or_trait_item(ccx.tcx, impl_method_def_id); @@ -983,7 +986,7 @@ fn check_impl_items_against_trait(ccx: &CrateCtxt, let is_implemented = impl_items.iter().any(|ii| { match *ii { - ast::MethodImplItem(m) => { + ast::MethodImplItem(ref m) => { m.pe_ident().name == trait_method.ident.name } } @@ -1968,9 +1971,9 @@ pub fn autoderef(fcx: &FnCtxt, sp: Span, base_ty: ty::t, /// Attempts to resolve a call expression as an overloaded call. fn try_overloaded_call(fcx: &FnCtxt, call_expression: &ast::Expr, - callee: Gc, + callee: &ast::Expr, callee_type: ty::t, - args: &[Gc]) + args: &[P]) -> bool { // Bail out if the callee is a bare function or a closure. We check those // manually. @@ -2072,9 +2075,9 @@ fn try_overloaded_deref(fcx: &FnCtxt, fn try_overloaded_index(fcx: &FnCtxt, method_call: Option, expr: &ast::Expr, - base_expr: Gc, + base_expr: &ast::Expr, base_ty: ty::t, - index_expr: Gc, + index_expr: &P, lvalue_pref: LvaluePreference) -> Option { // Try `IndexMut` first, if preferred. @@ -2119,7 +2122,7 @@ fn try_overloaded_index(fcx: &FnCtxt, expr.span, method_type, expr, - [base_expr, index_expr], + slice::ref_slice(index_expr), DoDerefArgs, DontTupleArguments); @@ -2145,7 +2148,7 @@ fn try_overloaded_index(fcx: &FnCtxt, /// The return type of this function represents the concrete element type /// `A` in the type `Iterator` that the method returns. fn lookup_method_for_for_loop(fcx: &FnCtxt, - iterator_expr: Gc, + iterator_expr: &ast::Expr, loop_id: ast::NodeId) -> ty::t { let trait_did = match fcx.tcx().lang_items.require(IteratorItem) { @@ -2188,8 +2191,8 @@ fn lookup_method_for_for_loop(fcx: &FnCtxt, let return_type = check_method_argument_types(fcx, iterator_expr.span, method_type, - &*iterator_expr, - [iterator_expr], + iterator_expr, + &[], DontDerefArgs, DontTupleArguments); @@ -2224,23 +2227,17 @@ fn check_method_argument_types(fcx: &FnCtxt, sp: Span, method_fn_ty: ty::t, callee_expr: &ast::Expr, - args: &[Gc], + args_no_rcvr: &[P], deref_args: DerefArgs, tuple_arguments: TupleArgumentsFlag) -> ty::t { - // HACK(eddyb) ignore provided self (it has special typeck rules). 
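The deleted `HACK(eddyb)` block used to peel the receiver off inside `check_method_argument_types`; after this hunk the parameter is explicitly `args_no_rcvr`, and each caller separates the receiver itself (`let rcvr = &*args[0];` plus `args.slice_from(1)` in `check_method_call` below). With `Box` in place of `P` and today's slicing syntax instead of `slice_from`, the division of labour looks roughly like this:

    struct Expr { id: u32 }

    // Only the non-receiver arguments are checked against the signature.
    fn check_method_argument_types(args_no_rcvr: &[Box<Expr>]) -> usize {
        args_no_rcvr.len()
    }

    fn check_method_call(args: &[Box<Expr>]) -> usize {
        // args[0] is the receiver; the rest go to the argument checker.
        let rcvr = &*args[0];
        let _ = rcvr.id; // the receiver gets its own special-cased treatment
        check_method_argument_types(&args[1..]) // `args.slice_from(1)` in 2014 Rust
    }

    fn main() {
        let args = vec![
            Box::new(Expr { id: 0 }), // receiver
            Box::new(Expr { id: 1 }),
            Box::new(Expr { id: 2 }),
        ];
        assert_eq!(check_method_call(&args), 2);
    }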
- let args = if tuple_arguments == DontTupleArguments { - args.slice_from(1) - } else { - args - }; if ty::type_is_error(method_fn_ty) { - let err_inputs = err_args(args.len()); + let err_inputs = err_args(args_no_rcvr.len()); check_argument_types(fcx, sp, err_inputs.as_slice(), callee_expr, - args, + args_no_rcvr, deref_args, false, tuple_arguments); @@ -2253,7 +2250,7 @@ fn check_method_argument_types(fcx: &FnCtxt, sp, fty.sig.inputs.slice_from(1), callee_expr, - args, + args_no_rcvr, deref_args, fty.sig.variadic, tuple_arguments); @@ -2271,7 +2268,7 @@ fn check_argument_types(fcx: &FnCtxt, sp: Span, fn_inputs: &[ty::t], callee_expr: &ast::Expr, - args: &[Gc], + args: &[P], deref_args: DerefArgs, variadic: bool, tuple_arguments: TupleArgumentsFlag) { @@ -2665,7 +2662,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_call(fcx: &FnCtxt, call_expr: &ast::Expr, f: &ast::Expr, - args: &[Gc]) { + args: &[P]) { // Store the type of `f` as the type of the callee let fn_ty = fcx.expr_ty(f); @@ -2716,9 +2713,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_method_call(fcx: &FnCtxt, expr: &ast::Expr, method_name: ast::SpannedIdent, - args: &[Gc], - tps: &[ast::P]) { - let rcvr = args[0].clone(); + args: &[P], + tps: &[P]) { + let rcvr = &*args[0]; // We can't know if we need &mut self before we look up the method, // so treat the receiver as mutable just in case - only explicit // overloaded dereferences care about the distinction. @@ -2779,7 +2776,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, method_name.span, fn_ty, expr, - args, + args.slice_from(1), DontDerefArgs, DontTupleArguments); @@ -2791,7 +2788,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_then_else(fcx: &FnCtxt, cond_expr: &ast::Expr, then_blk: &ast::Block, - opt_else_expr: Option>, + opt_else_expr: Option<&ast::Expr>, id: ast::NodeId, sp: Span, expected: Expectation) { @@ -2852,22 +2849,31 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fcx.write_ty(id, if_ty); } - fn lookup_op_method(fcx: &FnCtxt, - op_ex: &ast::Expr, - self_t: ty::t, - opname: ast::Name, - trait_did: Option, - args: &[Gc], - autoderef_receiver: AutoderefReceiverFlag, - unbound_method: ||) -> ty::t { + fn lookup_op_method<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, + op_ex: &ast::Expr, + lhs_ty: ty::t, + opname: ast::Name, + trait_did: Option, + lhs: &'a ast::Expr, + rhs: Option<&P>, + autoderef_receiver: AutoderefReceiverFlag, + unbound_method: ||) -> ty::t { let method = match trait_did { Some(trait_did) => { - method::lookup_in_trait(fcx, op_ex.span, Some(&*args[0]), opname, - trait_did, self_t, [], autoderef_receiver, + method::lookup_in_trait(fcx, op_ex.span, Some(lhs), opname, + trait_did, lhs_ty, &[], autoderef_receiver, IgnoreStaticMethods) } None => None }; + let args = match rhs { + Some(rhs) => slice::ref_slice(rhs), + None => { + // Work around the lack of coercion. 
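Two small tricks sit around this point. `slice::ref_slice`, used here and in `try_overloaded_index` above, builds a one-element slice directly from a single reference with no allocation; it was an unstable function at the time, and `std::slice::from_ref` is the stable equivalent today. The `let empty: &[_] = &[];` line that follows exists because, in the Rust of that era, a bare `&[]` in that position would not coerce to the required slice type on its own; current compilers usually do not need the annotation, but the intent is the same. A sketch of the combined zero-or-one-argument pattern, with `Box` standing in for `P`:

    use std::slice;

    struct Expr { id: u32 }

    // Build the argument slice for an operator call: one rhs for binary
    // operators, none for unary operators.
    fn arg_slice(rhs: Option<&Box<Expr>>) -> &[Box<Expr>] {
        match rhs {
            // One-element slice borrowed straight from the reference.
            Some(rhs) => slice::from_ref(rhs),
            None => {
                // The "work around the lack of coercion" annotation from the
                // patch, kept here for illustration.
                let empty: &[_] = &[];
                empty
            }
        }
    }

    fn main() {
        let rhs = Box::new(Expr { id: 9 });
        assert_eq!(arg_slice(Some(&rhs))[0].id, 9);
        assert_eq!(arg_slice(None).len(), 0);
    }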
+ let empty: &[_] = &[]; + empty + } + }; match method { Some(method) => { let method_ty = method.ty; @@ -2903,8 +2909,8 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_binop(fcx: &FnCtxt, expr: &ast::Expr, op: ast::BinOp, - lhs: Gc, - rhs: Gc, + lhs: &ast::Expr, + rhs: &P, is_binop_assignment: IsBinopAssignment) { let tcx = fcx.ccx.tcx; @@ -2920,7 +2926,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, if ty::type_is_integral(lhs_t) && ast_util::is_shift_binop(op) { // Shift is a special case: rhs must be uint, no matter what lhs is - check_expr_has_type(fcx, &*rhs, ty::mk_uint()); + check_expr_has_type(fcx, &**rhs, ty::mk_uint()); fcx.write_ty(expr.id, lhs_t); return; } @@ -2928,7 +2934,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, if ty::is_binopable(tcx, lhs_t, op) { let tvar = fcx.infcx().next_ty_var(); demand::suptype(fcx, expr.span, tvar, lhs_t); - check_expr_has_type(fcx, &*rhs, tvar); + check_expr_has_type(fcx, &**rhs, tvar); let result_t = match op { ast::BiEq | ast::BiNe | ast::BiLt | ast::BiLe | ast::BiGe | @@ -2993,7 +2999,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, }, lhs_t, None); - check_expr(fcx, &*rhs); + check_expr(fcx, &**rhs); ty::mk_err() }; @@ -3005,10 +3011,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_user_binop(fcx: &FnCtxt, ex: &ast::Expr, - lhs_expr: Gc, + lhs_expr: &ast::Expr, lhs_resolved_t: ty::t, op: ast::BinOp, - rhs: Gc) -> ty::t { + rhs: &P) -> ty::t { let tcx = fcx.ccx.tcx; let lang = &tcx.lang_items; let (name, trait_did) = match op { @@ -3029,12 +3035,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt, ast::BiEq => ("eq", lang.eq_trait()), ast::BiNe => ("ne", lang.eq_trait()), ast::BiAnd | ast::BiOr => { - check_expr(fcx, &*rhs); + check_expr(fcx, &**rhs); return ty::mk_err(); } }; lookup_op_method(fcx, ex, lhs_resolved_t, token::intern(name), - trait_did, [lhs_expr, rhs], DontAutoderefReceiver, || { + trait_did, lhs_expr, Some(rhs), DontAutoderefReceiver, || { fcx.type_error_message(ex.span, |actual| { format!("binary operation `{}` cannot be applied to type `{}`", ast_util::binop_to_string(op), @@ -3048,10 +3054,10 @@ fn check_expr_with_unifier(fcx: &FnCtxt, mname: &str, trait_did: Option, ex: &ast::Expr, - rhs_expr: Gc, + rhs_expr: &ast::Expr, rhs_t: ty::t) -> ty::t { lookup_op_method(fcx, ex, rhs_t, token::intern(mname), - trait_did, [rhs_expr], DontAutoderefReceiver, || { + trait_did, rhs_expr, None, DontAutoderefReceiver, || { fcx.type_error_message(ex.span, |actual| { format!("cannot apply unary operator `{}` to type `{}`", op_str, actual) @@ -3063,7 +3069,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, expr: &ast::Expr, kind: ast::UnboxedClosureKind, decl: &ast::FnDecl, - body: ast::P) { + body: &ast::Block) { let mut fn_ty = astconv::ty_of_closure( fcx, expr.id, @@ -3131,7 +3137,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, expr: &ast::Expr, store: ty::TraitStore, decl: &ast::FnDecl, - body: ast::P, + body: &ast::Block, expected: Expectation) { let tcx = fcx.ccx.tcx; @@ -3228,7 +3234,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, lvalue_pref: LvaluePreference, base: &ast::Expr, field: &ast::SpannedIdent, - tys: &[ast::P]) { + tys: &[P]) { let tcx = fcx.ccx.tcx; check_expr_with_lvalue_pref(fcx, base, lvalue_pref); let expr_t = structurally_resolved_type(fcx, expr.span, @@ -3302,7 +3308,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, lvalue_pref: LvaluePreference, base: &ast::Expr, idx: codemap::Spanned, - _tys: &[ast::P]) { + _tys: &[P]) { let tcx = fcx.ccx.tcx; check_expr_with_lvalue_pref(fcx, base, lvalue_pref); let expr_t = 
structurally_resolved_type(fcx, expr.span, @@ -3453,7 +3459,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, span: codemap::Span, class_id: ast::DefId, fields: &[ast::Field], - base_expr: Option>) { + base_expr: Option<&ast::Expr>) { let tcx = fcx.ccx.tcx; // Look up the number of type parameters and the raw type, and @@ -3527,14 +3533,14 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fn check_struct_fields_on_error(fcx: &FnCtxt, id: ast::NodeId, fields: &[ast::Field], - base_expr: Option>) { + base_expr: &Option>) { // Make sure to still write the types // otherwise we might ICE fcx.write_error(id); for field in fields.iter() { check_expr(fcx, &*field.expr); } - match base_expr { + match *base_expr { Some(ref base) => check_expr(fcx, &**base), None => {} } @@ -3578,12 +3584,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } } - ast::ExprLit(lit) => { - let typ = check_lit(fcx, &*lit, expected); + ast::ExprLit(ref lit) => { + let typ = check_lit(fcx, &**lit, expected); fcx.write_ty(id, typ); } ast::ExprBinary(op, ref lhs, ref rhs) => { - check_binop(fcx, expr, op, lhs.clone(), rhs.clone(), SimpleBinop); + check_binop(fcx, expr, op, &**lhs, rhs, SimpleBinop); let lhs_ty = fcx.expr_ty(&**lhs); let rhs_ty = fcx.expr_ty(&**rhs); @@ -3597,7 +3603,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } } ast::ExprAssignOp(op, ref lhs, ref rhs) => { - check_binop(fcx, expr, op, lhs.clone(), rhs.clone(), BinopAssignment); + check_binop(fcx, expr, op, &**lhs, rhs, BinopAssignment); let lhs_t = fcx.expr_ty(&**lhs); let result_t = fcx.expr_ty(expr); @@ -3691,7 +3697,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, ty::get(oprnd_t).sty == ty::ty_bool) { oprnd_t = check_user_unop(fcx, "!", "not", tcx.lang_items.not_trait(), - expr, oprnd.clone(), oprnd_t); + expr, &**oprnd, oprnd_t); } } ast::UnNeg => { @@ -3701,7 +3707,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, ty::type_is_fp(oprnd_t)) { oprnd_t = check_user_unop(fcx, "-", "neg", tcx.lang_items.neg_trait(), - expr, oprnd.clone(), oprnd_t); + expr, &**oprnd, oprnd_t); } } } @@ -3802,12 +3808,12 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } fcx.write_bot(id); } - ast::ExprParen(a) => { + ast::ExprParen(ref a) => { check_expr_with_expectation_and_lvalue_pref(fcx, - &*a, + &**a, expected, lvalue_pref); - fcx.write_ty(id, fcx.expr_ty(&*a)); + fcx.write_ty(id, fcx.expr_ty(&**a)); } ast::ExprAssign(ref lhs, ref rhs) => { check_expr_with_lvalue_pref(fcx, &**lhs, PreferMutLvalue); @@ -3831,7 +3837,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } } ast::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => { - check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.clone(), + check_then_else(fcx, &**cond, &**then_blk, opt_else_expr.as_ref().map(|e| &**e), id, expr.span, expected); } ast::ExprWhile(ref cond, ref body, _) => { @@ -3851,7 +3857,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } ast::ExprForLoop(ref pat, ref head, ref block, _) => { check_expr(fcx, &**head); - let typ = lookup_method_for_for_loop(fcx, *head, expr.id); + let typ = lookup_method_for_for_loop(fcx, &**head, expr.id); vtable::early_resolve_expr(expr, fcx, true); let pcx = pat_ctxt { @@ -3865,10 +3871,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt, } ast::ExprLoop(ref body, _) => { check_block_no_value(fcx, &**body); - if !may_break(tcx, expr.id, body.clone()) { + if !may_break(tcx, expr.id, &**body) { fcx.write_bot(id); - } - else { + } else { fcx.write_nil(id); } } @@ -3884,7 +3889,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, expr, ty::RegionTraitStore(region, ast::MutMutable), &**decl, - body.clone(), + 
&**body, expected); } ast::ExprUnboxedFn(_, kind, ref decl, ref body) => { @@ -3892,14 +3897,14 @@ fn check_expr_with_unifier(fcx: &FnCtxt, expr, kind, &**decl, - *body); + &**body); } ast::ExprProc(ref decl, ref body) => { check_expr_fn(fcx, expr, ty::UniqTraitStore, &**decl, - body.clone(), + &**body, expected); } ast::ExprBlock(ref b) => { @@ -3912,7 +3917,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, check_expr(fcx, &**f); let f_ty = fcx.expr_ty(&**f); - if !try_overloaded_call(fcx, expr, f.clone(), f_ty, args.as_slice()) { + if !try_overloaded_call(fcx, expr, &**f, f_ty, args.as_slice()) { check_call(fcx, expr, &**f, args.as_slice()); let (args_bot, args_err) = args.iter().fold((false, false), |(rest_bot, rest_err), a| { @@ -4050,7 +4055,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, fcx.write_ty(id, typ); } } - ast::ExprStruct(ref path, ref fields, base_expr) => { + ast::ExprStruct(ref path, ref fields, ref base_expr) => { // Resolve the path. let def = tcx.def_map.borrow().find(&id).map(|i| *i); let struct_id = match def { @@ -4079,7 +4084,7 @@ fn check_expr_with_unifier(fcx: &FnCtxt, expr.span, struct_did, fields.as_slice(), - base_expr); + base_expr.as_ref().map(|e| &**e)); } _ => { span_err!(tcx.sess, path.span, E0071, @@ -4168,9 +4173,9 @@ fn check_expr_with_unifier(fcx: &FnCtxt, match try_overloaded_index(fcx, Some(method_call), expr, - *base, + &**base, base_t, - *idx, + idx, lvalue_pref) { Some(mt) => fcx.write_ty(id, mt.ty), None => { @@ -4331,7 +4336,7 @@ pub fn check_stmt(fcx: &FnCtxt, stmt: &ast::Stmt) { let mut saw_bot = false; let mut saw_err = false; match stmt.node { - ast::StmtDecl(decl, id) => { + ast::StmtDecl(ref decl, id) => { node_id = id; match decl.node { ast::DeclLocal(ref l) => { @@ -4404,7 +4409,7 @@ fn check_block_with_expected(fcx: &FnCtxt, let s_id = ast_util::stmt_id(&**s); let s_ty = fcx.node_ty(s_id); if last_was_bot && !warned && match s.node { - ast::StmtDecl(decl, _) => { + ast::StmtDecl(ref decl, _) => { match decl.node { ast::DeclLocal(_) => true, _ => false, @@ -4431,14 +4436,12 @@ fn check_block_with_expected(fcx: &FnCtxt, match blk.expr { None => if any_err { fcx.write_error(blk.id); - } - else if any_bot { + } else if any_bot { fcx.write_bot(blk.id); - } - else { + } else { fcx.write_nil(blk.id); }, - Some(e) => { + Some(ref e) => { if any_bot && !warned { fcx.ccx .tcx @@ -4450,12 +4453,12 @@ fn check_block_with_expected(fcx: &FnCtxt, } let ety = match expected { ExpectHasType(ety) => { - check_expr_coercable_to_type(fcx, &*e, ety); + check_expr_coercable_to_type(fcx, &**e, ety); ety } _ => { - check_expr_with_expectation(fcx, &*e, expected); - fcx.expr_ty(&*e) + check_expr_with_expectation(fcx, &**e, expected); + fcx.expr_ty(&**e) } }; @@ -4603,8 +4606,8 @@ pub fn check_simd(tcx: &ty::ctxt, sp: Span, id: ast::NodeId) { pub fn check_enum_variants_sized(ccx: &CrateCtxt, - vs: &[ast::P]) { - for &v in vs.iter() { + vs: &[P]) { + for v in vs.iter() { match v.node.kind { ast::TupleVariantKind(ref args) if args.len() > 0 => { let ctor_ty = ty::node_id_to_type(ccx.tcx, v.node.id); @@ -4626,7 +4629,9 @@ pub fn check_enum_variants_sized(ccx: &CrateCtxt, } } }, - ast::StructVariantKind(struct_def) => check_fields_sized(ccx.tcx, &*struct_def), + ast::StructVariantKind(ref struct_def) => { + check_fields_sized(ccx.tcx, &**struct_def) + } _ => {} } } @@ -4634,7 +4639,7 @@ pub fn check_enum_variants_sized(ccx: &CrateCtxt, pub fn check_enum_variants(ccx: &CrateCtxt, sp: Span, - vs: &[ast::P], + vs: &[P], id: ast::NodeId) { fn disr_in_range(ccx: &CrateCtxt, 
@@ -4665,7 +4670,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, } fn do_check(ccx: &CrateCtxt, - vs: &[ast::P], + vs: &[P], id: ast::NodeId, hint: attr::ReprAttr) -> Vec> { @@ -4675,7 +4680,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, let mut disr_vals: Vec = Vec::new(); let mut prev_disr_val: Option = None; - for &v in vs.iter() { + for v in vs.iter() { // If the discriminant value is specified explicitly in the enum check whether the // initialization expression is valid, otherwise use the last value plus one. @@ -4685,8 +4690,8 @@ pub fn check_enum_variants(ccx: &CrateCtxt, }; match v.node.disr_expr { - Some(e) => { - debug!("disr expr, checking {}", pprust::expr_to_string(&*e)); + Some(ref e) => { + debug!("disr expr, checking {}", pprust::expr_to_string(&**e)); let inh = static_inherited_fields(ccx); let fcx = blank_fn_ctxt(ccx, &inh, rty, e.id); @@ -4699,12 +4704,12 @@ pub fn check_enum_variants(ccx: &CrateCtxt, ty::mk_mach_uint(ity) }, }; - check_const_with_ty(&fcx, e.span, &*e, declty); + check_const_with_ty(&fcx, e.span, &**e, declty); // check_expr (from check_const pass) doesn't guarantee // that the expression is in a form that eval_const_expr can // handle, so we may still get an internal compiler error - match const_eval::eval_const_expr_partial(ccx.tcx, &*e) { + match const_eval::eval_const_expr_partial(ccx.tcx, &**e) { Ok(const_eval::const_int(val)) => current_disr_val = val as Disr, Ok(const_eval::const_uint(val)) => current_disr_val = val as Disr, Ok(_) => { @@ -4742,7 +4747,7 @@ pub fn check_enum_variants(ccx: &CrateCtxt, } disr_vals.push(current_disr_val); - let variant_info = Rc::new(VariantInfo::from_ast_variant(ccx.tcx, &*v, + let variant_info = Rc::new(VariantInfo::from_ast_variant(ccx.tcx, &**v, current_disr_val)); prev_disr_val = Some(current_disr_val); @@ -5051,8 +5056,8 @@ pub fn instantiate_path(fcx: &FnCtxt, { let type_count = type_defs.len(space); assert_eq!(substs.types.len(space), 0); - for (i, &typ) in segment.types.iter().enumerate() { - let t = fcx.to_ty(&*typ); + for (i, typ) in segment.types.iter().enumerate() { + let t = fcx.to_ty(&**typ); if i < type_count { substs.types.push(space, t); } else if i == type_count { @@ -5256,7 +5261,7 @@ pub fn type_is_c_like_enum(fcx: &FnCtxt, sp: Span, typ: ty::t) -> bool { } // Returns true if b contains a break that can exit from b -pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: ast::P) -> bool { +pub fn may_break(cx: &ty::ctxt, id: ast::NodeId, b: &ast::Block) -> bool { // First: is there an unlabeled break immediately // inside the loop? 
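The `for &v in vs.iter()` to `for v in vs.iter()` edits in `check_enum_variants` (and in `instantiate_path` just below) are the simplest symptom of the whole migration: `Gc<T>` was a `Copy` pointer, so a `&`-pattern could copy it out of the iterator, whereas `P<T>` owns its node and can only be borrowed. A tiny demonstration with `Box` in place of `P`:

    struct Variant { disr: u32 }

    fn check_variants(vs: &[Box<Variant>]) -> u32 {
        let mut last = 0;
        // With Gc<Variant> this could be `for &v in vs.iter()`, copying each
        // pointer out. With an owning pointer that pattern would move out of
        // a shared borrow and is rejected, so iterate by reference instead.
        for v in vs.iter() {
            last = v.disr; // auto-deref through &Box<Variant>
        }
        last
    }

    fn main() {
        let vs = vec![Box::new(Variant { disr: 0 }), Box::new(Variant { disr: 3 })];
        assert_eq!(check_variants(&vs), 3);
        // for &v in vs.iter() { ... }  // rejected: cannot move a Box out of a `&`
    }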
(loop_query(&*b, |e| { diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index 843d6a582eac3..95b7e03e6d9aa 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -141,7 +141,6 @@ use syntax::visit; use syntax::visit::Visitor; use std::cell::RefCell; -use std::gc::Gc; /////////////////////////////////////////////////////////////////////////// // PUBLIC ENTRY POINTS @@ -614,23 +613,20 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { match expr.node { ast::ExprCall(ref callee, ref args) => { if has_method_map { - constrain_call(rcx, expr, Some(*callee), - args.as_slice(), false); + constrain_call(rcx, expr, Some(&**callee), + args.iter().map(|e| &**e), false); } else { constrain_callee(rcx, callee.id, expr, &**callee); - constrain_call(rcx, - expr, - None, - args.as_slice(), - false); + constrain_call(rcx, expr, None, + args.iter().map(|e| &**e), false); } visit::walk_expr(rcx, expr); } ast::ExprMethodCall(_, _, ref args) => { - constrain_call(rcx, expr, Some(*args.get(0)), - args.slice_from(1), false); + constrain_call(rcx, expr, Some(&**args.get(0)), + args.slice_from(1).iter().map(|e| &**e), false); visit::walk_expr(rcx, expr); } @@ -642,8 +638,8 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { ast::ExprAssignOp(_, ref lhs, ref rhs) => { if has_method_map { - constrain_call(rcx, expr, Some(lhs.clone()), - [rhs.clone()], true); + constrain_call(rcx, expr, Some(&**lhs), + Some(&**rhs).move_iter(), true); } adjust_borrow_kind_for_assignment_lhs(rcx, &**lhs); @@ -657,15 +653,16 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { // overloaded op. Note that we (sadly) currently use an // implicit "by ref" sort of passing style here. This // should be converted to an adjustment! - constrain_call(rcx, expr, Some(lhs.clone()), - [rhs.clone()], true); + constrain_call(rcx, expr, Some(&**lhs), + Some(&**rhs).move_iter(), true); visit::walk_expr(rcx, expr); } ast::ExprUnary(_, ref lhs) if has_method_map => { // As above. - constrain_call(rcx, expr, Some(lhs.clone()), [], true); + constrain_call(rcx, expr, Some(&**lhs), + None::.iter(), true); visit::walk_expr(rcx, expr); } @@ -683,7 +680,8 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { let method_call = MethodCall::expr(expr.id); let base_ty = match rcx.fcx.inh.method_map.borrow().find(&method_call) { Some(method) => { - constrain_call(rcx, expr, Some(base.clone()), [], true); + constrain_call(rcx, expr, Some(&**base), + None::.iter(), true); ty::ty_fn_ret(method.ty) } None => rcx.resolve_node_type(base.id) @@ -1080,11 +1078,11 @@ fn constrain_callee(rcx: &mut Rcx, } } -fn constrain_call(rcx: &mut Rcx, - call_expr: &ast::Expr, - receiver: Option>, - arg_exprs: &[Gc], - implicitly_ref_args: bool) { +fn constrain_call<'a, I: Iterator<&'a ast::Expr>>(rcx: &mut Rcx, + call_expr: &ast::Expr, + receiver: Option<&ast::Expr>, + mut arg_exprs: I, + implicitly_ref_args: bool) { //! Invoked on every call site (i.e., normal calls, method calls, //! and overloaded operators). Constrains the regions which appear //! in the type of the function. 
Also constrains the regions that @@ -1093,11 +1091,9 @@ fn constrain_call(rcx: &mut Rcx, let tcx = rcx.fcx.tcx(); debug!("constrain_call(call_expr={}, \ receiver={}, \ - arg_exprs={}, \ implicitly_ref_args={:?})", call_expr.repr(tcx), receiver.repr(tcx), - arg_exprs.repr(tcx), implicitly_ref_args); // `callee_region` is the scope representing the time in which the @@ -1109,7 +1105,7 @@ fn constrain_call(rcx: &mut Rcx, debug!("callee_region={}", callee_region.repr(tcx)); - for arg_expr in arg_exprs.iter() { + for arg_expr in arg_exprs { debug!("Argument: {}", arg_expr.repr(tcx)); // ensure that any regions appearing in the argument type are @@ -1123,7 +1119,7 @@ fn constrain_call(rcx: &mut Rcx, // result. modes are going away and the "DerefArgs" code // should be ported to use adjustments if implicitly_ref_args { - link_by_ref(rcx, &**arg_expr, callee_scope); + link_by_ref(rcx, arg_expr, callee_scope); } } @@ -1292,10 +1288,10 @@ fn link_local(rcx: &Rcx, local: &ast::Local) { debug!("regionck::for_local()"); let init_expr = match local.init { None => { return; } - Some(ref expr) => expr, + Some(ref expr) => &**expr, }; let mc = mc::MemCategorizationContext::new(rcx); - let discr_cmt = ignore_err!(mc.cat_expr(&**init_expr)); + let discr_cmt = ignore_err!(mc.cat_expr(init_expr)); link_pattern(rcx, mc, discr_cmt, &*local.pat); } diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index ff3372b307260..8de17627e2825 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -597,7 +597,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { ast_items.iter() .map(|ast_item| { match *ast_item { - ast::MethodImplItem(ast_method) => { + ast::MethodImplItem(ref ast_method) => { MethodTraitItemId( local_def(ast_method.id)) } @@ -820,9 +820,9 @@ fn subst_receiver_types_in_method_ty(tcx: &ty::ctxt, ) } -pub fn check_coherence(crate_context: &CrateCtxt, krate: &Crate) { +pub fn check_coherence(crate_context: &CrateCtxt) { CoherenceChecker { crate_context: crate_context, inference_context: new_infer_ctxt(crate_context.tcx), - }.check(krate); + }.check(crate_context.tcx.map.krate()); } diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index 20e76b01317b8..581bd8acbc97e 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -53,22 +53,22 @@ use util::ppaux::{Repr,UserString}; use std::collections::{HashMap, HashSet}; use std::rc::Rc; -use std::gc::Gc; use syntax::abi; use syntax::ast; use syntax::ast_map; -use syntax::ast_util::{local_def, split_trait_methods, PostExpansionMethod}; +use syntax::ast_util::{local_def, PostExpansionMethod}; use syntax::codemap::Span; use syntax::parse::token::{special_idents}; use syntax::parse::token; use syntax::print::pprust::{path_to_string}; +use syntax::ptr::P; use syntax::visit; /////////////////////////////////////////////////////////////////////////// // Main entry point -pub fn collect_item_types(ccx: &CrateCtxt, krate: &ast::Crate) { +pub fn collect_item_types(ccx: &CrateCtxt) { fn collect_intrinsic_type(ccx: &CrateCtxt, lang_item: ast::DefId) { let ty::Polytype { ty: ty, .. 
} = @@ -84,10 +84,10 @@ pub fn collect_item_types(ccx: &CrateCtxt, krate: &ast::Crate) { } let mut visitor = CollectTraitDefVisitor{ ccx: ccx }; - visit::walk_crate(&mut visitor, krate); + visit::walk_crate(&mut visitor, ccx.tcx.map.krate()); let mut visitor = CollectItemTypesVisitor{ ccx: ccx }; - visit::walk_crate(&mut visitor, krate); + visit::walk_crate(&mut visitor, ccx.tcx.map.krate()); } /////////////////////////////////////////////////////////////////////////// @@ -179,7 +179,7 @@ impl<'a, 'tcx> AstConv<'tcx> for CrateCtxt<'a, 'tcx> { pub fn get_enum_variant_types(ccx: &CrateCtxt, enum_ty: ty::t, - variants: &[ast::P], + variants: &[P], generics: &ast::Generics) { let tcx = ccx.tcx; @@ -199,13 +199,13 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt, enum_ty } - ast::StructVariantKind(struct_def) => { + ast::StructVariantKind(ref struct_def) => { let pty = Polytype { generics: ty_generics_for_type(ccx, generics), ty: enum_ty }; - convert_struct(ccx, &*struct_def, pty, variant.node.id); + convert_struct(ccx, &**struct_def, pty, variant.node.id); let input_tys: Vec<_> = struct_def.fields.iter().map( |f| ty::node_id_to_type(ccx.tcx, f.node.id)).collect(); @@ -332,7 +332,7 @@ fn collect_trait_methods(ccx: &CrateCtxt, *m_id, *m_fn_style, trait_self_ty, - *m_explicit_self, + m_explicit_self, m_decl, m_abi); let ty_generics = @@ -386,13 +386,12 @@ pub fn convert_field(ccx: &CrateCtxt, } } -fn convert_methods(ccx: &CrateCtxt, - container: ImplOrTraitItemContainer, - ms: &[Gc], - untransformed_rcvr_ty: ty::t, - rcvr_ty_generics: &ty::Generics, - rcvr_visibility: ast::Visibility) -{ +fn convert_methods<'a, I: Iterator<&'a ast::Method>>(ccx: &CrateCtxt, + container: ImplOrTraitItemContainer, + mut ms: I, + untransformed_rcvr_ty: ty::t, + rcvr_ty_generics: &ty::Generics, + rcvr_visibility: ast::Visibility) { debug!("convert_methods(untransformed_rcvr_ty={}, \ rcvr_ty_generics={})", untransformed_rcvr_ty.repr(ccx.tcx), @@ -400,14 +399,14 @@ fn convert_methods(ccx: &CrateCtxt, let tcx = ccx.tcx; let mut seen_methods = HashSet::new(); - for m in ms.iter() { + for m in ms { if !seen_methods.insert(m.pe_ident().repr(ccx.tcx)) { tcx.sess.span_err(m.span, "duplicate method in trait impl"); } let mty = Rc::new(ty_of_method(ccx, container, - &**m, + m, untransformed_rcvr_ty, rcvr_ty_generics, rcvr_visibility)); @@ -459,7 +458,7 @@ fn convert_methods(ccx: &CrateCtxt, m.id, m.pe_fn_style(), untransformed_rcvr_ty, - *m.pe_explicit_self(), + m.pe_explicit_self(), &*m.pe_fn_decl(), real_abi); @@ -524,10 +523,10 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { }, ast::ItemImpl(ref generics, ref opt_trait_ref, - selfty, + ref selfty, ref impl_items) => { let ty_generics = ty_generics_for_type(ccx, generics); - let selfty = ccx.to_ty(&ExplicitRscope, &*selfty); + let selfty = ccx.to_ty(&ExplicitRscope, &**selfty); write_ty_to_tcx(tcx, it.id, selfty); tcx.tcache.borrow_mut().insert(local_def(it.id), @@ -554,14 +553,14 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { &BindingRscope::new(method.id), selfty, method.pe_explicit_self()); - methods.push(*method); + methods.push(&**method); } } } convert_methods(ccx, ImplContainer(local_def(it.id)), - methods.as_slice(), + methods.move_iter(), selfty, &ty_generics, parent_visibility); @@ -600,12 +599,13 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { } // Run convert_methods on the provided methods. 
- let (_, provided_methods) = - split_trait_methods(trait_methods.as_slice()); let untransformed_rcvr_ty = ty::mk_self_type(tcx, local_def(it.id)); convert_methods(ccx, TraitContainer(local_def(it.id)), - provided_methods.as_slice(), + trait_methods.iter().filter_map(|m| match *m { + ast::RequiredMethod(_) => None, + ast::ProvidedMethod(ref m) => Some(&**m) + }), untransformed_rcvr_ty, &trait_def.generics, it.vis); @@ -615,7 +615,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { // static trait methods. This is somewhat unfortunate. collect_trait_methods(ccx, it.id, &*trait_def); }, - ast::ItemStruct(struct_def, _) => { + ast::ItemStruct(ref struct_def, _) => { // Write the class type. let pty = ty_of_item(ccx, it); write_ty_to_tcx(tcx, it.id, pty.ty); @@ -624,14 +624,14 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { // Write the super-struct type, if it exists. match struct_def.super_struct { - Some(ty) => { - let supserty = ccx.to_ty(&ExplicitRscope, &*ty); + Some(ref ty) => { + let supserty = ccx.to_ty(&ExplicitRscope, &**ty); write_ty_to_tcx(tcx, it.id, supserty); }, _ => {}, } - convert_struct(ccx, &*struct_def, pty, it.id); + convert_struct(ccx, &**struct_def, pty, it.id); }, ast::ItemTy(_, ref generics) => { ensure_no_ty_param_bounds(ccx, it.span, generics, "type"); @@ -683,7 +683,7 @@ pub fn convert_struct(ccx: &CrateCtxt, tcx.struct_fields.borrow_mut().insert(local_def(id), Rc::new(field_tys)); let super_struct = match struct_def.super_struct { - Some(t) => match t.node { + Some(ref t) => match t.node { ast::TyPath(_, _, path_id) => { let def_map = tcx.def_map.borrow(); match def_map.find(&path_id) { @@ -692,7 +692,7 @@ pub fn convert_struct(ccx: &CrateCtxt, // Check super-struct is virtual. match tcx.map.find(def_id.node) { Some(ast_map::NodeItem(i)) => match i.node { - ast::ItemStruct(struct_def, _) => { + ast::ItemStruct(ref struct_def, _) => { if !struct_def.is_virtual { span_err!(tcx.sess, t.span, E0126, "struct inheritance is only \ @@ -908,21 +908,21 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) _ => {} } match it.node { - ast::ItemStatic(t, _, _) => { - let typ = ccx.to_ty(&ExplicitRscope, &*t); + ast::ItemStatic(ref t, _, _) => { + let typ = ccx.to_ty(&ExplicitRscope, &**t); let pty = no_params(typ); tcx.tcache.borrow_mut().insert(local_def(it.id), pty.clone()); return pty; } - ast::ItemFn(decl, fn_style, abi, ref generics, _) => { + ast::ItemFn(ref decl, fn_style, abi, ref generics, _) => { let ty_generics = ty_generics_for_fn_or_method(ccx, generics, ty::Generics::empty()); let tofd = astconv::ty_of_bare_fn(ccx, it.id, fn_style, abi, - &*decl); + &**decl); let pty = Polytype { generics: ty_generics, ty: ty::mk_bare_fn(ccx.tcx, tofd) @@ -935,14 +935,14 @@ pub fn ty_of_item(ccx: &CrateCtxt, it: &ast::Item) ccx.tcx.tcache.borrow_mut().insert(local_def(it.id), pty.clone()); return pty; } - ast::ItemTy(t, ref generics) => { + ast::ItemTy(ref t, ref generics) => { match tcx.tcache.borrow_mut().find(&local_def(it.id)) { Some(pty) => return pty.clone(), None => { } } let pty = { - let ty = ccx.to_ty(&ExplicitRscope, &*t); + let ty = ccx.to_ty(&ExplicitRscope, &**t); Polytype { generics: ty_generics_for_type(ccx, generics), ty: ty @@ -990,17 +990,17 @@ pub fn ty_of_foreign_item(ccx: &CrateCtxt, abi: abi::Abi) -> ty::Polytype { match it.node { - ast::ForeignItemFn(fn_decl, ref generics) => { + ast::ForeignItemFn(ref fn_decl, ref generics) => { ty_of_foreign_fn_decl(ccx, - &*fn_decl, + &**fn_decl, local_def(it.id), generics, abi) } - ast::ForeignItemStatic(t, _) 
=> { + ast::ForeignItemStatic(ref t, _) => { ty::Polytype { generics: ty::Generics::empty(), - ty: ast_ty_to_ty(ccx, &ExplicitRscope, &*t) + ty: ast_ty_to_ty(ccx, &ExplicitRscope, &**t) } } } @@ -1163,8 +1163,8 @@ fn ty_generics(ccx: &CrateCtxt, ¶m.unbound, param.span, where_clause); - let default = param.default.map(|path| { - let ty = ast_ty_to_ty(ccx, &ExplicitRscope, &*path); + let default = param.default.as_ref().map(|path| { + let ty = ast_ty_to_ty(ccx, &ExplicitRscope, &**path); let cur_idx = index; ty::walk_ty(ty, |t| { diff --git a/src/librustc/middle/typeck/infer/error_reporting.rs b/src/librustc/middle/typeck/infer/error_reporting.rs index e602e6a7b3c56..b5b4cc80faac1 100644 --- a/src/librustc/middle/typeck/infer/error_reporting.rs +++ b/src/librustc/middle/typeck/infer/error_reporting.rs @@ -60,7 +60,6 @@ time of error detection. */ use std::collections::HashSet; -use std::gc::GC; use middle::def; use middle::subst; use middle::ty; @@ -84,12 +83,12 @@ use std::rc::Rc; use std::string::String; use syntax::ast; use syntax::ast_map; -use syntax::ast_util; use syntax::ast_util::{name_to_dummy_lifetime, PostExpansionMethod}; use syntax::owned_slice::OwnedSlice; use syntax::codemap; use syntax::parse::token; use syntax::print::pprust; +use syntax::ptr::P; use util::ppaux::bound_region_to_string; use util::ppaux::note_and_explain_region; @@ -161,7 +160,7 @@ trait ErrorReportingHelpers { decl: &ast::FnDecl, fn_style: ast::FnStyle, ident: ast::Ident, - opt_explicit_self: Option, + opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics, span: codemap::Span); } @@ -855,8 +854,8 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { Some(ref node) => match *node { ast_map::NodeItem(ref item) => { match item.node { - ast::ItemFn(fn_decl, ref pur, _, ref gen, _) => { - Some((fn_decl, gen, *pur, item.ident, None, item.span)) + ast::ItemFn(ref fn_decl, pur, _, ref gen, _) => { + Some((&**fn_decl, gen, pur, item.ident, None, item.span)) }, _ => None } @@ -868,7 +867,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { m.pe_generics(), m.pe_fn_style(), m.pe_ident(), - Some(m.pe_explicit_self().node), + Some(&m.pe_explicit_self().node), m.span)) } } @@ -885,7 +884,7 @@ impl<'a, 'tcx> ErrorReporting for InferCtxt<'a, 'tcx> { generics, same_regions, &life_giver); let (fn_decl, expl_self, generics) = rebuilder.rebuild(); self.give_expl_lifetime_param(&fn_decl, fn_style, ident, - expl_self, &generics, span); + expl_self.as_ref(), &generics, span); } } @@ -902,8 +901,8 @@ struct RebuildPathInfo<'a> { struct Rebuilder<'a, 'tcx: 'a> { tcx: &'a ty::ctxt<'tcx>, - fn_decl: ast::P, - expl_self_opt: Option, + fn_decl: &'a ast::FnDecl, + expl_self_opt: Option<&'a ast::ExplicitSelf_>, generics: &'a ast::Generics, same_regions: &'a [SameRegions], life_giver: &'a LifeGiver, @@ -918,8 +917,8 @@ enum FreshOrKept { impl<'a, 'tcx> Rebuilder<'a, 'tcx> { fn new(tcx: &'a ty::ctxt<'tcx>, - fn_decl: ast::P, - expl_self_opt: Option, + fn_decl: &'a ast::FnDecl, + expl_self_opt: Option<&'a ast::ExplicitSelf_>, generics: &'a ast::Generics, same_regions: &'a [SameRegions], life_giver: &'a LifeGiver) @@ -938,9 +937,9 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { fn rebuild(&self) -> (ast::FnDecl, Option, ast::Generics) { - let mut expl_self_opt = self.expl_self_opt; + let mut expl_self_opt = self.expl_self_opt.map(|x| x.clone()); let mut inputs = self.fn_decl.inputs.clone(); - let mut output = self.fn_decl.output; + let mut output = self.fn_decl.output.clone(); let mut ty_params = 
self.generics.ty_params.clone(); let where_clause = self.generics.where_clause.clone(); let mut kept_lifetimes = HashSet::new(); @@ -958,7 +957,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { &anon_nums, ®ion_names); inputs = self.rebuild_args_ty(inputs.as_slice(), lifetime, &anon_nums, ®ion_names); - output = self.rebuild_arg_ty_or_output(output, lifetime, + output = self.rebuild_arg_ty_or_output(&*output, lifetime, &anon_nums, ®ion_names); ty_params = self.rebuild_ty_params(ty_params, lifetime, ®ion_names); @@ -1068,7 +1067,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { id: ty_param.id, bounds: bounds, unbound: ty_param.unbound.clone(), - default: ty_param.default, + default: ty_param.default.clone(), span: ty_param.span, } }) @@ -1087,8 +1086,8 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { // be passing down a map. ast::RegionTyParamBound(lt) } - &ast::UnboxedFnTyParamBound(unboxed_function_type) => { - ast::UnboxedFnTyParamBound(unboxed_function_type) + &ast::UnboxedFnTyParamBound(ref unboxed_function_type) => { + ast::UnboxedFnTyParamBound((*unboxed_function_type).clone()) } &ast::TraitTyParamBound(ref tr) => { let last_seg = tr.path.segments.last().unwrap(); @@ -1122,7 +1121,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { region_names: &HashSet) -> Option { match expl_self_opt { - Some(expl_self) => match expl_self { + Some(ref expl_self) => match *expl_self { ast::SelfRegion(lt_opt, muta, id) => match lt_opt { Some(lt) => if region_names.contains(<.name) { return Some(ast::SelfRegion(Some(lifetime), muta, id)); @@ -1177,11 +1176,11 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { -> Vec { let mut new_inputs = Vec::new(); for arg in inputs.iter() { - let new_ty = self.rebuild_arg_ty_or_output(arg.ty, lifetime, + let new_ty = self.rebuild_arg_ty_or_output(&*arg.ty, lifetime, anon_nums, region_names); let possibly_new_arg = ast::Arg { ty: new_ty, - pat: arg.pat, + pat: arg.pat.clone(), id: arg.id }; new_inputs.push(possibly_new_arg); @@ -1190,36 +1189,40 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } fn rebuild_arg_ty_or_output(&self, - ty: ast::P, + ty: &ast::Ty, lifetime: ast::Lifetime, anon_nums: &HashSet, region_names: &HashSet) - -> ast::P { - let mut new_ty = ty; + -> P { + let mut new_ty = P(ty.clone()); let mut ty_queue = vec!(ty); - let mut cur_ty; while !ty_queue.is_empty() { - cur_ty = ty_queue.shift().unwrap(); + let cur_ty = ty_queue.shift().unwrap(); match cur_ty.node { - ast::TyRptr(lt_opt, mut_ty) => { - match lt_opt { - Some(lt) => if region_names.contains(<.name) { - new_ty = self.rebuild_ty(new_ty, cur_ty, - lifetime, None); - }, + ast::TyRptr(lt_opt, ref mut_ty) => { + let rebuild = match lt_opt { + Some(lt) => region_names.contains(<.name), None => { let anon = self.cur_anon.get(); - if anon_nums.contains(&anon) { - new_ty = self.rebuild_ty(new_ty, cur_ty, - lifetime, None); + let rebuild = anon_nums.contains(&anon); + if rebuild { self.track_anon(anon); } self.inc_and_offset_cur_anon(1); + rebuild } + }; + if rebuild { + let to = ast::Ty { + id: cur_ty.id, + node: ast::TyRptr(Some(lifetime), mut_ty.clone()), + span: cur_ty.span + }; + new_ty = self.rebuild_ty(new_ty, P(to)); } - ty_queue.push(mut_ty.ty); + ty_queue.push(&*mut_ty.ty); } - ast::TyPath(ref path, _, id) => { + ast::TyPath(ref path, ref bounds, id) => { let a_def = match self.tcx.def_map.borrow().find(&id) { None => { self.tcx @@ -1232,10 +1235,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { }; match a_def { def::DefTy(did) | def::DefStruct(did) => { - let ty::Polytype { - generics: generics, - ty: _ - } = ty::lookup_item_type(self.tcx, did); + 
let generics = ty::lookup_item_type(self.tcx, did).generics; let expected = generics.regions.len(subst::TypeSpace); @@ -1266,85 +1266,77 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { anon_nums: anon_nums, region_names: region_names }; - new_ty = self.rebuild_ty(new_ty, cur_ty, - lifetime, - Some(rebuild_info)); + let new_path = self.rebuild_path(rebuild_info, lifetime); + let to = ast::Ty { + id: cur_ty.id, + node: ast::TyPath(new_path, bounds.clone(), id), + span: cur_ty.span + }; + new_ty = self.rebuild_ty(new_ty, P(to)); } _ => () } } - _ => ty_queue.push_all_move(ast_util::get_inner_tys(cur_ty)) + + ast::TyPtr(ref mut_ty) => { + ty_queue.push(&*mut_ty.ty); + } + ast::TyBox(ref ty) | + ast::TyVec(ref ty) | + ast::TyUniq(ref ty) | + ast::TyFixedLengthVec(ref ty, _) => { + ty_queue.push(&**ty); + } + ast::TyTup(ref tys) => ty_queue.extend(tys.iter().map(|ty| &**ty)), + _ => {} } } new_ty } fn rebuild_ty(&self, - from: ast::P, - to: ast::P, - lifetime: ast::Lifetime, - rebuild_path_info: Option) - -> ast::P { - - fn build_to(from: ast::P, - to: ast::P) - -> ast::P { - if from.id == to.id { - return to; - } - let new_node = match from.node { - ast::TyRptr(ref lifetime, ref mut_ty) => { - let new_mut_ty = ast::MutTy { - ty: build_to(mut_ty.ty, to), - mutbl: mut_ty.mutbl - }; - ast::TyRptr(*lifetime, new_mut_ty) - } - ast::TyPtr(ref mut_ty) => { - let new_mut_ty = ast::MutTy { - ty: build_to(mut_ty.ty, to), - mutbl: mut_ty.mutbl - }; - ast::TyPtr(new_mut_ty) - } - ast::TyBox(ref ty) => ast::TyBox(build_to(*ty, to)), - ast::TyVec(ref ty) => ast::TyVec(build_to(*ty, to)), - ast::TyUniq(ref ty) => ast::TyUniq(build_to(*ty, to)), - ast::TyFixedLengthVec(ref ty, ref e) => { - ast::TyFixedLengthVec(build_to(*ty, to), *e) - } - ast::TyTup(ref tys) => { - let mut new_tys = Vec::new(); - for ty in tys.iter() { - new_tys.push(build_to(*ty, to)); + from: P, + to: P) + -> P { + + fn build_to(from: P, + to: &mut Option>) + -> P { + if Some(from.id) == to.as_ref().map(|ty| ty.id) { + return to.take().expect("`to` type found more than once during rebuild"); + } + from.map(|ast::Ty {id, node, span}| { + let new_node = match node { + ast::TyRptr(lifetime, mut_ty) => { + ast::TyRptr(lifetime, ast::MutTy { + mutbl: mut_ty.mutbl, + ty: build_to(mut_ty.ty, to), + }) } - ast::TyTup(new_tys) - } - ast::TyParen(ref typ) => ast::TyParen(build_to(*typ, to)), - ref other => other.clone() - }; - box(GC) ast::Ty { id: from.id, node: new_node, span: from.span } + ast::TyPtr(mut_ty) => { + ast::TyPtr(ast::MutTy { + mutbl: mut_ty.mutbl, + ty: build_to(mut_ty.ty, to), + }) + } + ast::TyBox(ty) => ast::TyBox(build_to(ty, to)), + ast::TyVec(ty) => ast::TyVec(build_to(ty, to)), + ast::TyUniq(ty) => ast::TyUniq(build_to(ty, to)), + ast::TyFixedLengthVec(ty, e) => { + ast::TyFixedLengthVec(build_to(ty, to), e) + } + ast::TyTup(tys) => { + ast::TyTup(tys.move_iter().map(|ty| build_to(ty, to)).collect()) + } + ast::TyParen(typ) => ast::TyParen(build_to(typ, to)), + other => other + }; + ast::Ty { id: id, node: new_node, span: span } + }) } - let new_ty_node = match to.node { - ast::TyRptr(_, mut_ty) => ast::TyRptr(Some(lifetime), mut_ty), - ast::TyPath(_, ref bounds, id) => { - let rebuild_info = match rebuild_path_info { - Some(ri) => ri, - None => fail!("expect index_opt in rebuild_ty/ast::TyPath") - }; - let new_path = self.rebuild_path(rebuild_info, lifetime); - ast::TyPath(new_path, bounds.clone(), id) - } - _ => fail!("expect ast::TyRptr or ast::TyPath") - }; - let new_ty = box(GC) ast::Ty { - id: to.id, - node: new_ty_node, - 
span: to.span - }; - build_to(from, new_ty) + build_to(from, &mut Some(to)) } fn rebuild_path(&self, @@ -1384,8 +1376,8 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } } } - let new_types = last_seg.types.map(|&t| { - self.rebuild_arg_ty_or_output(t, lifetime, anon_nums, region_names) + let new_types = last_seg.types.map(|t| { + self.rebuild_arg_ty_or_output(&**t, lifetime, anon_nums, region_names) }); let new_seg = ast::PathSegment { identifier: last_seg.identifier, @@ -1408,7 +1400,7 @@ impl<'a, 'tcx> ErrorReportingHelpers for InferCtxt<'a, 'tcx> { decl: &ast::FnDecl, fn_style: ast::FnStyle, ident: ast::Ident, - opt_explicit_self: Option, + opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics, span: codemap::Span) { let suggested_fn = pprust::fun_to_string(decl, fn_style, ident, @@ -1686,7 +1678,7 @@ fn lifetimes_in_scope(tcx: &ty::ctxt, }, ast_map::NodeImplItem(ii) => { match *ii { - ast::MethodImplItem(m) => { + ast::MethodImplItem(ref m) => { taken.push_all(m.pe_generics().lifetimes.as_slice()); Some(m.id) } diff --git a/src/librustc/middle/typeck/infer/test.rs b/src/librustc/middle/typeck/infer/test.rs index 198857fca5055..c0236cefca3f9 100644 --- a/src/librustc/middle/typeck/infer/test.rs +++ b/src/librustc/middle/typeck/infer/test.rs @@ -36,13 +36,12 @@ use middle::typeck::infer::glb::Glb; use syntax::codemap; use syntax::codemap::{Span, CodeMap, DUMMY_SP}; use syntax::diagnostic::{Level, RenderSpan, Bug, Fatal, Error, Warning, Note}; -use syntax::ast; +use syntax::{ast, ast_map}; use util::ppaux::{ty_to_string, UserString}; use arena::TypedArena; struct Env<'a, 'tcx: 'a> { - krate: ast::Crate, infcx: &'a infer::InferCtxt<'a, 'tcx>, } @@ -117,19 +116,22 @@ fn test_env(_test_name: &str, let krate_config = Vec::new(); let input = driver::StrInput(source_string.to_string()); let krate = driver::phase_1_parse_input(&sess, krate_config, &input); - let (krate, ast_map) = - driver::phase_2_configure_and_expand(&sess, krate, "test", None) - .expect("phase 2 aborted"); + let krate = driver::phase_2_configure_and_expand(&sess, krate, "test", None) + .expect("phase 2 aborted"); + + let mut forest = ast_map::Forest::new(krate); + let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest); + let krate = ast_map.krate(); // run just enough stuff to build a tcx: - let lang_items = lang_items::collect_language_items(&krate, &sess); + let lang_items = lang_items::collect_language_items(krate, &sess); let resolve::CrateMap { def_map: def_map, .. 
} = - resolve::resolve_crate(&sess, &lang_items, &krate); + resolve::resolve_crate(&sess, &lang_items, krate); let (freevars_map, captures_map) = freevars::annotate_freevars(&def_map, - &krate); - let named_region_map = resolve_lifetime::krate(&sess, &krate); - let region_map = region::resolve_crate(&sess, &krate); - let stability_index = stability::Index::build(&krate); + krate); + let named_region_map = resolve_lifetime::krate(&sess, krate); + let region_map = region::resolve_crate(&sess, krate); + let stability_index = stability::Index::build(krate); let type_arena = TypedArena::new(); let tcx = ty::mk_ctxt(sess, &type_arena, @@ -142,11 +144,7 @@ fn test_env(_test_name: &str, lang_items, stability_index); let infcx = infer::new_infer_ctxt(&tcx); - let env = Env { - krate: krate, - infcx: &infcx - }; - body(env); + body(Env { infcx: &infcx }); infcx.resolve_regions_and_report_errors(); assert_eq!(tcx.sess.err_count(), expected_err_count); } @@ -171,7 +169,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { } pub fn lookup_item(&self, names: &[String]) -> ast::NodeId { - return match search_mod(self, &self.krate.module, 0, names) { + return match search_mod(self, &self.infcx.tcx.map.krate().module, 0, names) { Some(id) => id, None => { fail!("no item found: `{}`", names.connect("::")); diff --git a/src/librustc/middle/typeck/mod.rs b/src/librustc/middle/typeck/mod.rs index 7104cb9584444..e59f1aa3ce4c1 100644 --- a/src/librustc/middle/typeck/mod.rs +++ b/src/librustc/middle/typeck/mod.rs @@ -308,7 +308,7 @@ pub fn write_substs_to_tcx(tcx: &ty::ctxt, } pub fn lookup_def_tcx(tcx:&ty::ctxt, sp: Span, id: ast::NodeId) -> def::Def { match tcx.def_map.borrow().find(&id) { - Some(&x) => x, + Some(x) => x.clone(), _ => { tcx.sess.span_fatal(sp, "internal error looking up a definition") } @@ -474,9 +474,7 @@ fn check_for_entry_fn(ccx: &CrateCtxt) { } } -pub fn check_crate(tcx: &ty::ctxt, - trait_map: resolve::TraitMap, - krate: &ast::Crate) { +pub fn check_crate(tcx: &ty::ctxt, trait_map: resolve::TraitMap) { let time_passes = tcx.sess.time_passes(); let ccx = CrateCtxt { trait_map: trait_map, @@ -484,20 +482,20 @@ pub fn check_crate(tcx: &ty::ctxt, }; time(time_passes, "type collecting", (), |_| - collect::collect_item_types(&ccx, krate)); + collect::collect_item_types(&ccx)); // this ensures that later parts of type checking can assume that items // have valid types and not error tcx.sess.abort_if_errors(); time(time_passes, "variance inference", (), |_| - variance::infer_variance(tcx, krate)); + variance::infer_variance(tcx)); time(time_passes, "coherence checking", (), |_| - coherence::check_coherence(&ccx, krate)); + coherence::check_coherence(&ccx)); time(time_passes, "type checking", (), |_| - check::check_item_types(&ccx, krate)); + check::check_item_types(&ccx)); check_for_entry_fn(&ccx); tcx.sess.abort_if_errors(); diff --git a/src/librustc/middle/typeck/variance.rs b/src/librustc/middle/typeck/variance.rs index 9526e5d3eb5b5..547fbce573bcc 100644 --- a/src/librustc/middle/typeck/variance.rs +++ b/src/librustc/middle/typeck/variance.rs @@ -208,8 +208,8 @@ use syntax::visit; use syntax::visit::Visitor; use util::ppaux::Repr; -pub fn infer_variance(tcx: &ty::ctxt, - krate: &ast::Crate) { +pub fn infer_variance(tcx: &ty::ctxt) { + let krate = tcx.map.krate(); let mut arena = arena::Arena::new(); let terms_cx = determine_parameters_to_be_inferred(tcx, &mut arena, krate); let constraints_cx = add_constraints_from_crate(terms_cx, krate); diff --git a/src/librustc/util/common.rs b/src/librustc/util/common.rs 
index b3ac44a3574d9..c39f011189a60 100644 --- a/src/librustc/util/common.rs +++ b/src/librustc/util/common.rs @@ -99,7 +99,7 @@ impl<'a, 'v> Visitor<'v> for BlockQueryVisitor<'a> { // Takes a predicate p, returns true iff p is true for any subexpressions // of b -- skipping any inner loops (loop, while, loop_body) -pub fn block_query(b: ast::P, p: |&ast::Expr| -> bool) -> bool { +pub fn block_query(b: &ast::Block, p: |&ast::Expr| -> bool) -> bool { let mut v = BlockQueryVisitor { p: p, flag: false, diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 11f16f1ea9511..ba1f7ca2cb231 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -27,7 +27,6 @@ use middle::typeck; use middle::typeck::check::regionmanip; use middle::typeck::infer; -use std::gc::Gc; use std::rc::Rc; use syntax::abi; use syntax::ast_map; @@ -546,9 +545,9 @@ impl Repr for Rc { } } -impl Repr for Gc { +impl<'a, T:Repr> Repr for &'a T { fn repr(&self, tcx: &ctxt) -> String { - (&**self).repr(tcx) + (*self).repr(tcx) } } @@ -822,21 +821,19 @@ impl Repr for ast::DefId { // a path for a def-id, so I'll just make a best effort for now // and otherwise fallback to just printing the crate/node pair if self.krate == ast::LOCAL_CRATE { - { - match tcx.map.find(self.node) { - Some(ast_map::NodeItem(..)) | - Some(ast_map::NodeForeignItem(..)) | - Some(ast_map::NodeImplItem(..)) | - Some(ast_map::NodeTraitItem(..)) | - Some(ast_map::NodeVariant(..)) | - Some(ast_map::NodeStructCtor(..)) => { - return format!( + match tcx.map.find(self.node) { + Some(ast_map::NodeItem(..)) | + Some(ast_map::NodeForeignItem(..)) | + Some(ast_map::NodeImplItem(..)) | + Some(ast_map::NodeTraitItem(..)) | + Some(ast_map::NodeVariant(..)) | + Some(ast_map::NodeStructCtor(..)) => { + return format!( "{:?}:{}", *self, ty::item_path_str(tcx, *self)) - } - _ => {} } + _ => {} } } return format!("{:?}", *self) diff --git a/src/librustc_back/svh.rs b/src/librustc_back/svh.rs index c3d9edecc6e2b..415141c0b9400 100644 --- a/src/librustc_back/svh.rs +++ b/src/librustc_back/svh.rs @@ -264,7 +264,7 @@ mod svh_visitor { ExprTup(..) => SawExprTup, ExprBinary(op, _, _) => SawExprBinary(op), ExprUnary(op, _) => SawExprUnary(op), - ExprLit(lit) => SawExprLit(lit.node.clone()), + ExprLit(ref lit) => SawExprLit(lit.node.clone()), ExprCast(..) => SawExprCast, ExprIf(..) => SawExprIf, ExprWhile(..) 
=> SawExprWhile, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index c1a91f26dbf80..e6fcbbe9b6ff3 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -20,6 +20,7 @@ use syntax::attr::{AttributeMethods, AttrMetaMethods}; use syntax::codemap::Pos; use syntax::parse::token::InternedString; use syntax::parse::token; +use syntax::ptr::P; use rustc::back::link; use rustc::driver::driver; @@ -34,7 +35,6 @@ use rustc::middle::stability; use std::rc::Rc; use std::u32; -use std::gc::{Gc, GC}; use core::DocContext; use doctree; @@ -67,7 +67,7 @@ impl, U> Clean> for VecPerParamSpace { } } -impl, U> Clean for Gc { +impl, U> Clean for P { fn clean(&self, cx: &DocContext) -> U { (**self).clean(cx) } @@ -408,7 +408,7 @@ impl Clean for ast::MetaItem { impl Clean for ast::Attribute { fn clean(&self, cx: &DocContext) -> Attribute { - self.desugar_doc().node.value.clean(cx) + self.with_desugared_doc(|a| a.node.value.clean(cx)) } } @@ -430,12 +430,12 @@ impl attr::AttrMetaMethods for Attribute { _ => None, } } - fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]> { None } + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { None } } impl<'a> attr::AttrMetaMethods for &'a Attribute { fn name(&self) -> InternedString { (**self).name() } fn value_str(&self) -> Option { (**self).value_str() } - fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]> { None } + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { None } } #[deriving(Clone, Encodable, Decodable, PartialEq)] @@ -758,10 +758,10 @@ impl Clean for ast::ExplicitSelf_ { match *self { ast::SelfStatic => SelfStatic, ast::SelfValue(_) => SelfValue, - ast::SelfRegion(lt, mt, _) => { + ast::SelfRegion(ref lt, ref mt, _) => { SelfBorrowed(lt.clean(cx), mt.clean(cx)) } - ast::SelfExplicit(typ, _) => SelfExplicit(typ.clean(cx)), + ast::SelfExplicit(ref typ, _) => SelfExplicit(typ.clean(cx)), } } } @@ -1189,11 +1189,11 @@ impl Clean for ast::Ty { TyRptr(ref l, ref m) => BorrowedRef {lifetime: l.clean(cx), mutability: m.mutbl.clean(cx), type_: box m.ty.clean(cx)}, - TyBox(ty) => Managed(box ty.clean(cx)), - TyUniq(ty) => Unique(box ty.clean(cx)), - TyVec(ty) => Vector(box ty.clean(cx)), - TyFixedLengthVec(ty, ref e) => FixedVector(box ty.clean(cx), - e.span.to_src(cx)), + TyBox(ref ty) => Managed(box ty.clean(cx)), + TyUniq(ref ty) => Unique(box ty.clean(cx)), + TyVec(ref ty) => Vector(box ty.clean(cx)), + TyFixedLengthVec(ref ty, ref e) => FixedVector(box ty.clean(cx), + e.span.to_src(cx)), TyTup(ref tys) => Tuple(tys.clean(cx)), TyPath(ref p, ref tpbs, id) => { resolve_type(cx, p.clean(cx), tpbs.clean(cx), id) @@ -1799,7 +1799,7 @@ impl Clean> for ast::ViewItem { remaining, b.clone()); let path = syntax::codemap::dummy_spanned(path); - ret.push(convert(&ast::ViewItemUse(box(GC) path))); + ret.push(convert(&ast::ViewItemUse(P(path)))); } } ast::ViewPathSimple(ident, _, id) => { @@ -1985,8 +1985,8 @@ fn name_from_pat(p: &ast::Pat) -> String { }, PatTup(ref elts) => format!("({})", elts.iter().map(|p| name_from_pat(&**p)) .collect::>().connect(", ")), - PatBox(p) => name_from_pat(&*p), - PatRegion(p) => name_from_pat(&*p), + PatBox(ref p) => name_from_pat(&**p), + PatRegion(ref p) => name_from_pat(&**p), PatLit(..) => { warn!("tried to get argument name from PatLit, \ which is silly in function arguments"); diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index a8cd9f18d60a5..ddb4b994ca38e 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -8,18 +8,16 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -use rustc; -use rustc::{driver, middle}; +use rustc::driver::{config, driver, session}; use rustc::middle::{privacy, ty}; use rustc::lint; use rustc::back::link; -use syntax::ast; +use syntax::{ast, ast_map, codemap, diagnostic}; use syntax::parse::token; -use syntax; +use syntax::ptr::P; use std::cell::RefCell; -use std::gc::GC; use std::os; use std::collections::{HashMap, HashSet}; use arena::TypedArena; @@ -30,15 +28,15 @@ use clean::Clean; /// Are we generating documentation (`Typed`) or tests (`NotTyped`)? pub enum MaybeTyped<'tcx> { - Typed(middle::ty::ctxt<'tcx>), - NotTyped(driver::session::Session) + Typed(ty::ctxt<'tcx>), + NotTyped(session::Session) } pub type ExternalPaths = RefCell, clean::TypeKind)>>>; pub struct DocContext<'tcx> { - pub krate: ast::Crate, + pub krate: &'tcx ast::Crate, pub maybe_typed: MaybeTyped<'tcx>, pub src: Path, pub external_paths: ExternalPaths, @@ -49,7 +47,7 @@ pub struct DocContext<'tcx> { } impl<'tcx> DocContext<'tcx> { - pub fn sess<'a>(&'a self) -> &'a driver::session::Session { + pub fn sess<'a>(&'a self) -> &'a session::Session { match self.maybe_typed { Typed(ref tcx) => &tcx.sess, NotTyped(ref sess) => sess @@ -80,64 +78,60 @@ pub struct CrateAnalysis { pub type Externs = HashMap>; -/// Parses, resolves, and typechecks the given crate -fn get_ast_and_resolve<'tcx>(cpath: &Path, libs: Vec, cfgs: Vec, - externs: Externs, triple: Option, - type_arena: &'tcx TypedArena) - -> (DocContext<'tcx>, CrateAnalysis) { - use syntax::codemap::dummy_spanned; - use rustc::driver::driver::{FileInput, - phase_1_parse_input, - phase_2_configure_and_expand, - phase_3_run_analysis_passes}; - use rustc::driver::config::build_configuration; +pub fn run_core(libs: Vec, cfgs: Vec, externs: Externs, + cpath: &Path, triple: Option) + -> (clean::Crate, CrateAnalysis) { - let input = FileInput(cpath.clone()); + // Parse, resolve, and typecheck the given crate. 
+ + let input = driver::FileInput(cpath.clone()); let warning_lint = lint::builtin::WARNINGS.name_lower(); - let sessopts = driver::config::Options { + let sessopts = config::Options { maybe_sysroot: Some(os::self_exe_path().unwrap().dir_path()), addl_lib_search_paths: RefCell::new(libs), - crate_types: vec!(driver::config::CrateTypeRlib), + crate_types: vec!(config::CrateTypeRlib), lint_opts: vec!((warning_lint, lint::Allow)), externs: externs, - target_triple: triple.unwrap_or(driver::driver::host_triple().to_string()), - ..rustc::driver::config::basic_options().clone() + target_triple: triple.unwrap_or(driver::host_triple().to_string()), + ..config::basic_options().clone() }; - let codemap = syntax::codemap::CodeMap::new(); - let diagnostic_handler = syntax::diagnostic::default_handler(syntax::diagnostic::Auto, None); + let codemap = codemap::CodeMap::new(); + let diagnostic_handler = diagnostic::default_handler(diagnostic::Auto, None); let span_diagnostic_handler = - syntax::diagnostic::mk_span_handler(diagnostic_handler, codemap); + diagnostic::mk_span_handler(diagnostic_handler, codemap); - let sess = driver::session::build_session_(sessopts, - Some(cpath.clone()), - span_diagnostic_handler); + let sess = session::build_session_(sessopts, + Some(cpath.clone()), + span_diagnostic_handler); - let mut cfg = build_configuration(&sess); + let mut cfg = config::build_configuration(&sess); for cfg_ in cfgs.move_iter() { let cfg_ = token::intern_and_get_ident(cfg_.as_slice()); - cfg.push(box(GC) dummy_spanned(ast::MetaWord(cfg_))); + cfg.push(P(codemap::dummy_spanned(ast::MetaWord(cfg_)))); } - let krate = phase_1_parse_input(&sess, cfg, &input); + let krate = driver::phase_1_parse_input(&sess, cfg, &input); let name = link::find_crate_name(Some(&sess), krate.attrs.as_slice(), &input); - let (krate, ast_map) - = phase_2_configure_and_expand(&sess, krate, name.as_slice(), None) - .expect("phase_2_configure_and_expand aborted in rustdoc!"); + let krate = driver::phase_2_configure_and_expand(&sess, krate, name.as_slice(), None) + .expect("phase_2_configure_and_expand aborted in rustdoc!"); + + let mut forest = ast_map::Forest::new(krate); + let ast_map = driver::assign_node_ids_and_map(&sess, &mut forest); - let driver::driver::CrateAnalysis { + let type_arena = TypedArena::new(); + let driver::CrateAnalysis { exported_items, public_items, ty_cx, .. 
- } = phase_3_run_analysis_passes(sess, &krate, ast_map, type_arena, name); + } = driver::phase_3_run_analysis_passes(sess, ast_map, &type_arena, name); - debug!("crate: {:?}", krate); - (DocContext { - krate: krate, + let ctxt = DocContext { + krate: ty_cx.map.krate(), maybe_typed: Typed(ty_cx), src: cpath.clone(), external_traits: RefCell::new(Some(HashMap::new())), @@ -145,26 +139,21 @@ fn get_ast_and_resolve<'tcx>(cpath: &Path, libs: Vec, cfgs: Vec, external_paths: RefCell::new(Some(HashMap::new())), inlined: RefCell::new(Some(HashSet::new())), populated_crate_impls: RefCell::new(HashSet::new()), - }, CrateAnalysis { + }; + debug!("crate: {:?}", ctxt.krate); + + let analysis = CrateAnalysis { exported_items: exported_items, public_items: public_items, external_paths: RefCell::new(None), external_traits: RefCell::new(None), external_typarams: RefCell::new(None), inlined: RefCell::new(None), - }) -} - -pub fn run_core(libs: Vec, cfgs: Vec, externs: Externs, - path: &Path, triple: Option) - -> (clean::Crate, CrateAnalysis) { - let type_arena = TypedArena::new(); - let (ctxt, analysis) = get_ast_and_resolve(path, libs, cfgs, externs, - triple, &type_arena); + }; let krate = { let mut v = RustdocVisitor::new(&ctxt, Some(&analysis)); - v.visit(&ctxt.krate); + v.visit(ctxt.krate); v.clean(&ctxt) }; diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 4d2cf852b8ab7..72964609049bf 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -16,8 +16,7 @@ use syntax::codemap::Span; use syntax::ast; use syntax::attr; use syntax::ast::{Ident, NodeId}; - -use std::gc::Gc; +use syntax::ptr::P; pub struct Module { pub name: Option, @@ -130,7 +129,7 @@ pub struct Function { } pub struct Typedef { - pub ty: ast::P, + pub ty: P, pub gen: ast::Generics, pub name: Ident, pub id: ast::NodeId, @@ -141,9 +140,9 @@ pub struct Typedef { } pub struct Static { - pub type_: ast::P, + pub type_: P, pub mutability: ast::Mutability, - pub expr: Gc, + pub expr: P, pub name: Ident, pub attrs: Vec, pub vis: ast::Visibility, @@ -167,7 +166,7 @@ pub struct Trait { pub struct Impl { pub generics: ast::Generics, pub trait_: Option, - pub for_: ast::P, + pub for_: P, pub items: Vec, pub attrs: Vec, pub whence: Span, diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 0eb0a9afd751c..b7c602d9d730f 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -11,7 +11,6 @@ use std::cell::RefCell; use std::char; use std::dynamic_lib::DynamicLibrary; -use std::gc::GC; use std::io::{Command, TempDir}; use std::io; use std::os; @@ -28,6 +27,7 @@ use syntax::ast; use syntax::codemap::{CodeMap, dummy_spanned}; use syntax::diagnostic; use syntax::parse::token; +use syntax::ptr::P; use core; use clean; @@ -67,15 +67,15 @@ pub fn run(input: &str, let mut cfg = config::build_configuration(&sess); cfg.extend(cfgs.move_iter().map(|cfg_| { let cfg_ = token::intern_and_get_ident(cfg_.as_slice()); - box(GC) dummy_spanned(ast::MetaWord(cfg_)) + P(dummy_spanned(ast::MetaWord(cfg_))) })); let krate = driver::phase_1_parse_input(&sess, cfg, &input); - let (krate, _) = driver::phase_2_configure_and_expand(&sess, krate, - "rustdoc-test", None) + let krate = driver::phase_2_configure_and_expand(&sess, krate, + "rustdoc-test", None) .expect("phase_2_configure_and_expand aborted in rustdoc!"); let ctx = core::DocContext { - krate: krate, + krate: &krate, maybe_typed: core::NotTyped(sess), src: input_path, external_paths: RefCell::new(Some(HashMap::new())), @@ -86,7 +86,7 @@ pub fn run(input: 
&str, }; let mut v = RustdocVisitor::new(&ctx, None); - v.visit(&ctx.krate); + v.visit(ctx.krate); let mut krate = v.clean(&ctx); match crate_name { Some(name) => krate.name = name, diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 79576cac20af3..a9e0c9cb260f6 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -18,11 +18,10 @@ use syntax::ast_map; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::codemap::Span; +use syntax::ptr::P; use rustc::middle::stability; -use std::gc::{Gc, GC}; - use core; use doctree::*; @@ -57,13 +56,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } pub fn visit(&mut self, krate: &ast::Crate) { - self.attrs = krate.attrs.iter().map(|x| (*x).clone()).collect(); + self.attrs = krate.attrs.clone(); self.module = self.visit_mod_contents(krate.span, - krate.attrs - .iter() - .map(|x| *x) - .collect(), + krate.attrs.clone(), ast::Public, ast::CRATE_NODE_ID, &krate.module, @@ -74,51 +70,50 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { self.module.is_crate = true; } - pub fn visit_struct_def(&mut self, item: &ast::Item, sd: Gc, + pub fn visit_struct_def(&mut self, item: &ast::Item, + name: ast::Ident, sd: &ast::StructDef, generics: &ast::Generics) -> Struct { debug!("Visiting struct"); let struct_type = struct_type_from_def(&*sd); Struct { id: item.id, struct_type: struct_type, - name: item.ident, + name: name, vis: item.vis, stab: self.stability(item.id), - attrs: item.attrs.iter().map(|x| *x).collect(), + attrs: item.attrs.clone(), generics: generics.clone(), - fields: sd.fields.iter().map(|x| (*x).clone()).collect(), + fields: sd.fields.clone(), whence: item.span } } - pub fn visit_enum_def(&mut self, it: &ast::Item, def: &ast::EnumDef, + pub fn visit_enum_def(&mut self, it: &ast::Item, + name: ast::Ident, def: &ast::EnumDef, params: &ast::Generics) -> Enum { debug!("Visiting enum"); - let mut vars: Vec = Vec::new(); - for x in def.variants.iter() { - vars.push(Variant { - name: x.node.name, - attrs: x.node.attrs.iter().map(|x| *x).collect(), - vis: x.node.vis, - stab: self.stability(x.node.id), - id: x.node.id, - kind: x.node.kind.clone(), - whence: x.span, - }); - } Enum { - name: it.ident, - variants: vars, + name: name, + variants: def.variants.iter().map(|v| Variant { + name: v.node.name, + attrs: v.node.attrs.clone(), + vis: v.node.vis, + stab: self.stability(v.node.id), + id: v.node.id, + kind: v.node.kind.clone(), + whence: v.span, + }).collect(), vis: it.vis, stab: self.stability(it.id), generics: params.clone(), - attrs: it.attrs.iter().map(|x| *x).collect(), + attrs: it.attrs.clone(), id: it.id, whence: it.span, } } - pub fn visit_fn(&mut self, item: &ast::Item, fd: &ast::FnDecl, + pub fn visit_fn(&mut self, item: &ast::Item, + name: ast::Ident, fd: &ast::FnDecl, fn_style: &ast::FnStyle, _abi: &abi::Abi, gen: &ast::Generics) -> Function { debug!("Visiting fn"); @@ -126,9 +121,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { id: item.id, vis: item.vis, stab: self.stability(item.id), - attrs: item.attrs.iter().map(|x| *x).collect(), + attrs: item.attrs.clone(), decl: fd.clone(), - name: item.ident, + name: name, whence: item.span, generics: gen.clone(), fn_style: *fn_style, @@ -150,7 +145,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { om.stab = self.stability(id); om.id = id; for i in m.items.iter() { - self.visit_item(&**i, &mut om); + self.visit_item(&**i, None, &mut om); } om } @@ -169,7 +164,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { }); let item = match item.node { ast::ViewItemUse(ref 
vpath) => { - match self.visit_view_path(*vpath, om, please_inline) { + match self.visit_view_path(&**vpath, om, please_inline) { None => return, Some(path) => { ast::ViewItem { @@ -184,9 +179,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { om.view_items.push(item); } - fn visit_view_path(&mut self, path: Gc, + fn visit_view_path(&mut self, path: &ast::ViewPath, om: &mut Module, - please_inline: bool) -> Option> { + please_inline: bool) -> Option> { match path.node { ast::ViewPathSimple(dst, _, id) => { if self.resolve_id(id, Some(dst), false, om, please_inline) { @@ -203,10 +198,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } if mine.len() == 0 { return None } - return Some(box(GC) ::syntax::codemap::Spanned { + return Some(P(::syntax::codemap::Spanned { node: ast::ViewPathList(p.clone(), mine, b.clone()), span: path.span, - }) + })) } // these are feature gated anyway @@ -216,7 +211,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } } } - return Some(path); + Some(P(path.clone())) } fn resolve_id(&mut self, id: ast::NodeId, renamed: Option, @@ -236,15 +231,6 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { match tcx.map.get(def.node) { ast_map::NodeItem(it) => { - let it = match renamed { - Some(ident) => { - box(GC) ast::Item { - ident: ident, - ..(*it).clone() - } - } - None => it, - }; if glob { match it.node { ast::ItemMod(ref m) => { @@ -252,13 +238,13 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { self.visit_view_item(vi, om); } for i in m.items.iter() { - self.visit_item(&**i, om); + self.visit_item(&**i, None, om); } } _ => { fail!("glob not mapped to a module"); } } } else { - self.visit_item(&*it, om); + self.visit_item(it, renamed, om); } true } @@ -266,47 +252,46 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { } } - pub fn visit_item(&mut self, item: &ast::Item, om: &mut Module) { + pub fn visit_item(&mut self, item: &ast::Item, + renamed: Option, om: &mut Module) { debug!("Visiting item {:?}", item); + let name = renamed.unwrap_or(item.ident); match item.node { ast::ItemMod(ref m) => { om.mods.push(self.visit_mod_contents(item.span, - item.attrs - .iter() - .map(|x| *x) - .collect(), + item.attrs.clone(), item.vis, item.id, m, - Some(item.ident))); + Some(name))); }, ast::ItemEnum(ref ed, ref gen) => - om.enums.push(self.visit_enum_def(item, ed, gen)), - ast::ItemStruct(sd, ref gen) => - om.structs.push(self.visit_struct_def(item, sd, gen)), + om.enums.push(self.visit_enum_def(item, name, ed, gen)), + ast::ItemStruct(ref sd, ref gen) => + om.structs.push(self.visit_struct_def(item, name, &**sd, gen)), ast::ItemFn(ref fd, ref pur, ref abi, ref gen, _) => - om.fns.push(self.visit_fn(item, &**fd, pur, abi, gen)), - ast::ItemTy(ty, ref gen) => { + om.fns.push(self.visit_fn(item, name, &**fd, pur, abi, gen)), + ast::ItemTy(ref ty, ref gen) => { let t = Typedef { - ty: ty, + ty: ty.clone(), gen: gen.clone(), - name: item.ident, + name: name, id: item.id, - attrs: item.attrs.iter().map(|x| *x).collect(), + attrs: item.attrs.clone(), whence: item.span, vis: item.vis, stab: self.stability(item.id), }; om.typedefs.push(t); }, - ast::ItemStatic(ty, ref mut_, ref exp) => { + ast::ItemStatic(ref ty, ref mut_, ref exp) => { let s = Static { - type_: ty, + type_: ty.clone(), mutability: mut_.clone(), expr: exp.clone(), id: item.id, - name: item.ident, - attrs: item.attrs.iter().map(|x| *x).collect(), + name: name, + attrs: item.attrs.clone(), whence: item.span, vis: item.vis, stab: self.stability(item.id), @@ -315,25 +300,25 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { }, ast::ItemTrait(ref gen, _, 
ref b, ref items) => { let t = Trait { - name: item.ident, - items: items.iter().map(|x| (*x).clone()).collect(), + name: name, + items: items.clone(), generics: gen.clone(), bounds: b.iter().map(|x| (*x).clone()).collect(), id: item.id, - attrs: item.attrs.iter().map(|x| *x).collect(), + attrs: item.attrs.clone(), whence: item.span, vis: item.vis, stab: self.stability(item.id), }; om.traits.push(t); }, - ast::ItemImpl(ref gen, ref tr, ty, ref items) => { + ast::ItemImpl(ref gen, ref tr, ref ty, ref items) => { let i = Impl { generics: gen.clone(), trait_: tr.clone(), - for_: ty, - items: items.iter().map(|x| *x).collect(), - attrs: item.attrs.iter().map(|x| *x).collect(), + for_: ty.clone(), + items: items.clone(), + attrs: item.attrs.clone(), id: item.id, whence: item.span, vis: item.vis, @@ -354,7 +339,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { fn visit_macro(&self, item: &ast::Item) -> Macro { Macro { id: item.id, - attrs: item.attrs.iter().map(|x| *x).collect(), + attrs: item.attrs.clone(), name: item.ident, whence: item.span, stab: self.stability(item.id), diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 4e65082fe3ad2..5546f868ba08d 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -16,25 +16,14 @@ use ast_util; use owned_slice::OwnedSlice; use parse::token::{InternedString, str_to_ident}; use parse::token; +use ptr::P; use std::fmt; use std::num::Zero; use std::fmt::Show; -use std::option::Option; use std::rc::Rc; -use std::gc::{Gc, GC}; use serialize::{Encodable, Decodable, Encoder, Decoder}; -/// A pointer abstraction. -// FIXME(eddyb) #10676 use Rc in the future. -pub type P = Gc; - -#[allow(non_snake_case)] -/// Construct a P from a T value. -pub fn P(value: T) -> P { - box(GC) value -} - // FIXME #6993: in librustc, uses of "ident" should be replaced // by just "Name". @@ -277,7 +266,7 @@ pub struct WherePredicate { /// The set of MetaItems that define the compilation environment of the crate, /// used to drive conditional compilation -pub type CrateConfig = Vec>; +pub type CrateConfig = Vec> ; #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Crate { @@ -285,7 +274,7 @@ pub struct Crate { pub attrs: Vec, pub config: CrateConfig, pub span: Span, - pub exported_macros: Vec> + pub exported_macros: Vec> } pub type MetaItem = Spanned; @@ -293,7 +282,7 @@ pub type MetaItem = Spanned; #[deriving(Clone, Eq, Encodable, Decodable, Hash, Show)] pub enum MetaItem_ { MetaWord(InternedString), - MetaList(InternedString, Vec>), + MetaList(InternedString, Vec>), MetaNameValue(InternedString, Lit), } @@ -325,8 +314,8 @@ impl PartialEq for MetaItem_ { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Block { pub view_items: Vec, - pub stmts: Vec>, - pub expr: Option>, + pub stmts: Vec>, + pub expr: Option>, pub id: NodeId, pub rules: BlockCheckMode, pub span: Span, @@ -342,7 +331,7 @@ pub struct Pat { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct FieldPat { pub ident: Ident, - pub pat: Gc, + pub pat: P, } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] @@ -372,20 +361,20 @@ pub enum Pat_ { /// which it is. The resolver determines this, and /// records this pattern's NodeId in an auxiliary /// set (of "PatIdents that refer to nullary enums") - PatIdent(BindingMode, SpannedIdent, Option>), + PatIdent(BindingMode, SpannedIdent, Option>), /// "None" means a * pattern where we don't bind the fields to names. 
- PatEnum(Path, Option>>), + PatEnum(Path, Option>>), PatStruct(Path, Vec, bool), - PatTup(Vec>), - PatBox(Gc), - PatRegion(Gc), // reference pattern - PatLit(Gc), - PatRange(Gc, Gc), + PatTup(Vec>), + PatBox(P), + PatRegion(P), // reference pattern + PatLit(P), + PatRange(P, P), /// [a, b, ..i, y, z] is represented as: /// PatVec(~[a, b], Some(i), ~[y, z]) - PatVec(Vec>, Option>, Vec>), + PatVec(Vec>, Option>, Vec>), PatMac(Mac), } @@ -431,13 +420,13 @@ pub type Stmt = Spanned; #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum Stmt_ { /// Could be an item or a local (let) binding: - StmtDecl(Gc, NodeId), + StmtDecl(P, NodeId), /// Expr without trailing semi-colon (must have unit type): - StmtExpr(Gc, NodeId), + StmtExpr(P, NodeId), /// Expr with trailing semi-colon (may have any type): - StmtSemi(Gc, NodeId), + StmtSemi(P, NodeId), /// bool: is there a trailing sem-colon? StmtMac(Mac, bool), @@ -457,8 +446,8 @@ pub enum LocalSource { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Local { pub ty: P, - pub pat: Gc, - pub init: Option>, + pub pat: P, + pub init: Option>, pub id: NodeId, pub span: Span, pub source: LocalSource, @@ -469,24 +458,24 @@ pub type Decl = Spanned; #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum Decl_ { /// A local (let) binding: - DeclLocal(Gc), + DeclLocal(P), /// An item binding: - DeclItem(Gc), + DeclItem(P), } /// represents one arm of a 'match' #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Arm { pub attrs: Vec, - pub pats: Vec>, - pub guard: Option>, - pub body: Gc, + pub pats: Vec>, + pub guard: Option>, + pub body: P, } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Field { pub ident: SpannedIdent, - pub expr: Gc, + pub expr: P, pub span: Span, } @@ -514,56 +503,56 @@ pub struct Expr { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum Expr_ { /// First expr is the place; second expr is the value. - ExprBox(Gc, Gc), - ExprVec(Vec>), - ExprCall(Gc, Vec>), - ExprMethodCall(SpannedIdent, Vec>, Vec>), - ExprTup(Vec>), - ExprBinary(BinOp, Gc, Gc), - ExprUnary(UnOp, Gc), - ExprLit(Gc), - ExprCast(Gc, P), - ExprIf(Gc, P, Option>), + ExprBox(P, P), + ExprVec(Vec>), + ExprCall(P, Vec>), + ExprMethodCall(SpannedIdent, Vec>, Vec>), + ExprTup(Vec>), + ExprBinary(BinOp, P, P), + ExprUnary(UnOp, P), + ExprLit(P), + ExprCast(P, P), + ExprIf(P, P, Option>), // FIXME #6993: change to Option ... or not, if these are hygienic. - ExprWhile(Gc, P, Option), + ExprWhile(P, P, Option), // FIXME #6993: change to Option ... or not, if these are hygienic. - ExprForLoop(Gc, Gc, P, Option), + ExprForLoop(P, P, P, Option), // Conditionless loop (can be exited with break, cont, or ret) // FIXME #6993: change to Option ... or not, if these are hygienic. ExprLoop(P, Option), - ExprMatch(Gc, Vec), + ExprMatch(P, Vec), ExprFnBlock(CaptureClause, P, P), ExprProc(P, P), ExprUnboxedFn(CaptureClause, UnboxedClosureKind, P, P), ExprBlock(P), - ExprAssign(Gc, Gc), - ExprAssignOp(BinOp, Gc, Gc), - ExprField(Gc, SpannedIdent, Vec>), - ExprTupField(Gc, Spanned, Vec>), - ExprIndex(Gc, Gc), + ExprAssign(P, P), + ExprAssignOp(BinOp, P, P), + ExprField(P, SpannedIdent, Vec>), + ExprTupField(P, Spanned, Vec>), + ExprIndex(P, P), /// Variable reference, possibly containing `::` and/or /// type parameters, e.g. 
foo::bar:: ExprPath(Path), - ExprAddrOf(Mutability, Gc), + ExprAddrOf(Mutability, P), ExprBreak(Option), ExprAgain(Option), - ExprRet(Option>), + ExprRet(Option>), ExprInlineAsm(InlineAsm), ExprMac(Mac), /// A struct literal expression. - ExprStruct(Path, Vec , Option> /* base */), + ExprStruct(Path, Vec, Option> /* base */), /// A vector literal constructed from one repeated element. - ExprRepeat(Gc /* element */, Gc /* count */), + ExprRepeat(P /* element */, P /* count */), /// No-op: used solely so we can pretty-print faithfully - ExprParen(Gc) + ExprParen(P) } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] @@ -776,12 +765,12 @@ pub struct TypeMethod { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum TraitItem { RequiredMethod(TypeMethod), - ProvidedMethod(Gc), + ProvidedMethod(P), } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum ImplItem { - MethodImplItem(Gc), + MethodImplItem(P), } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash)] @@ -889,7 +878,7 @@ impl fmt::Show for Onceness { } /// Represents the type of a closure -#[deriving(PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct ClosureTy { pub lifetimes: Vec, pub fn_style: FnStyle, @@ -898,7 +887,7 @@ pub struct ClosureTy { pub bounds: TyParamBounds, } -#[deriving(PartialEq, Eq, Encodable, Decodable, Hash, Show)] +#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct BareFnTy { pub fn_style: FnStyle, pub abi: Abi, @@ -919,18 +908,18 @@ pub enum Ty_ { TyBox(P), TyUniq(P), TyVec(P), - TyFixedLengthVec(P, Gc), + TyFixedLengthVec(P, P), TyPtr(MutTy), TyRptr(Option, MutTy), - TyClosure(Gc), - TyProc(Gc), - TyBareFn(Gc), - TyUnboxedFn(Gc), + TyClosure(P), + TyProc(P), + TyBareFn(P), + TyUnboxedFn(P), TyTup(Vec> ), TyPath(Path, Option, NodeId), // for #7264; see above /// No-op; kept solely so that we can pretty-print faithfully TyParen(P), - TyTypeof(Gc), + TyTypeof(P), /// TyInfer means the type should be inferred instead of it having been /// specified. This can appear anywhere in a type. TyInfer, @@ -946,8 +935,8 @@ pub enum AsmDialect { pub struct InlineAsm { pub asm: InternedString, pub asm_str_style: StrStyle, - pub outputs: Vec<(InternedString, Gc, bool)>, - pub inputs: Vec<(InternedString, Gc)>, + pub outputs: Vec<(InternedString, P, bool)>, + pub inputs: Vec<(InternedString, P)>, pub clobbers: InternedString, pub volatile: bool, pub alignstack: bool, @@ -958,7 +947,7 @@ pub struct InlineAsm { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct Arg { pub ty: P, - pub pat: Gc, + pub pat: P, pub id: NodeId, } @@ -972,11 +961,11 @@ impl Arg { node: TyInfer, span: DUMMY_SP, }), - pat: box(GC) Pat { + pat: P(Pat { id: DUMMY_NODE_ID, node: PatIdent(BindByValue(mutability), path, None), span: span - }, + }), id: DUMMY_NODE_ID } } @@ -1062,14 +1051,14 @@ pub struct Mod { /// to the last token in the external file. 
pub inner: Span, pub view_items: Vec, - pub items: Vec>, + pub items: Vec>, } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub struct ForeignMod { pub abi: Abi, pub view_items: Vec, - pub items: Vec>, + pub items: Vec>, } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] @@ -1081,7 +1070,7 @@ pub struct VariantArg { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum VariantKind { TupleVariantKind(Vec), - StructVariantKind(Gc), + StructVariantKind(P), } #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] @@ -1095,7 +1084,7 @@ pub struct Variant_ { pub attrs: Vec, pub kind: VariantKind, pub id: NodeId, - pub disr_expr: Option>, + pub disr_expr: Option>, pub vis: Visibility, } @@ -1151,7 +1140,7 @@ pub enum ViewItem_ { /// (containing arbitrary characters) from which to fetch the crate sources /// For example, extern crate whatever = "github.com/rust-lang/rust" ViewItemExternCrate(Ident, Option<(InternedString,StrStyle)>, NodeId), - ViewItemUse(Gc), + ViewItemUse(P), } /// Meta-data associated with an item @@ -1174,7 +1163,7 @@ pub struct AttrId(pub uint); pub struct Attribute_ { pub id: AttrId, pub style: AttrStyle, - pub value: Gc, + pub value: P, pub is_sugared_doc: bool, } @@ -1269,13 +1258,13 @@ pub struct Item { #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum Item_ { - ItemStatic(P, Mutability, Gc), + ItemStatic(P, Mutability, P), ItemFn(P, FnStyle, Abi, Generics, P), ItemMod(Mod), ItemForeignMod(ForeignMod), ItemTy(P, Generics), ItemEnum(EnumDef, Generics), - ItemStruct(Gc, Generics), + ItemStruct(P, Generics), /// Represents a Trait Declaration ItemTrait(Generics, Option, // (optional) default bound not required for Self. @@ -1318,15 +1307,10 @@ pub enum UnboxedClosureKind { /// that we trans. #[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] pub enum InlinedItem { - IIItem(Gc), - IITraitItem(DefId /* impl id */, InlinedTraitItem), - IIForeign(Gc), -} - -#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)] -pub enum InlinedTraitItem { - ProvidedInlinedTraitItem(Gc), - RequiredInlinedTraitItem(Gc), + IIItem(P), + IITraitItem(DefId /* impl id */, TraitItem), + IIImplItem(DefId /* impl id */, ImplItem), + IIForeign(P), } #[cfg(test)] diff --git a/src/libsyntax/ast_map/blocks.rs b/src/libsyntax/ast_map/blocks.rs index b00c6ce14b5b8..1400e494917f1 100644 --- a/src/libsyntax/ast_map/blocks.rs +++ b/src/libsyntax/ast_map/blocks.rs @@ -22,7 +22,7 @@ //! for the `Code` associated with a particular NodeId. use abi; -use ast::{P, Block, FnDecl, NodeId}; +use ast::{Block, FnDecl, NodeId}; use ast; use ast_map::{Node}; use ast_map; @@ -39,7 +39,7 @@ use visit; /// - The default implementation for a trait method. /// /// To construct one, use the `Code::from_node` function. -pub struct FnLikeNode { node: ast_map::Node } +pub struct FnLikeNode<'a> { node: ast_map::Node<'a> } /// MaybeFnLike wraps a method that indicates if an object /// corresponds to some FnLikeNode. @@ -47,8 +47,8 @@ pub trait MaybeFnLike { fn is_fn_like(&self) -> bool; } /// Components shared by fn-like things (fn items, methods, closures). 
pub struct FnParts<'a> { - pub decl: P, - pub body: P, + pub decl: &'a FnDecl, + pub body: &'a Block, pub kind: visit::FnKind<'a>, pub span: Span, pub id: NodeId, @@ -78,12 +78,12 @@ impl MaybeFnLike for ast::Expr { /// Carries either an FnLikeNode or a Block, as these are the two /// constructs that correspond to "code" (as in, something from which /// we can construct a control-flow graph). -pub enum Code { - FnLikeCode(FnLikeNode), - BlockCode(P), +pub enum Code<'a> { + FnLikeCode(FnLikeNode<'a>), + BlockCode(&'a Block), } -impl Code { +impl<'a> Code<'a> { pub fn id(&self) -> ast::NodeId { match *self { FnLikeCode(node) => node.id(), @@ -115,32 +115,32 @@ impl Code { /// use when implementing FnLikeNode operations. struct ItemFnParts<'a> { ident: ast::Ident, - decl: P, + decl: &'a ast::FnDecl, style: ast::FnStyle, abi: abi::Abi, generics: &'a ast::Generics, - body: P, + body: &'a Block, id: ast::NodeId, span: Span } /// These are all the components one can extract from a closure expr /// for use when implementing FnLikeNode operations. -struct ClosureParts { - decl: P, - body: P, +struct ClosureParts<'a> { + decl: &'a FnDecl, + body: &'a Block, id: NodeId, span: Span } -impl ClosureParts { - fn new(d: P, b: P, id: NodeId, s: Span) -> ClosureParts { +impl<'a> ClosureParts<'a> { + fn new(d: &'a FnDecl, b: &'a Block, id: NodeId, s: Span) -> ClosureParts<'a> { ClosureParts { decl: d, body: b, id: id, span: s } } } -impl FnLikeNode { - pub fn to_fn_parts<'a>(&'a self) -> FnParts<'a> { +impl<'a> FnLikeNode<'a> { + pub fn to_fn_parts(self) -> FnParts<'a> { FnParts { decl: self.decl(), body: self.body(), @@ -150,31 +150,31 @@ impl FnLikeNode { } } - pub fn body<'a>(&'a self) -> P { - self.handle(|i: ItemFnParts| i.body, - |m: &'a ast::Method| m.pe_body(), - |c: ClosureParts| c.body) + pub fn body(self) -> &'a Block { + self.handle(|i: ItemFnParts<'a>| &*i.body, + |m: &'a ast::Method| m.pe_body(), + |c: ClosureParts<'a>| c.body) } - pub fn decl<'a>(&'a self) -> P { - self.handle(|i: ItemFnParts| i.decl, - |m: &'a ast::Method| m.pe_fn_decl(), - |c: ClosureParts| c.decl) + pub fn decl(self) -> &'a FnDecl { + self.handle(|i: ItemFnParts<'a>| &*i.decl, + |m: &'a ast::Method| m.pe_fn_decl(), + |c: ClosureParts<'a>| c.decl) } - pub fn span<'a>(&'a self) -> Span { + pub fn span(self) -> Span { self.handle(|i: ItemFnParts| i.span, |m: &'a ast::Method| m.span, |c: ClosureParts| c.span) } - pub fn id<'a>(&'a self) -> NodeId { + pub fn id(self) -> NodeId { self.handle(|i: ItemFnParts| i.id, |m: &'a ast::Method| m.id, |c: ClosureParts| c.id) } - pub fn kind<'a>(&'a self) -> visit::FnKind<'a> { + pub fn kind(self) -> visit::FnKind<'a> { let item = |p: ItemFnParts<'a>| -> visit::FnKind<'a> { visit::FkItemFn(p.ident, p.generics, p.style, p.abi) }; @@ -187,33 +187,33 @@ impl FnLikeNode { self.handle(item, method, closure) } - fn handle<'a, A>(&'a self, - item_fn: |ItemFnParts<'a>| -> A, - method: |&'a ast::Method| -> A, - closure: |ClosureParts| -> A) -> A { + fn handle(self, + item_fn: |ItemFnParts<'a>| -> A, + method: |&'a ast::Method| -> A, + closure: |ClosureParts<'a>| -> A) -> A { match self.node { - ast_map::NodeItem(ref i) => match i.node { - ast::ItemFn(decl, style, abi, ref generics, block) => + ast_map::NodeItem(i) => match i.node { + ast::ItemFn(ref decl, style, abi, ref generics, ref block) => item_fn(ItemFnParts{ - ident: i.ident, decl: decl, style: style, body: block, + ident: i.ident, decl: &**decl, style: style, body: &**block, generics: generics, abi: abi, id: i.id, span: i.span }), _ => 
fail!("item FnLikeNode that is not fn-like"), }, - ast_map::NodeTraitItem(ref t) => match **t { + ast_map::NodeTraitItem(t) => match *t { ast::ProvidedMethod(ref m) => method(&**m), _ => fail!("trait method FnLikeNode that is not fn-like"), }, - ast_map::NodeImplItem(ref ii) => { - match **ii { + ast_map::NodeImplItem(ii) => { + match *ii { ast::MethodImplItem(ref m) => method(&**m), } } - ast_map::NodeExpr(ref e) => match e.node { + ast_map::NodeExpr(e) => match e.node { ast::ExprFnBlock(_, ref decl, ref block) => - closure(ClosureParts::new(*decl, *block, e.id, e.span)), + closure(ClosureParts::new(&**decl, &**block, e.id, e.span)), ast::ExprProc(ref decl, ref block) => - closure(ClosureParts::new(*decl, *block, e.id, e.span)), + closure(ClosureParts::new(&**decl, &**block, e.id, e.span)), _ => fail!("expr FnLikeNode that is not fn-like"), }, _ => fail!("other FnLikeNode that is not fn-like"), diff --git a/src/libsyntax/ast_map/mod.rs b/src/libsyntax/ast_map/mod.rs index d1f78c71e19df..20594a428481e 100644 --- a/src/libsyntax/ast_map/mod.rs +++ b/src/libsyntax/ast_map/mod.rs @@ -11,18 +11,19 @@ use abi; use ast::*; use ast_util; -use codemap::{Span, Spanned}; +use ast_util::PostExpansionMethod; +use codemap::{DUMMY_SP, Span, Spanned}; use fold::Folder; -use fold; use parse::token; use print::pprust; -use util::small_vector::SmallVector; +use visit::{mod, Visitor}; +use arena::TypedArena; use std::cell::RefCell; use std::fmt; -use std::gc::{Gc, GC}; use std::io::IoResult; use std::iter; +use std::mem; use std::slice; pub mod blocks; @@ -95,62 +96,85 @@ pub fn path_to_string>(mut path: PI) -> String { }).to_string() } -#[deriving(Clone)] -pub enum Node { - NodeItem(Gc), - NodeForeignItem(Gc), - NodeTraitItem(Gc), - NodeImplItem(Gc), - NodeVariant(P), - NodeExpr(Gc), - NodeStmt(Gc), - NodeArg(Gc), - NodeLocal(Gc), - NodePat(Gc), - NodeBlock(P), +pub enum Node<'ast> { + NodeItem(&'ast Item), + NodeForeignItem(&'ast ForeignItem), + NodeTraitItem(&'ast TraitItem), + NodeImplItem(&'ast ImplItem), + NodeVariant(&'ast Variant), + NodeExpr(&'ast Expr), + NodeStmt(&'ast Stmt), + NodeArg(&'ast Pat), + NodeLocal(&'ast Pat), + NodePat(&'ast Pat), + NodeBlock(&'ast Block), /// NodeStructCtor represents a tuple struct. - NodeStructCtor(Gc), + NodeStructCtor(&'ast StructDef), - NodeLifetime(Gc), + NodeLifetime(&'ast Lifetime), } /// Represents an entry and its parent Node ID /// The odd layout is to bring down the total size. -#[deriving(Clone)] -enum MapEntry { +#[deriving(Show)] +enum MapEntry<'ast> { /// Placeholder for holes in the map. NotPresent, /// All the node types, with a parent ID. - EntryItem(NodeId, Gc), - EntryForeignItem(NodeId, Gc), - EntryTraitItem(NodeId, Gc), - EntryImplItem(NodeId, Gc), - EntryVariant(NodeId, P), - EntryExpr(NodeId, Gc), - EntryStmt(NodeId, Gc), - EntryArg(NodeId, Gc), - EntryLocal(NodeId, Gc), - EntryPat(NodeId, Gc), - EntryBlock(NodeId, P), - EntryStructCtor(NodeId, Gc), - EntryLifetime(NodeId, Gc), + EntryItem(NodeId, &'ast Item), + EntryForeignItem(NodeId, &'ast ForeignItem), + EntryTraitItem(NodeId, &'ast TraitItem), + EntryImplItem(NodeId, &'ast ImplItem), + EntryVariant(NodeId, &'ast Variant), + EntryExpr(NodeId, &'ast Expr), + EntryStmt(NodeId, &'ast Stmt), + EntryArg(NodeId, &'ast Pat), + EntryLocal(NodeId, &'ast Pat), + EntryPat(NodeId, &'ast Pat), + EntryBlock(NodeId, &'ast Block), + EntryStructCtor(NodeId, &'ast StructDef), + EntryLifetime(NodeId, &'ast Lifetime), /// Roots for node trees. 
RootCrate, - RootInlinedParent(P) + RootInlinedParent(&'ast InlinedParent) +} + +impl<'ast> Clone for MapEntry<'ast> { + fn clone(&self) -> MapEntry<'ast> { + *self + } } +#[deriving(Show)] struct InlinedParent { - path: Vec , - /// RequiredMethod by NodeTraitItem and NodeImplItem. - def_id: DefId + path: Vec, + ii: InlinedItem } -impl MapEntry { - fn parent(&self) -> Option { - Some(match *self { +impl<'ast> MapEntry<'ast> { + fn from_node(p: NodeId, node: Node<'ast>) -> MapEntry<'ast> { + match node { + NodeItem(n) => EntryItem(p, n), + NodeForeignItem(n) => EntryForeignItem(p, n), + NodeTraitItem(n) => EntryTraitItem(p, n), + NodeImplItem(n) => EntryImplItem(p, n), + NodeVariant(n) => EntryVariant(p, n), + NodeExpr(n) => EntryExpr(p, n), + NodeStmt(n) => EntryStmt(p, n), + NodeArg(n) => EntryArg(p, n), + NodeLocal(n) => EntryLocal(p, n), + NodePat(n) => EntryPat(p, n), + NodeBlock(n) => EntryBlock(p, n), + NodeStructCtor(n) => EntryStructCtor(p, n), + NodeLifetime(n) => EntryLifetime(p, n) + } + } + + fn parent(self) -> Option { + Some(match self { EntryItem(id, _) => id, EntryForeignItem(id, _) => id, EntryTraitItem(id, _) => id, @@ -168,29 +192,51 @@ impl MapEntry { }) } - fn to_node(&self) -> Option { - Some(match *self { - EntryItem(_, p) => NodeItem(p), - EntryForeignItem(_, p) => NodeForeignItem(p), - EntryTraitItem(_, p) => NodeTraitItem(p), - EntryImplItem(_, p) => NodeImplItem(p), - EntryVariant(_, p) => NodeVariant(p), - EntryExpr(_, p) => NodeExpr(p), - EntryStmt(_, p) => NodeStmt(p), - EntryArg(_, p) => NodeArg(p), - EntryLocal(_, p) => NodeLocal(p), - EntryPat(_, p) => NodePat(p), - EntryBlock(_, p) => NodeBlock(p), - EntryStructCtor(_, p) => NodeStructCtor(p), - EntryLifetime(_, p) => NodeLifetime(p), + fn to_node(self) -> Option> { + Some(match self { + EntryItem(_, n) => NodeItem(n), + EntryForeignItem(_, n) => NodeForeignItem(n), + EntryTraitItem(_, n) => NodeTraitItem(n), + EntryImplItem(_, n) => NodeImplItem(n), + EntryVariant(_, n) => NodeVariant(n), + EntryExpr(_, n) => NodeExpr(n), + EntryStmt(_, n) => NodeStmt(n), + EntryArg(_, n) => NodeArg(n), + EntryLocal(_, n) => NodeLocal(n), + EntryPat(_, n) => NodePat(n), + EntryBlock(_, n) => NodeBlock(n), + EntryStructCtor(_, n) => NodeStructCtor(n), + EntryLifetime(_, n) => NodeLifetime(n), _ => return None }) } } +/// Stores a crate and any number of inlined items from other crates. +pub struct Forest { + krate: Crate, + inlined_items: TypedArena +} + +impl Forest { + pub fn new(krate: Crate) -> Forest { + Forest { + krate: krate, + inlined_items: TypedArena::new() + } + } + + pub fn krate<'ast>(&'ast self) -> &'ast Crate { + &self.krate + } +} + /// Represents a mapping from Node IDs to AST elements and their parent /// Node IDs -pub struct Map { +pub struct Map<'ast> { + /// The backing storage for all the AST nodes. + forest: &'ast Forest, + /// NodeIds are sequential integers from 0, so we can be /// super-compact by storing them in a vector. Not everything with /// a NodeId is in the map, but empirically the occupancy is about @@ -200,26 +246,25 @@ pub struct Map { /// /// Also, indexing is pretty quick when you've got a vector and /// plain old integers. 
- map: RefCell > + map: RefCell>> } -impl Map { +impl<'ast> Map<'ast> { fn entry_count(&self) -> uint { self.map.borrow().len() } - fn find_entry(&self, id: NodeId) -> Option { - let map = self.map.borrow(); - if map.len() > id as uint { - Some(*map.get(id as uint)) - } else { - None - } + fn find_entry(&self, id: NodeId) -> Option> { + self.map.borrow().as_slice().get(id as uint).map(|e| *e) + } + + pub fn krate(&self) -> &'ast Crate { + &self.forest.krate } /// Retrieve the Node corresponding to `id`, failing if it cannot /// be found. - pub fn get(&self, id: NodeId) -> Node { + pub fn get(&self, id: NodeId) -> Node<'ast> { match self.find(id) { Some(node) => node, None => fail!("couldn't find node id {} in the AST map", id) @@ -228,7 +273,7 @@ impl Map { /// Retrieve the Node corresponding to `id`, returning None if /// cannot be found. - pub fn find(&self, id: NodeId) -> Option { + pub fn find(&self, id: NodeId) -> Option> { self.find_entry(id).and_then(|x| x.to_node()) } @@ -241,7 +286,8 @@ impl Map { pub fn get_parent_did(&self, id: NodeId) -> DefId { let parent = self.get_parent(id); match self.find_entry(parent) { - Some(RootInlinedParent(data)) => data.def_id, + Some(RootInlinedParent(&InlinedParent {ii: IITraitItem(did, _), ..})) => did, + Some(RootInlinedParent(&InlinedParent {ii: IIImplItem(did, _), ..})) => did, _ => ast_util::local_def(parent) } } @@ -249,10 +295,12 @@ impl Map { pub fn get_foreign_abi(&self, id: NodeId) -> abi::Abi { let parent = self.get_parent(id); let abi = match self.find_entry(parent) { - Some(EntryItem(_, i)) => match i.node { - ItemForeignMod(ref nm) => Some(nm.abi), - _ => None - }, + Some(EntryItem(_, i)) => { + match i.node { + ItemForeignMod(ref nm) => Some(nm.abi), + _ => None + } + } /// Wrong but OK, because the only inlined foreign items are intrinsics. 
Some(RootInlinedParent(_)) => Some(abi::RustIntrinsic), _ => None @@ -272,24 +320,24 @@ impl Map { } } - pub fn expect_item(&self, id: NodeId) -> Gc { + pub fn expect_item(&self, id: NodeId) -> &'ast Item { match self.find(id) { Some(NodeItem(item)) => item, _ => fail!("expected item, found {}", self.node_to_string(id)) } } - pub fn expect_struct(&self, id: NodeId) -> Gc { + pub fn expect_struct(&self, id: NodeId) -> &'ast StructDef { match self.find(id) { Some(NodeItem(i)) => { match i.node { - ItemStruct(struct_def, _) => struct_def, + ItemStruct(ref struct_def, _) => &**struct_def, _ => fail!("struct ID bound to non-struct") } } - Some(NodeVariant(ref variant)) => { - match (*variant).node.kind { - StructVariantKind(struct_def) => struct_def, + Some(NodeVariant(variant)) => { + match variant.node.kind { + StructVariantKind(ref struct_def) => &**struct_def, _ => fail!("struct ID bound to enum variant that isn't struct-like"), } } @@ -297,20 +345,27 @@ impl Map { } } - pub fn expect_variant(&self, id: NodeId) -> P { + pub fn expect_variant(&self, id: NodeId) -> &'ast Variant { match self.find(id) { Some(NodeVariant(variant)) => variant, _ => fail!(format!("expected variant, found {}", self.node_to_string(id))), } } - pub fn expect_foreign_item(&self, id: NodeId) -> Gc { + pub fn expect_foreign_item(&self, id: NodeId) -> &'ast ForeignItem { match self.find(id) { Some(NodeForeignItem(item)) => item, _ => fail!("expected foreign item, found {}", self.node_to_string(id)) } } + pub fn expect_expr(&self, id: NodeId) -> &'ast Expr { + match self.find(id) { + Some(NodeExpr(expr)) => expr, + _ => fail!("expected expr, found {}", self.node_to_string(id)) + } + } + /// returns the name associated with the given NodeId's AST pub fn get_path_elem(&self, id: NodeId) -> PathElem { let node = self.get(id); @@ -340,7 +395,7 @@ impl Map { }, NodeTraitItem(tm) => match *tm { RequiredMethod(ref m) => PathName(m.ident.name), - ProvidedMethod(m) => match m.node { + ProvidedMethod(ref m) => match m.node { MethDecl(ident, _, _, _, _, _, _, _) => { PathName(ident.name) } @@ -399,10 +454,9 @@ impl Map { /// Given a node ID and a closure, apply the closure to the array /// of attributes associated with the AST corresponding to the Node ID pub fn with_attrs(&self, id: NodeId, f: |Option<&[Attribute]>| -> T) -> T { - let node = self.get(id); - let attrs = match node { - NodeItem(ref i) => Some(i.attrs.as_slice()), - NodeForeignItem(ref fi) => Some(fi.attrs.as_slice()), + let attrs = match self.get(id) { + NodeItem(i) => Some(i.attrs.as_slice()), + NodeForeignItem(fi) => Some(fi.attrs.as_slice()), NodeTraitItem(ref tm) => match **tm { RequiredMethod(ref type_m) => Some(type_m.attrs.as_slice()), ProvidedMethod(ref m) => Some(m.attrs.as_slice()) @@ -432,7 +486,7 @@ impl Map { /// such as `foo::bar::quux`, `bar::quux`, `other::bar::quux`, and /// any other such items it can find in the map. pub fn nodes_matching_suffix<'a, S:Str>(&'a self, parts: &'a [S]) - -> NodesMatchingSuffix<'a,S> { + -> NodesMatchingSuffix<'a, 'ast, S> { NodesMatchingSuffix { map: self, item_name: parts.last().unwrap(), @@ -478,14 +532,14 @@ impl Map { } } -pub struct NodesMatchingSuffix<'a, S:'a> { - map: &'a Map, +pub struct NodesMatchingSuffix<'a, 'ast:'a, S:'a> { + map: &'a Map<'ast>, item_name: &'a S, in_which: &'a [S], idx: NodeId, } -impl<'a,S:Str> NodesMatchingSuffix<'a,S> { +impl<'a, 'ast, S:Str> NodesMatchingSuffix<'a, 'ast, S> { /// Returns true only if some suffix of the module path for parent /// matches `self.in_which`. 
/// @@ -542,7 +596,7 @@ impl<'a,S:Str> NodesMatchingSuffix<'a,S> { } } -impl<'a,S:Str> Iterator for NodesMatchingSuffix<'a,S> { +impl<'a, 'ast, S:Str> Iterator for NodesMatchingSuffix<'a, 'ast, S> { fn next(&mut self) -> Option { loop { let idx = self.idx; @@ -551,11 +605,11 @@ impl<'a,S:Str> Iterator for NodesMatchingSuffix<'a,S> { } self.idx += 1; let (p, name) = match self.map.find_entry(idx) { - Some(EntryItem(p, n)) => (p, n.name()), - Some(EntryForeignItem(p, n)) => (p, n.name()), - Some(EntryTraitItem(p, n)) => (p, n.name()), - Some(EntryImplItem(p, n)) => (p, n.name()), - Some(EntryVariant(p, n)) => (p, n.name()), + Some(EntryItem(p, n)) => (p, n.name()), + Some(EntryForeignItem(p, n))=> (p, n.name()), + Some(EntryTraitItem(p, n)) => (p, n.name()), + Some(EntryImplItem(p, n)) => (p, n.name()), + Some(EntryVariant(p, n)) => (p, n.name()), _ => continue, }; if self.matches_names(p, name) { @@ -578,7 +632,7 @@ impl Named for TraitItem { fn name(&self) -> Name { match *self { RequiredMethod(ref tm) => tm.ident.name, - ProvidedMethod(m) => m.name(), + ProvidedMethod(ref m) => m.name(), } } } @@ -602,193 +656,203 @@ pub trait FoldOps { fn new_id(&self, id: NodeId) -> NodeId { id } + fn new_def_id(&self, def_id: DefId) -> DefId { + def_id + } fn new_span(&self, span: Span) -> Span { span } } -/// A Folder that walks over an AST and constructs a Node ID Map. Its -/// fold_ops argument has the opportunity to replace Node IDs and spans. -pub struct Ctx<'a, F> { - map: &'a Map, - /// The node in which we are currently mapping (an item or a method). - /// When equal to DUMMY_NODE_ID, the next mapped node becomes the parent. - parent: NodeId, +/// A Folder that updates IDs and Span's according to fold_ops. +struct IdAndSpanUpdater { fold_ops: F } -impl<'a, F> Ctx<'a, F> { - fn insert(&self, id: NodeId, entry: MapEntry) { - (*self.map.map.borrow_mut()).grow_set(id as uint, &NotPresent, entry); - } -} - -impl<'a, F: FoldOps> Folder for Ctx<'a, F> { +impl Folder for IdAndSpanUpdater { fn new_id(&mut self, id: NodeId) -> NodeId { - let id = self.fold_ops.new_id(id); - if self.parent == DUMMY_NODE_ID { - self.parent = id; - } - id + self.fold_ops.new_id(id) } fn new_span(&mut self, span: Span) -> Span { self.fold_ops.new_span(span) } +} - fn fold_item(&mut self, i: Gc) -> SmallVector> { - let parent = self.parent; - self.parent = DUMMY_NODE_ID; +/// A Visitor that walks over an AST and collects Node's into an AST Map. +struct NodeCollector<'ast> { + map: Vec>, + /// The node in which we are currently mapping (an item or a method). 
+ parent: NodeId +} - let i = fold::noop_fold_item(&*i, self).expect_one("expected one item"); - assert_eq!(self.parent, i.id); +impl<'ast> NodeCollector<'ast> { + fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) { + self.map.grow_set(id as uint, &NotPresent, entry); + debug!("ast_map: {} => {}", id, entry); + } + + fn insert(&mut self, id: NodeId, node: Node<'ast>) { + let entry = MapEntry::from_node(self.parent, node); + self.insert_entry(id, entry); + } + + fn visit_fn_decl(&mut self, decl: &'ast FnDecl) { + for a in decl.inputs.iter() { + self.insert(a.id, NodeArg(&*a.pat)); + } + } +} +impl<'ast> Visitor<'ast> for NodeCollector<'ast> { + fn visit_item(&mut self, i: &'ast Item) { + self.insert(i.id, NodeItem(i)); + let parent = self.parent; + self.parent = i.id; match i.node { ItemImpl(_, _, _, ref impl_items) => { for impl_item in impl_items.iter() { - match *impl_item { - MethodImplItem(m) => { - self.insert(m.id, - EntryImplItem(self.parent, - box(GC) *impl_item)); - } - } + let id = match *impl_item { + MethodImplItem(ref m) => m.id + }; + self.insert(id, NodeImplItem(impl_item)); } } ItemEnum(ref enum_definition, _) => { - for &v in enum_definition.variants.iter() { - self.insert(v.node.id, EntryVariant(self.parent, v)); + for v in enum_definition.variants.iter() { + self.insert(v.node.id, NodeVariant(&**v)); } } ItemForeignMod(ref nm) => { for nitem in nm.items.iter() { - self.insert(nitem.id, EntryForeignItem(self.parent, - nitem.clone())); + self.insert(nitem.id, NodeForeignItem(&**nitem)); } } ItemStruct(ref struct_def, _) => { // If this is a tuple-like struct, register the constructor. match struct_def.ctor_id { Some(ctor_id) => { - self.insert(ctor_id, EntryStructCtor(self.parent, - struct_def.clone())); + self.insert(ctor_id, NodeStructCtor(&**struct_def)); } None => {} } } ItemTrait(_, _, _, ref methods) => { for tm in methods.iter() { - match *tm { - RequiredMethod(ref m) => { - self.insert(m.id, EntryTraitItem(self.parent, - box(GC) (*tm).clone())); - } - ProvidedMethod(m) => { - self.insert(m.id, EntryTraitItem(self.parent, - box(GC) ProvidedMethod(m))); - } - } + let id = match *tm { + RequiredMethod(ref m) => m.id, + ProvidedMethod(ref m) => m.id + }; + self.insert(id, NodeTraitItem(tm)); } } _ => {} } - + visit::walk_item(self, i); self.parent = parent; - self.insert(i.id, EntryItem(self.parent, i)); - - SmallVector::one(i) } - fn fold_pat(&mut self, pat: Gc) -> Gc { - let pat = fold::noop_fold_pat(pat, self); - match pat.node { - PatIdent(..) => { - // Note: this is at least *potentially* a pattern... - self.insert(pat.id, EntryLocal(self.parent, pat)); - } - _ => { - self.insert(pat.id, EntryPat(self.parent, pat)); - } - } - - pat + fn visit_pat(&mut self, pat: &'ast Pat) { + self.insert(pat.id, match pat.node { + // Note: this is at least *potentially* a pattern... + PatIdent(..) 
=> NodeLocal(pat), + _ => NodePat(pat) + }); + visit::walk_pat(self, pat); } - fn fold_expr(&mut self, expr: Gc) -> Gc { - let expr = fold::noop_fold_expr(expr, self); - - self.insert(expr.id, EntryExpr(self.parent, expr)); - - expr + fn visit_expr(&mut self, expr: &'ast Expr) { + self.insert(expr.id, NodeExpr(expr)); + visit::walk_expr(self, expr); } - fn fold_stmt(&mut self, stmt: &Stmt) -> SmallVector> { - let stmt = fold::noop_fold_stmt(stmt, self).expect_one("expected one statement"); - self.insert(ast_util::stmt_id(&*stmt), EntryStmt(self.parent, stmt)); - SmallVector::one(stmt) + fn visit_stmt(&mut self, stmt: &'ast Stmt) { + self.insert(ast_util::stmt_id(stmt), NodeStmt(stmt)); + visit::walk_stmt(self, stmt); } - fn fold_type_method(&mut self, m: &TypeMethod) -> TypeMethod { + fn visit_ty_method(&mut self, m: &'ast TypeMethod) { let parent = self.parent; - self.parent = DUMMY_NODE_ID; - let m = fold::noop_fold_type_method(m, self); - assert_eq!(self.parent, m.id); + self.parent = m.id; + self.visit_fn_decl(&*m.decl); + visit::walk_ty_method(self, m); self.parent = parent; - m } - fn fold_method(&mut self, m: Gc) -> SmallVector> { - let parent = self.parent; - self.parent = DUMMY_NODE_ID; - let m = fold::noop_fold_method(&*m, self).expect_one( - "noop_fold_method must produce exactly one method"); - assert_eq!(self.parent, m.id); - self.parent = parent; - SmallVector::one(m) + fn visit_fn(&mut self, fk: visit::FnKind<'ast>, fd: &'ast FnDecl, + b: &'ast Block, s: Span, id: NodeId) { + match fk { + visit::FkMethod(..) => { + let parent = self.parent; + self.parent = id; + self.visit_fn_decl(fd); + visit::walk_fn(self, fk, fd, b, s); + self.parent = parent; + } + _ => { + self.visit_fn_decl(fd); + visit::walk_fn(self, fk, fd, b, s); + } + } } - fn fold_fn_decl(&mut self, decl: &FnDecl) -> P { - let decl = fold::noop_fold_fn_decl(decl, self); - for a in decl.inputs.iter() { - self.insert(a.id, EntryArg(self.parent, a.pat)); + fn visit_ty(&mut self, ty: &'ast Ty) { + match ty.node { + TyClosure(ref fd) | TyProc(ref fd) => { + self.visit_fn_decl(&*fd.decl); + } + TyBareFn(ref fd) => { + self.visit_fn_decl(&*fd.decl); + } + TyUnboxedFn(ref fd) => { + self.visit_fn_decl(&*fd.decl); + } + _ => {} } - decl + visit::walk_ty(self, ty); } - fn fold_block(&mut self, block: P) -> P { - let block = fold::noop_fold_block(block, self); - self.insert(block.id, EntryBlock(self.parent, block)); - block + fn visit_block(&mut self, block: &'ast Block) { + self.insert(block.id, NodeBlock(block)); + visit::walk_block(self, block); } - fn fold_lifetime(&mut self, lifetime: &Lifetime) -> Lifetime { - let lifetime = fold::noop_fold_lifetime(lifetime, self); - self.insert(lifetime.id, EntryLifetime(self.parent, box(GC) lifetime)); - lifetime + fn visit_lifetime_ref(&mut self, lifetime: &'ast Lifetime) { + self.insert(lifetime.id, NodeLifetime(lifetime)); } - fn fold_mac(&mut self, mac: &Mac) -> Mac { - fold::noop_fold_mac(mac, self) + fn visit_lifetime_decl(&mut self, def: &'ast LifetimeDef) { + self.visit_lifetime_ref(&def.lifetime); } } -pub fn map_crate(krate: Crate, fold_ops: F) -> (Crate, Map) { - let map = Map { map: RefCell::new(Vec::new()) }; - let krate = { - let mut cx = Ctx { - map: &map, - parent: CRATE_NODE_ID, - fold_ops: fold_ops - }; - cx.insert(CRATE_NODE_ID, RootCrate); - cx.fold_crate(krate) +pub fn map_crate<'ast, F: FoldOps>(forest: &'ast mut Forest, fold_ops: F) -> Map<'ast> { + // Replace the crate with an empty one to take it out. 
+ let krate = mem::replace(&mut forest.krate, Crate { + module: Mod { + inner: DUMMY_SP, + view_items: vec![], + items: vec![], + }, + attrs: vec![], + config: vec![], + exported_macros: vec![], + span: DUMMY_SP + }); + forest.krate = IdAndSpanUpdater { fold_ops: fold_ops }.fold_crate(krate); + + let mut collector = NodeCollector { + map: vec![], + parent: CRATE_NODE_ID }; + collector.insert_entry(CRATE_NODE_ID, RootCrate); + visit::walk_crate(&mut collector, &forest.krate); + let map = collector.map; if log_enabled!(::log::DEBUG) { - let map = map.map.borrow(); // This only makes sense for ordered stores; note the // enumerate to count the number of entries. - let (entries_less_1, _) = (*map).iter().filter(|&x| { + let (entries_less_1, _) = map.iter().filter(|&x| { match *x { NotPresent => false, _ => true @@ -796,63 +860,88 @@ pub fn map_crate(krate: Crate, fold_ops: F) -> (Crate, Map) { }).enumerate().last().expect("AST map was empty after folding?"); let entries = entries_less_1 + 1; - let vector_length = (*map).len(); + let vector_length = map.len(); debug!("The AST map has {} entries with a maximum of {}: occupancy {:.1}%", entries, vector_length, (entries as f64 / vector_length as f64) * 100.); } - (krate, map) + Map { + forest: forest, + map: RefCell::new(map) + } } /// Used for items loaded from external crate that are being inlined into this /// crate. The `path` should be the path to the item but should not include /// the item itself. -pub fn map_decoded_item(map: &Map, - path: Vec , - fold_ops: F, - fold: |&mut Ctx| -> InlinedItem) - -> InlinedItem { - let mut cx = Ctx { - map: map, - parent: DUMMY_NODE_ID, - fold_ops: fold_ops +pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>, + path: Vec, + ii: InlinedItem, + fold_ops: F) + -> &'ast InlinedItem { + let mut fld = IdAndSpanUpdater { fold_ops: fold_ops }; + let ii = match ii { + IIItem(i) => IIItem(fld.fold_item(i).expect_one("expected one item")), + IITraitItem(d, ti) => match ti { + ProvidedMethod(m) => { + IITraitItem(fld.fold_ops.new_def_id(d), + ProvidedMethod(fld.fold_method(m) + .expect_one("expected one method"))) + } + RequiredMethod(ty_m) => { + IITraitItem(fld.fold_ops.new_def_id(d), + RequiredMethod(fld.fold_type_method(ty_m))) + } + }, + IIImplItem(d, m) => match m { + MethodImplItem(m) => { + IIImplItem(fld.fold_ops.new_def_id(d), + MethodImplItem(fld.fold_method(m) + .expect_one("expected one method"))) + } + }, + IIForeign(i) => IIForeign(fld.fold_foreign_item(i)) }; - // Generate a NodeId for the RootInlinedParent inserted below. - cx.new_id(DUMMY_NODE_ID); + let ii_parent = map.forest.inlined_items.alloc(InlinedParent { + path: path, + ii: ii + }); + + let mut collector = NodeCollector { + map: mem::replace(&mut *map.map.borrow_mut(), vec![]), + parent: fld.new_id(DUMMY_NODE_ID) + }; + let ii_parent_id = collector.parent; + collector.insert_entry(ii_parent_id, RootInlinedParent(ii_parent)); + visit::walk_inlined_item(&mut collector, &ii_parent.ii); // Methods get added to the AST map when their impl is visited. Since we // don't decode and instantiate the impl, but just the method, we have to // add it to the table now. Likewise with foreign items. 
- let mut def_id = DefId { krate: LOCAL_CRATE, node: DUMMY_NODE_ID }; - let ii = fold(&mut cx); - match ii { + match ii_parent.ii { IIItem(_) => {} - IITraitItem(impl_did, inlined_trait_item) => { - let (trait_item_id, entry) = match inlined_trait_item { - ProvidedInlinedTraitItem(m) => { - (m.id, - EntryTraitItem(cx.parent, box(GC) ProvidedMethod(m))) - } - RequiredInlinedTraitItem(m) => { - (m.id, - EntryImplItem(cx.parent, box(GC) MethodImplItem(m))) - } + IITraitItem(_, ref trait_item) => { + let trait_item_id = match *trait_item { + ProvidedMethod(ref m) => m.id, + RequiredMethod(ref m) => m.id }; - cx.insert(trait_item_id, entry); - def_id = impl_did; + + collector.insert(trait_item_id, NodeTraitItem(trait_item)); } - IIForeign(i) => { - cx.insert(i.id, EntryForeignItem(cx.parent, i)); + IIImplItem(_, ref impl_item) => { + let impl_item_id = match *impl_item { + MethodImplItem(ref m) => m.id + }; + + collector.insert(impl_item_id, NodeImplItem(impl_item)); + } + IIForeign(ref i) => { + collector.insert(i.id, NodeForeignItem(&**i)); } } - - cx.insert(cx.parent, RootInlinedParent(P(InlinedParent { - path: path, - def_id: def_id - }))); - - ii + *map.map.borrow_mut() = collector.map; + &ii_parent.ii } pub trait NodePrinter { @@ -920,10 +1009,13 @@ fn node_id_to_string(map: &Map, id: NodeId) -> String { } } } - Some(NodeTraitItem(ref tm)) => { - let m = ast_util::trait_item_to_ty_method(&**tm); + Some(NodeTraitItem(ref ti)) => { + let ident = match **ti { + ProvidedMethod(ref m) => m.pe_ident(), + RequiredMethod(ref m) => m.ident + }; format!("method {} in {} (id={})", - token::get_ident(m.ident), + token::get_ident(ident), map.path_to_string(id), id) } Some(NodeVariant(ref variant)) => { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index ff733673bd262..becfe715f29e2 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -19,12 +19,12 @@ use codemap::Span; use owned_slice::OwnedSlice; use parse::token; use print::pprust; +use ptr::P; use visit::Visitor; use visit; use std::cell::Cell; use std::cmp; -use std::gc::{Gc, GC}; use std::u32; pub fn path_name_i(idents: &[Ident]) -> String { @@ -98,7 +98,7 @@ pub fn unop_to_string(op: UnOp) -> &'static str { } } -pub fn is_path(e: Gc) -> bool { +pub fn is_path(e: P) -> bool { return match e.node { ExprPath(_) => true, _ => false }; } @@ -166,21 +166,6 @@ pub fn float_ty_to_string(t: FloatTy) -> String { } } -pub fn is_call_expr(e: Gc) -> bool { - match e.node { ExprCall(..) 
=> true, _ => false } -} - -pub fn block_from_expr(e: Gc) -> P { - P(Block { - view_items: Vec::new(), - stmts: Vec::new(), - expr: Some(e), - id: e.id, - rules: DefaultBlock, - span: e.span - }) -} - // convert a span and an identifier to the corresponding // 1-segment path pub fn ident_to_path(s: Span, identifier: Ident) -> Path { @@ -197,10 +182,12 @@ pub fn ident_to_path(s: Span, identifier: Ident) -> Path { } } -pub fn ident_to_pat(id: NodeId, s: Span, i: Ident) -> Gc { - box(GC) ast::Pat { id: id, - node: PatIdent(BindByValue(MutImmutable), codemap::Spanned{span:s, node:i}, None), - span: s } +pub fn ident_to_pat(id: NodeId, s: Span, i: Ident) -> P { + P(Pat { + id: id, + node: PatIdent(BindByValue(MutImmutable), codemap::Spanned{span:s, node:i}, None), + span: s + }) } pub fn name_to_dummy_lifetime(name: Name) -> Lifetime { @@ -226,57 +213,6 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { token::gensym_ident(pretty.as_slice()) } -pub fn trait_method_to_ty_method(method: &Method) -> TypeMethod { - match method.node { - MethDecl(ident, - ref generics, - abi, - explicit_self, - fn_style, - decl, - _, - vis) => { - TypeMethod { - ident: ident, - attrs: method.attrs.clone(), - fn_style: fn_style, - decl: decl, - generics: generics.clone(), - explicit_self: explicit_self, - id: method.id, - span: method.span, - vis: vis, - abi: abi, - } - }, - MethMac(_) => fail!("expected non-macro method declaration") - } -} - -/// extract a TypeMethod from a TraitItem. if the TraitItem is -/// a default, pull out the useful fields to make a TypeMethod -// -// NB: to be used only after expansion is complete, and macros are gone. -pub fn trait_item_to_ty_method(method: &TraitItem) -> TypeMethod { - match *method { - RequiredMethod(ref m) => (*m).clone(), - ProvidedMethod(ref m) => trait_method_to_ty_method(&**m), - } -} - -pub fn split_trait_methods(trait_methods: &[TraitItem]) - -> (Vec , Vec> ) { - let mut reqd = Vec::new(); - let mut provd = Vec::new(); - for trt_method in trait_methods.iter() { - match *trt_method { - RequiredMethod(ref tm) => reqd.push((*tm).clone()), - ProvidedMethod(m) => provd.push(m) - } - }; - (reqd, provd) -} - pub fn struct_field_visibility(field: ast::StructField) -> Visibility { match field.node.kind { ast::NamedField(_, v) | ast::UnnamedField(v) => v @@ -538,6 +474,14 @@ impl<'a, 'v, O: IdVisitingOperation> Visitor<'v> for IdVisitor<'a, O> { } visit::walk_trait_item(self, tm); } + + fn visit_lifetime_ref(&mut self, lifetime: &'v Lifetime) { + self.operation.visit_id(lifetime.id); + } + + fn visit_lifetime_decl(&mut self, def: &'v LifetimeDef) { + self.visit_lifetime_ref(&def.lifetime); + } } pub fn visit_ids_for_inlined_item(item: &InlinedItem, @@ -595,13 +539,6 @@ pub fn compute_id_range_for_fn_body(fk: visit::FnKind, visitor.result.get() } -pub fn is_item_impl(item: Gc) -> bool { - match item.node { - ItemImpl(..) => true, - _ => false - } -} - pub fn walk_pat(pat: &Pat, it: |&Pat| -> bool) -> bool { if !it(pat) { return false; @@ -670,7 +607,7 @@ pub fn struct_def_is_tuple_like(struct_def: &ast::StructDef) -> bool { /// Returns true if the given pattern consists solely of an identifier /// and false otherwise. -pub fn pat_is_ident(pat: Gc) -> bool { +pub fn pat_is_ident(pat: P) -> bool { match pat.node { ast::PatIdent(..) => true, _ => false, @@ -705,28 +642,13 @@ pub fn segments_name_eq(a : &[ast::PathSegment], b : &[ast::PathSegment]) -> boo } /// Returns true if this literal is a string and false otherwise. 
-pub fn lit_is_str(lit: Gc) -> bool { +pub fn lit_is_str(lit: &Lit) -> bool { match lit.node { LitStr(..) => true, _ => false, } } -pub fn get_inner_tys(ty: P) -> Vec> { - match ty.node { - ast::TyRptr(_, mut_ty) | ast::TyPtr(mut_ty) => { - vec!(mut_ty.ty) - } - ast::TyBox(ty) - | ast::TyVec(ty) - | ast::TyUniq(ty) - | ast::TyFixedLengthVec(ty, _) => vec!(ty), - ast::TyTup(ref tys) => tys.clone(), - ast::TyParen(ty) => get_inner_tys(ty), - _ => Vec::new() - } -} - /// Returns true if the static with the given mutability and attributes /// has a significant address and false otherwise. pub fn static_has_significant_address(mutbl: ast::Mutability, @@ -749,13 +671,13 @@ pub trait PostExpansionMethod { fn pe_abi(&self) -> Abi; fn pe_explicit_self<'a>(&'a self) -> &'a ast::ExplicitSelf; fn pe_fn_style(&self) -> ast::FnStyle; - fn pe_fn_decl(&self) -> P; - fn pe_body(&self) -> P; + fn pe_fn_decl<'a>(&'a self) -> &'a ast::FnDecl; + fn pe_body<'a>(&'a self) -> &'a ast::Block; fn pe_vis(&self) -> ast::Visibility; } macro_rules! mf_method{ - ($meth_name:ident, $field_ty:ty, $field_pat:pat, $result:ident) => { + ($meth_name:ident, $field_ty:ty, $field_pat:pat, $result:expr) => { fn $meth_name<'a>(&'a self) -> $field_ty { match self.node { $field_pat => $result, @@ -776,8 +698,8 @@ impl PostExpansionMethod for Method { mf_method!(pe_explicit_self,&'a ast::ExplicitSelf, MethDecl(_,_,_,ref explicit_self,_,_,_,_),explicit_self) mf_method!(pe_fn_style,ast::FnStyle,MethDecl(_,_,_,_,fn_style,_,_,_),fn_style) - mf_method!(pe_fn_decl,P,MethDecl(_,_,_,_,_,decl,_,_),decl) - mf_method!(pe_body,P,MethDecl(_,_,_,_,_,_,body,_),body) + mf_method!(pe_fn_decl,&'a ast::FnDecl,MethDecl(_,_,_,_,_,ref decl,_,_),&**decl) + mf_method!(pe_body,&'a ast::Block,MethDecl(_,_,_,_,_,_,ref body,_),&**body) mf_method!(pe_vis,ast::Visibility,MethDecl(_,_,_,_,_,_,_,vis),vis) } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index dd422d021493f..80e4d148bdec8 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -18,10 +18,10 @@ use diagnostic::SpanHandler; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::token::InternedString; use parse::token; +use ptr::P; use std::collections::HashSet; use std::collections::BitvSet; -use std::gc::{Gc, GC}; local_data_key!(used_attrs: BitvSet) @@ -50,7 +50,7 @@ pub trait AttrMetaMethods { /// containing a string, otherwise None. fn value_str(&self) -> Option; /// Gets a list of inner meta items from a list MetaItem type. 
- fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]>; + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]>; } impl AttrMetaMethods for Attribute { @@ -65,7 +65,7 @@ impl AttrMetaMethods for Attribute { fn value_str(&self) -> Option { self.meta().value_str() } - fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]> { + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { self.node.value.meta_item_list() } } @@ -91,7 +91,7 @@ impl AttrMetaMethods for MetaItem { } } - fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]> { + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { match self.node { MetaList(_, ref l) => Some(l.as_slice()), _ => None @@ -100,30 +100,30 @@ impl AttrMetaMethods for MetaItem { } // Annoying, but required to get test_cfg to work -impl AttrMetaMethods for Gc { +impl AttrMetaMethods for P { fn name(&self) -> InternedString { (**self).name() } fn value_str(&self) -> Option { (**self).value_str() } - fn meta_item_list<'a>(&'a self) -> Option<&'a [Gc]> { + fn meta_item_list<'a>(&'a self) -> Option<&'a [P]> { (**self).meta_item_list() } } pub trait AttributeMethods { - fn meta(&self) -> Gc; - fn desugar_doc(&self) -> Attribute; + fn meta<'a>(&'a self) -> &'a MetaItem; + fn with_desugared_doc(&self, f: |&Attribute| -> T) -> T; } impl AttributeMethods for Attribute { /// Extract the MetaItem from inside this Attribute. - fn meta(&self) -> Gc { - self.node.value + fn meta<'a>(&'a self) -> &'a MetaItem { + &*self.node.value } /// Convert self to a normal #[doc="foo"] comment, if it is a /// comment like `///` or `/** */`. (Returns self unchanged for /// non-sugared doc attributes.) - fn desugar_doc(&self) -> Attribute { + fn with_desugared_doc(&self, f: |&Attribute| -> T) -> T { if self.node.is_sugared_doc { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( @@ -131,12 +131,12 @@ impl AttributeMethods for Attribute { token::intern_and_get_ident(strip_doc_comment_decoration( comment.get()).as_slice())); if self.node.style == ast::AttrOuter { - mk_attr_outer(self.node.id, meta) + f(&mk_attr_outer(self.node.id, meta)) } else { - mk_attr_inner(self.node.id, meta) + f(&mk_attr_inner(self.node.id, meta)) } } else { - *self + f(self) } } } @@ -144,23 +144,22 @@ impl AttributeMethods for Attribute { /* Constructors */ pub fn mk_name_value_item_str(name: InternedString, value: InternedString) - -> Gc { + -> P { let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr)); mk_name_value_item(name, value_lit) } pub fn mk_name_value_item(name: InternedString, value: ast::Lit) - -> Gc { - box(GC) dummy_spanned(MetaNameValue(name, value)) + -> P { + P(dummy_spanned(MetaNameValue(name, value))) } -pub fn mk_list_item(name: InternedString, - items: Vec>) -> Gc { - box(GC) dummy_spanned(MetaList(name, items)) +pub fn mk_list_item(name: InternedString, items: Vec>) -> P { + P(dummy_spanned(MetaList(name, items))) } -pub fn mk_word_item(name: InternedString) -> Gc { - box(GC) dummy_spanned(MetaWord(name)) +pub fn mk_word_item(name: InternedString) -> P { + P(dummy_spanned(MetaWord(name))) } local_data_key!(next_attr_id: uint) @@ -172,7 +171,7 @@ pub fn mk_attr_id() -> AttrId { } /// Returns an inner attribute with the given value. -pub fn mk_attr_inner(id: AttrId, item: Gc) -> Attribute { +pub fn mk_attr_inner(id: AttrId, item: P) -> Attribute { dummy_spanned(Attribute_ { id: id, style: ast::AttrInner, @@ -182,7 +181,7 @@ pub fn mk_attr_inner(id: AttrId, item: Gc) -> Attribute { } /// Returns an outer attribute with the given value. 
-pub fn mk_attr_outer(id: AttrId, item: Gc) -> Attribute { +pub fn mk_attr_outer(id: AttrId, item: P) -> Attribute { dummy_spanned(Attribute_ { id: id, style: ast::AttrOuter, @@ -199,8 +198,8 @@ pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, let attr = Attribute_ { id: id, style: style, - value: box(GC) spanned(lo, hi, MetaNameValue(InternedString::new("doc"), - lit)), + value: P(spanned(lo, hi, MetaNameValue(InternedString::new("doc"), + lit))), is_sugared_doc: true }; spanned(lo, hi, attr) @@ -210,8 +209,7 @@ pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, /// Check if `needle` occurs in `haystack` by a structural /// comparison. This is slightly subtle, and relies on ignoring the /// span included in the `==` comparison a plain MetaItem. -pub fn contains(haystack: &[Gc], - needle: Gc) -> bool { +pub fn contains(haystack: &[P], needle: &MetaItem) -> bool { debug!("attr::contains (name={})", needle.name()); haystack.iter().any(|item| { debug!(" testing: {}", item.name()); @@ -234,7 +232,7 @@ pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) .and_then(|at| at.value_str()) } -pub fn last_meta_item_value_str_by_name(items: &[Gc], name: &str) +pub fn last_meta_item_value_str_by_name(items: &[P], name: &str) -> Option { items.iter() .rev() @@ -244,28 +242,25 @@ pub fn last_meta_item_value_str_by_name(items: &[Gc], name: &str) /* Higher-level applications */ -pub fn sort_meta_items(items: &[Gc]) -> Vec> { +pub fn sort_meta_items(items: Vec>) -> Vec> { // This is sort of stupid here, but we need to sort by // human-readable strings. - let mut v = items.iter() - .map(|&mi| (mi.name(), mi)) - .collect::)> >(); + let mut v = items.move_iter() + .map(|mi| (mi.name(), mi)) + .collect::)>>(); v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); // There doesn't seem to be a more optimal way to do this - v.move_iter().map(|(_, m)| { - match m.node { - MetaList(ref n, ref mis) => { - box(GC) Spanned { - node: MetaList((*n).clone(), - sort_meta_items(mis.as_slice())), - .. /*bad*/ (*m).clone() - } - } - _ => m + v.move_iter().map(|(_, m)| m.map(|Spanned {node, span}| { + Spanned { + node: match node { + MetaList(n, mis) => MetaList(n, sort_meta_items(mis)), + _ => node + }, + span: span } - }).collect() + })).collect() } pub fn find_crate_name(attrs: &[Attribute]) -> Option { @@ -318,8 +313,8 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// test_cfg(`[foo="a", bar]`, `[cfg(not(bar))]`) == false /// test_cfg(`[foo="a", bar]`, `[cfg(bar, foo="a")]`) == true /// test_cfg(`[foo="a", bar]`, `[cfg(bar, foo="b")]`) == false -pub fn test_cfg> - (cfg: &[Gc], mut metas: It) -> bool { +pub fn test_cfg<'a, AM: AttrMetaMethods, It: Iterator<&'a AM>> + (cfg: &[P], mut metas: It) -> bool { // having no #[cfg(...)] attributes counts as matching. let mut no_cfgs = true; @@ -344,10 +339,10 @@ pub fn test_cfg> // not match. 
!not_cfgs.iter().all(|mi| { debug!("cfg(not({}[...]))", mi.name()); - contains(cfg, *mi) + contains(cfg, &**mi) }) } - _ => contains(cfg, *cfg_mi) + _ => contains(cfg, &**cfg_mi) } }) } @@ -397,7 +392,7 @@ pub fn find_stability_generic<'a, }; return Some((Stability { - level: level, + level: level, text: attr.value_str() }, attr)); } @@ -412,7 +407,7 @@ pub fn find_stability(attrs: &[Attribute]) -> Option { }) } -pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[Gc]) { +pub fn require_unique_names(diagnostic: &SpanHandler, metas: &[P]) { let mut set = HashSet::new(); for meta in metas.iter() { let name = meta.name(); diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 78d3d86b29692..faa3946b74d0f 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -543,10 +543,9 @@ fn print_macro_backtrace(w: &mut EmitterWriter, Ok(()) } -pub fn expect(diag: &SpanHandler, opt: Option, msg: || -> String) - -> T { +pub fn expect(diag: &SpanHandler, opt: Option, msg: || -> String) -> T { match opt { - Some(ref t) => (*t).clone(), - None => diag.handler().bug(msg().as_slice()), + Some(t) => t, + None => diag.handler().bug(msg().as_slice()), } } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 25a6a4c01bd47..132b59c89b203 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -10,13 +10,13 @@ use std::cell::RefCell; use std::collections::HashMap; -use std::gc::Gc; use ast; use ast::{Ident, Name, TokenTree}; use codemap::Span; use ext::base::{ExtCtxt, MacExpr, MacItem, MacResult}; use ext::build::AstBuilder; use parse::token; +use ptr::P; local_data_key!(registered_diagnostics: RefCell>>) local_data_key!(used_diagnostics: RefCell>) @@ -116,7 +116,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let (count, expr) = with_used_diagnostics(|diagnostics_in_use| { with_registered_diagnostics(|diagnostics| { - let descriptions: Vec> = diagnostics + let descriptions: Vec> = diagnostics .iter().filter_map(|(code, description)| { if !diagnostics_in_use.contains_key(code) { ecx.span_warn(span, format!( diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 8028d51a7b5cf..4b8c3376cad2e 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -18,8 +18,7 @@ use ext::base; use ext::base::*; use parse::token::InternedString; use parse::token; - -use std::gc::GC; +use ptr::P; enum State { Asm, @@ -199,7 +198,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } - MacExpr::new(box(GC) ast::Expr { + MacExpr::new(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprInlineAsm(ast::InlineAsm { asm: token::intern_and_get_ident(asm.get()), @@ -212,5 +211,5 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) dialect: dialect }), span: sp - }) + })) } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 4976e68cc6464..6e25b6b73ade6 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -18,6 +18,7 @@ use parse; use parse::parser; use parse::token; use parse::token::{InternedString, intern, str_to_ident}; +use ptr::P; use util::small_vector::SmallVector; use ext::mtwt; use fold::Folder; @@ -43,18 +44,18 @@ pub trait ItemDecorator { fn expand(&self, ecx: &mut ExtCtxt, sp: Span, - meta_item: Gc, - item: Gc, - push: |Gc|); + meta_item: &ast::MetaItem, + item: &ast::Item, + push: |P|); } -impl ItemDecorator for fn(&mut ExtCtxt, Span, Gc, Gc, 
|Gc|) { +impl ItemDecorator for fn(&mut ExtCtxt, Span, &ast::MetaItem, &ast::Item, |P|) { fn expand(&self, ecx: &mut ExtCtxt, sp: Span, - meta_item: Gc, - item: Gc, - push: |Gc|) { + meta_item: &ast::MetaItem, + item: &ast::Item, + push: |P|) { (*self)(ecx, sp, meta_item, item, push) } } @@ -63,18 +64,18 @@ pub trait ItemModifier { fn expand(&self, ecx: &mut ExtCtxt, span: Span, - meta_item: Gc, - item: Gc) - -> Gc; + meta_item: &ast::MetaItem, + item: P) + -> P; } -impl ItemModifier for fn(&mut ExtCtxt, Span, Gc, Gc) -> Gc { +impl ItemModifier for fn(&mut ExtCtxt, Span, &ast::MetaItem, P) -> P { fn expand(&self, ecx: &mut ExtCtxt, span: Span, - meta_item: Gc, - item: Gc) - -> Gc { + meta_item: &ast::MetaItem, + item: P) + -> P { (*self)(ecx, span, meta_item, item) } } @@ -128,29 +129,29 @@ impl IdentMacroExpander for IdentMacroExpanderFn { /// methods are spliced into the AST at the callsite of the macro (or /// just into the compiler's internal macro table, for `make_def`). pub trait MacResult { - /// Define a new macro. + /// Attempt to define a new macro. // this should go away; the idea that a macro might expand into // either a macro definition or an expression, depending on what // the context wants, is kind of silly. - fn make_def(&self) -> Option { + fn make_def(&mut self) -> Option { None } /// Create an expression. - fn make_expr(&self) -> Option> { + fn make_expr(self: Box) -> Option> { None } /// Create zero or more items. - fn make_items(&self) -> Option>> { + fn make_items(self: Box) -> Option>> { None } /// Create zero or more methods. - fn make_methods(&self) -> Option>> { + fn make_methods(self: Box) -> Option>> { None } /// Create a pattern. - fn make_pat(&self) -> Option> { + fn make_pat(self: Box) -> Option> { None } @@ -158,69 +159,69 @@ pub trait MacResult { /// /// By default this attempts to create an expression statement, /// returning None if that fails. - fn make_stmt(&self) -> Option> { + fn make_stmt(self: Box) -> Option> { self.make_expr() - .map(|e| box(GC) codemap::respan(e.span, ast::StmtExpr(e, ast::DUMMY_NODE_ID))) + .map(|e| P(codemap::respan(e.span, ast::StmtExpr(e, ast::DUMMY_NODE_ID)))) } } /// A convenience type for macros that return a single expression. pub struct MacExpr { - e: Gc, + e: P } impl MacExpr { - pub fn new(e: Gc) -> Box { + pub fn new(e: P) -> Box { box MacExpr { e: e } as Box } } impl MacResult for MacExpr { - fn make_expr(&self) -> Option> { + fn make_expr(self: Box) -> Option> { Some(self.e) } - fn make_pat(&self) -> Option> { + fn make_pat(self: Box) -> Option> { match self.e.node { - ast::ExprLit(_) => Some(box(GC) ast::Pat { + ast::ExprLit(_) => Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, - node: ast::PatLit(self.e), - span: self.e.span - }), + span: self.e.span, + node: ast::PatLit(self.e) + })), _ => None } } } /// A convenience type for macros that return a single pattern. pub struct MacPat { - p: Gc, + p: P } impl MacPat { - pub fn new(p: Gc) -> Box { + pub fn new(p: P) -> Box { box MacPat { p: p } as Box } } impl MacResult for MacPat { - fn make_pat(&self) -> Option> { + fn make_pat(self: Box) -> Option> { Some(self.p) } } /// A convenience type for macros that return a single item. 
pub struct MacItem { - i: Gc + i: P } impl MacItem { - pub fn new(i: Gc) -> Box { + pub fn new(i: P) -> Box { box MacItem { i: i } as Box } } impl MacResult for MacItem { - fn make_items(&self) -> Option>> { + fn make_items(self: Box) -> Option>> { Some(SmallVector::one(self.i)) } - fn make_stmt(&self) -> Option> { - Some(box(GC) codemap::respan( + fn make_stmt(self: Box) -> Option> { + Some(P(codemap::respan( self.i.span, ast::StmtDecl( - box(GC) codemap::respan(self.i.span, ast::DeclItem(self.i)), - ast::DUMMY_NODE_ID))) + P(codemap::respan(self.i.span, ast::DeclItem(self.i))), + ast::DUMMY_NODE_ID)))) } } @@ -250,17 +251,17 @@ impl DummyResult { } /// A plain dummy expression. - pub fn raw_expr(sp: Span) -> Gc { - box(GC) ast::Expr { + pub fn raw_expr(sp: Span) -> P { + P(ast::Expr { id: ast::DUMMY_NODE_ID, - node: ast::ExprLit(box(GC) codemap::respan(sp, ast::LitNil)), + node: ast::ExprLit(P(codemap::respan(sp, ast::LitNil))), span: sp, - } + }) } /// A plain dummy pattern. - pub fn raw_pat(sp: Span) -> Gc { - box(GC) ast::Pat { + pub fn raw_pat(sp: Span) -> ast::Pat { + ast::Pat { id: ast::DUMMY_NODE_ID, node: ast::PatWild(ast::PatWildSingle), span: sp, @@ -270,13 +271,13 @@ impl DummyResult { } impl MacResult for DummyResult { - fn make_expr(&self) -> Option> { + fn make_expr(self: Box) -> Option> { Some(DummyResult::raw_expr(self.span)) } - fn make_pat(&self) -> Option> { - Some(DummyResult::raw_pat(self.span)) + fn make_pat(self: Box) -> Option> { + Some(P(DummyResult::raw_pat(self.span))) } - fn make_items(&self) -> Option>> { + fn make_items(self: Box) -> Option>> { // this code needs a comment... why not always just return the Some() ? if self.expr_only { None @@ -284,17 +285,17 @@ impl MacResult for DummyResult { Some(SmallVector::zero()) } } - fn make_methods(&self) -> Option>> { + fn make_methods(self: Box) -> Option>> { if self.expr_only { None } else { Some(SmallVector::zero()) } } - fn make_stmt(&self) -> Option> { - Some(box(GC) codemap::respan(self.span, - ast::StmtExpr(DummyResult::raw_expr(self.span), - ast::DUMMY_NODE_ID))) + fn make_stmt(self: Box) -> Option> { + Some(P(codemap::respan(self.span, + ast::StmtExpr(DummyResult::raw_expr(self.span), + ast::DUMMY_NODE_ID)))) } } @@ -461,7 +462,7 @@ pub struct ExtCtxt<'a> { pub mod_path: Vec , pub trace_mac: bool, - pub exported_macros: Vec>, + pub exported_macros: Vec>, pub syntax_env: SyntaxEnv, } @@ -482,7 +483,7 @@ impl<'a> ExtCtxt<'a> { } #[deprecated = "Replaced with `expander().fold_expr()`"] - pub fn expand_expr(&mut self, e: Gc) -> Gc { + pub fn expand_expr(&mut self, e: P) -> P { self.expander().fold_expr(e) } @@ -595,12 +596,12 @@ impl<'a> ExtCtxt<'a> { /// Extract a string literal from the macro expanded version of `expr`, /// emitting `err_msg` if `expr` is not a string literal. This does not stop /// compilation on error, merely emits a non-fatal error and returns None. -pub fn expr_to_string(cx: &mut ExtCtxt, expr: Gc, err_msg: &str) - -> Option<(InternedString, ast::StrStyle)> { +pub fn expr_to_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) + -> Option<(InternedString, ast::StrStyle)> { // we want to be able to handle e.g. concat("foo", "bar") let expr = cx.expander().fold_expr(expr); match expr.node { - ast::ExprLit(l) => match l.node { + ast::ExprLit(ref l) => match l.node { ast::LitStr(ref s, style) => return Some(((*s).clone(), style)), _ => cx.span_err(l.span, err_msg) }, @@ -651,7 +652,7 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, /// parsing error, emit a non-fatal error and return None. 
pub fn get_exprs_from_tts(cx: &mut ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) -> Option>> { + tts: &[ast::TokenTree]) -> Option>> { let mut p = cx.new_parser_from_tts(tts); let mut es = Vec::new(); while p.token != token::EOF { diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 6bd1fba4b58a3..eda373c4fb806 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -9,19 +9,18 @@ // except according to those terms. use abi; -use ast::{P, Ident, Generics, NodeId, Expr}; +use ast::{Ident, Generics, Expr}; use ast; use ast_util; use attr; use codemap::{Span, respan, Spanned, DUMMY_SP, Pos}; use ext::base::ExtCtxt; -use fold::Folder; use owned_slice::OwnedSlice; use parse::token::special_idents; use parse::token::InternedString; use parse::token; +use ptr::P; -use std::gc::{Gc, GC}; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -64,7 +63,6 @@ pub trait AstBuilder { fn ty_vars(&self, ty_params: &OwnedSlice) -> Vec> ; fn ty_vars_global(&self, ty_params: &OwnedSlice) -> Vec> ; fn ty_field_imm(&self, span: Span, name: Ident, ty: P) -> ast::TypeField; - fn strip_bounds(&self, bounds: &Generics) -> Generics; fn typaram(&self, span: Span, @@ -83,140 +81,130 @@ pub trait AstBuilder { -> ast::LifetimeDef; // statements - fn stmt_expr(&self, expr: Gc) -> Gc; - fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, - ex: Gc) -> Gc; + fn stmt_expr(&self, expr: P) -> P; + fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, ex: P) -> P; fn stmt_let_typed(&self, sp: Span, mutbl: bool, ident: ast::Ident, typ: P, - ex: Gc) - -> Gc; - fn stmt_item(&self, sp: Span, item: Gc) -> Gc; + ex: P) + -> P; + fn stmt_item(&self, sp: Span, item: P) -> P; // blocks - fn block(&self, span: Span, stmts: Vec>, - expr: Option>) -> P; - fn block_expr(&self, expr: Gc) -> P; + fn block(&self, span: Span, stmts: Vec>, + expr: Option>) -> P; + fn block_expr(&self, expr: P) -> P; fn block_all(&self, span: Span, - view_items: Vec , - stmts: Vec> , - expr: Option>) -> P; + view_items: Vec, + stmts: Vec>, + expr: Option>) -> P; // expressions - fn expr(&self, span: Span, node: ast::Expr_) -> Gc; - fn expr_path(&self, path: ast::Path) -> Gc; - fn expr_ident(&self, span: Span, id: ast::Ident) -> Gc; + fn expr(&self, span: Span, node: ast::Expr_) -> P; + fn expr_path(&self, path: ast::Path) -> P; + fn expr_ident(&self, span: Span, id: ast::Ident) -> P; - fn expr_self(&self, span: Span) -> Gc; + fn expr_self(&self, span: Span) -> P; fn expr_binary(&self, sp: Span, op: ast::BinOp, - lhs: Gc, rhs: Gc) -> Gc; - fn expr_deref(&self, sp: Span, e: Gc) -> Gc; - fn expr_unary(&self, sp: Span, op: ast::UnOp, e: Gc) -> Gc; - - fn expr_managed(&self, sp: Span, e: Gc) -> Gc; - fn expr_addr_of(&self, sp: Span, e: Gc) -> Gc; - fn expr_mut_addr_of(&self, sp: Span, e: Gc) -> Gc; - fn expr_field_access(&self, span: Span, expr: Gc, - ident: ast::Ident) -> Gc; - fn expr_tup_field_access(&self, sp: Span, expr: Gc, - idx: uint) -> Gc; - fn expr_call(&self, span: Span, expr: Gc, - args: Vec>) -> Gc; - fn expr_call_ident(&self, span: Span, id: ast::Ident, - args: Vec>) -> Gc; - fn expr_call_global(&self, sp: Span, fn_path: Vec , - args: Vec>) -> Gc; + lhs: P, rhs: P) -> P; + fn expr_deref(&self, sp: Span, e: P) -> P; + fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P) -> P; + + fn expr_managed(&self, sp: Span, e: P) -> P; + fn expr_addr_of(&self, sp: Span, e: P) -> P; + fn expr_mut_addr_of(&self, sp: Span, e: P) -> P; + fn expr_field_access(&self, span: Span, 
expr: P, ident: ast::Ident) -> P; + fn expr_tup_field_access(&self, sp: Span, expr: P, + idx: uint) -> P; + fn expr_call(&self, span: Span, expr: P, args: Vec>) -> P; + fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec>) -> P; + fn expr_call_global(&self, sp: Span, fn_path: Vec, + args: Vec> ) -> P; fn expr_method_call(&self, span: Span, - expr: Gc, ident: ast::Ident, - args: Vec> ) -> Gc; - fn expr_block(&self, b: P) -> Gc; - fn expr_cast(&self, sp: Span, expr: Gc, - ty: P) -> Gc; - - fn field_imm(&self, span: Span, name: Ident, e: Gc) -> ast::Field; - fn expr_struct(&self, span: Span, path: ast::Path, - fields: Vec ) -> Gc; + expr: P, ident: ast::Ident, + args: Vec> ) -> P; + fn expr_block(&self, b: P) -> P; + fn expr_cast(&self, sp: Span, expr: P, ty: P) -> P; + + fn field_imm(&self, span: Span, name: Ident, e: P) -> ast::Field; + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec) -> P; fn expr_struct_ident(&self, span: Span, id: ast::Ident, - fields: Vec ) -> Gc; + fields: Vec) -> P; - fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> Gc; + fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P; - fn expr_uint(&self, span: Span, i: uint) -> Gc; - fn expr_int(&self, sp: Span, i: int) -> Gc; - fn expr_u8(&self, sp: Span, u: u8) -> Gc; - fn expr_bool(&self, sp: Span, value: bool) -> Gc; + fn expr_uint(&self, span: Span, i: uint) -> P; + fn expr_int(&self, sp: Span, i: int) -> P; + fn expr_u8(&self, sp: Span, u: u8) -> P; + fn expr_bool(&self, sp: Span, value: bool) -> P; - fn expr_vec(&self, sp: Span, exprs: Vec> ) -> Gc; - fn expr_vec_ng(&self, sp: Span) -> Gc; - fn expr_vec_slice(&self, sp: Span, exprs: Vec> ) -> Gc; - fn expr_str(&self, sp: Span, s: InternedString) -> Gc; + fn expr_vec(&self, sp: Span, exprs: Vec>) -> P; + fn expr_vec_ng(&self, sp: Span) -> P; + fn expr_vec_slice(&self, sp: Span, exprs: Vec>) -> P; + fn expr_str(&self, sp: Span, s: InternedString) -> P; - fn expr_some(&self, sp: Span, expr: Gc) -> Gc; - fn expr_none(&self, sp: Span) -> Gc; + fn expr_some(&self, sp: Span, expr: P) -> P; + fn expr_none(&self, sp: Span) -> P; - fn expr_tuple(&self, sp: Span, exprs: Vec>) -> Gc; + fn expr_tuple(&self, sp: Span, exprs: Vec>) -> P; - fn expr_fail(&self, span: Span, msg: InternedString) -> Gc; - fn expr_unreachable(&self, span: Span) -> Gc; + fn expr_fail(&self, span: Span, msg: InternedString) -> P; + fn expr_unreachable(&self, span: Span) -> P; - fn expr_ok(&self, span: Span, expr: Gc) -> Gc; - fn expr_err(&self, span: Span, expr: Gc) -> Gc; - fn expr_try(&self, span: Span, head: Gc) -> Gc; + fn expr_ok(&self, span: Span, expr: P) -> P; + fn expr_err(&self, span: Span, expr: P) -> P; + fn expr_try(&self, span: Span, head: P) -> P; - fn pat(&self, span: Span, pat: ast::Pat_) -> Gc; - fn pat_wild(&self, span: Span) -> Gc; - fn pat_lit(&self, span: Span, expr: Gc) -> Gc; - fn pat_ident(&self, span: Span, ident: ast::Ident) -> Gc; + fn pat(&self, span: Span, pat: ast::Pat_) -> P; + fn pat_wild(&self, span: Span) -> P; + fn pat_lit(&self, span: Span, expr: P) -> P; + fn pat_ident(&self, span: Span, ident: ast::Ident) -> P; fn pat_ident_binding_mode(&self, span: Span, ident: ast::Ident, - bm: ast::BindingMode) -> Gc; - fn pat_enum(&self, span: Span, path: ast::Path, - subpats: Vec>) -> Gc; + bm: ast::BindingMode) -> P; + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec> ) -> P; fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: Vec ) -> Gc; - fn pat_tuple(&self, span: Span, pats: Vec>) -> Gc; + path: ast::Path, field_pats: Vec ) -> P; 
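Every `Gc` pointer in these `AstBuilder` signatures becomes a `P`. The following is a simplified sketch of the idea behind `syntax::ptr::P` (the layout and helper shown here are illustrative, not the exact definition): a uniquely owned box constructed with `P(value)`, whose `Clone` deep-copies the node. That is why call sites later in this patch that reuse a node gain explicit `.clone()`s, for example `binding_pat.clone()` in `expr_try`.

use std::ops::Deref;

// Illustrative owned pointer; the real syntax::ptr::P carries more machinery.
struct P<T> { ptr: Box<T> }

#[allow(non_snake_case)]
fn P<T>(value: T) -> P<T> {          // used as `P(expr)`, as in the diff
    P { ptr: Box::new(value) }
}

impl<T: Clone> Clone for P<T> {
    // Cloning duplicates the node itself; there is no shared Gc copy any more.
    fn clone(&self) -> P<T> { P((*self.ptr).clone()) }
}

impl<T> Deref for P<T> {
    type Target = T;
    fn deref(&self) -> &T { &*self.ptr }
}

fn main() {
    let a = P(vec![1u32, 2, 3]);
    let b = a.clone();               // explicit deep clone where Gc was a pointer copy
    assert_eq!(*a, *b);
}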
+ fn pat_tuple(&self, span: Span, pats: Vec>) -> P; - fn pat_some(&self, span: Span, pat: Gc) -> Gc; - fn pat_none(&self, span: Span) -> Gc; + fn pat_some(&self, span: Span, pat: P) -> P; + fn pat_none(&self, span: Span) -> P; - fn pat_ok(&self, span: Span, pat: Gc) -> Gc; - fn pat_err(&self, span: Span, pat: Gc) -> Gc; + fn pat_ok(&self, span: Span, pat: P) -> P; + fn pat_err(&self, span: Span, pat: P) -> P; - fn arm(&self, span: Span, pats: Vec> , expr: Gc) -> ast::Arm; + fn arm(&self, span: Span, pats: Vec>, expr: P) -> ast::Arm; fn arm_unreachable(&self, span: Span) -> ast::Arm; - fn expr_match(&self, span: Span, arg: Gc, arms: Vec ) -> Gc; + fn expr_match(&self, span: Span, arg: P, arms: Vec ) -> P; fn expr_if(&self, span: Span, - cond: Gc, then: Gc, - els: Option>) -> Gc; - fn expr_loop(&self, span: Span, block: P) -> Gc; + cond: P, then: P, els: Option>) -> P; + fn expr_loop(&self, span: Span, block: P) -> P; fn lambda_fn_decl(&self, span: Span, - fn_decl: P, blk: P) -> Gc; + fn_decl: P, blk: P) -> P; - fn lambda(&self, span: Span, ids: Vec , blk: P) -> Gc; - fn lambda0(&self, span: Span, blk: P) -> Gc; - fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> Gc; + fn lambda(&self, span: Span, ids: Vec , blk: P) -> P; + fn lambda0(&self, span: Span, blk: P) -> P; + fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> P; - fn lambda_expr(&self, span: Span, ids: Vec , blk: Gc) -> Gc; - fn lambda_expr_0(&self, span: Span, expr: Gc) -> Gc; - fn lambda_expr_1(&self, span: Span, expr: Gc, ident: ast::Ident) -> Gc; + fn lambda_expr(&self, span: Span, ids: Vec , blk: P) -> P; + fn lambda_expr_0(&self, span: Span, expr: P) -> P; + fn lambda_expr_1(&self, span: Span, expr: P, ident: ast::Ident) -> P; fn lambda_stmts(&self, span: Span, ids: Vec, - blk: Vec>) -> Gc; - fn lambda_stmts_0(&self, span: Span, - stmts: Vec>) -> Gc; - fn lambda_stmts_1(&self, span: Span, - stmts: Vec>, ident: ast::Ident) -> Gc; + blk: Vec>) -> P; + fn lambda_stmts_0(&self, span: Span, stmts: Vec>) -> P; + fn lambda_stmts_1(&self, span: Span, stmts: Vec>, + ident: ast::Ident) -> P; // items fn item(&self, span: Span, - name: Ident, attrs: Vec, - node: ast::Item_) -> Gc; + name: Ident, attrs: Vec , node: ast::Item_) -> P; fn arg(&self, span: Span, name: Ident, ty: P) -> ast::Arg; // FIXME unused self @@ -228,67 +216,64 @@ pub trait AstBuilder { inputs: Vec , output: P, generics: Generics, - body: P) -> Gc; + body: P) -> P; fn item_fn(&self, span: Span, name: Ident, inputs: Vec , output: P, - body: P) -> Gc; + body: P) -> P; fn variant(&self, span: Span, name: Ident, tys: Vec> ) -> ast::Variant; fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, - generics: Generics) -> Gc; - fn item_enum(&self, span: Span, name: Ident, - enum_def: ast::EnumDef) -> Gc; + generics: Generics) -> P; + fn item_enum(&self, span: Span, name: Ident, enum_def: ast::EnumDef) -> P; fn item_struct_poly(&self, span: Span, name: Ident, struct_def: ast::StructDef, - generics: Generics) -> Gc; - fn item_struct(&self, span: Span, name: Ident, - struct_def: ast::StructDef) -> Gc; + generics: Generics) -> P; + fn item_struct(&self, span: Span, name: Ident, struct_def: ast::StructDef) -> P; fn item_mod(&self, span: Span, inner_span: Span, name: Ident, attrs: Vec, - vi: Vec, - items: Vec>) -> Gc; + vi: Vec , items: Vec> ) -> P; fn item_static(&self, span: Span, name: Ident, ty: P, mutbl: ast::Mutability, - expr: Gc) - -> Gc; + expr: P) + -> P; fn item_ty_poly(&self, span: Span, name: Ident, ty: P, - generics: 
Generics) -> Gc; - fn item_ty(&self, span: Span, name: Ident, ty: P) -> Gc; + generics: Generics) -> P; + fn item_ty(&self, span: Span, name: Ident, ty: P) -> P; - fn attribute(&self, sp: Span, mi: Gc) -> ast::Attribute; + fn attribute(&self, sp: Span, mi: P) -> ast::Attribute; - fn meta_word(&self, sp: Span, w: InternedString) -> Gc; + fn meta_word(&self, sp: Span, w: InternedString) -> P; fn meta_list(&self, sp: Span, name: InternedString, - mis: Vec>) - -> Gc; + mis: Vec> ) + -> P; fn meta_name_value(&self, sp: Span, name: InternedString, value: ast::Lit_) - -> Gc; + -> P; fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: Gc) -> ast::ViewItem; + vis: ast::Visibility, vp: P) -> ast::ViewItem; fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem; fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> ast::ViewItem; @@ -447,16 +432,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.path_global(DUMMY_SP, vec!(p.ident)), None)).collect() } - fn strip_bounds(&self, generics: &Generics) -> Generics { - let new_params = generics.ty_params.map(|ty_param| { - ast::TyParam { bounds: OwnedSlice::empty(), unbound: None, ..*ty_param } - }); - Generics { - ty_params: new_params, - .. (*generics).clone() - } - } - fn trait_ref(&self, path: ast::Path) -> ast::TraitRef { ast::TraitRef { path: path, @@ -483,27 +458,27 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } } - fn stmt_expr(&self, expr: Gc) -> Gc { - box(GC) respan(expr.span, ast::StmtSemi(expr, ast::DUMMY_NODE_ID)) + fn stmt_expr(&self, expr: P) -> P { + P(respan(expr.span, ast::StmtSemi(expr, ast::DUMMY_NODE_ID))) } fn stmt_let(&self, sp: Span, mutbl: bool, ident: ast::Ident, - ex: Gc) -> Gc { + ex: P) -> P { let pat = if mutbl { self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable)) } else { self.pat_ident(sp, ident) }; - let local = box(GC) ast::Local { + let local = P(ast::Local { ty: self.ty_infer(sp), pat: pat, init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, source: ast::LocalLet, - }; + }); let decl = respan(sp, ast::DeclLocal(local)); - box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID)) + P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) } fn stmt_let_typed(&self, @@ -511,46 +486,43 @@ impl<'a> AstBuilder for ExtCtxt<'a> { mutbl: bool, ident: ast::Ident, typ: P, - ex: Gc) - -> Gc { + ex: P) + -> P { let pat = if mutbl { self.pat_ident_binding_mode(sp, ident, ast::BindByValue(ast::MutMutable)) } else { self.pat_ident(sp, ident) }; - let local = box(GC) ast::Local { + let local = P(ast::Local { ty: typ, pat: pat, init: Some(ex), id: ast::DUMMY_NODE_ID, span: sp, source: ast::LocalLet, - }; + }); let decl = respan(sp, ast::DeclLocal(local)); - box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID)) + P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) } - fn block(&self, - span: Span, - stmts: Vec>, - expr: Option>) - -> P { + fn block(&self, span: Span, stmts: Vec>, + expr: Option>) -> P { self.block_all(span, Vec::new(), stmts, expr) } - fn stmt_item(&self, sp: Span, item: Gc) -> Gc { + fn stmt_item(&self, sp: Span, item: P) -> P { let decl = respan(sp, ast::DeclItem(item)); - box(GC) respan(sp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID)) + P(respan(sp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) } - fn block_expr(&self, expr: Gc) -> P { + fn block_expr(&self, expr: P) -> P { self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr)) } fn block_all(&self, span: Span, - view_items: Vec , - 
stmts: Vec>, - expr: Option>) -> P { + view_items: Vec, + stmts: Vec>, + expr: Option>) -> P { P(ast::Block { view_items: view_items, stmts: stmts, @@ -561,42 +533,42 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }) } - fn expr(&self, span: Span, node: ast::Expr_) -> Gc { - box(GC) ast::Expr { + fn expr(&self, span: Span, node: ast::Expr_) -> P { + P(ast::Expr { id: ast::DUMMY_NODE_ID, node: node, span: span, - } + }) } - fn expr_path(&self, path: ast::Path) -> Gc { + fn expr_path(&self, path: ast::Path) -> P { self.expr(path.span, ast::ExprPath(path)) } - fn expr_ident(&self, span: Span, id: ast::Ident) -> Gc { + fn expr_ident(&self, span: Span, id: ast::Ident) -> P { self.expr_path(self.path_ident(span, id)) } - fn expr_self(&self, span: Span) -> Gc { + fn expr_self(&self, span: Span) -> P { self.expr_ident(span, special_idents::self_) } fn expr_binary(&self, sp: Span, op: ast::BinOp, - lhs: Gc, rhs: Gc) -> Gc { + lhs: P, rhs: P) -> P { self.expr(sp, ast::ExprBinary(op, lhs, rhs)) } - fn expr_deref(&self, sp: Span, e: Gc) -> Gc { + fn expr_deref(&self, sp: Span, e: P) -> P { self.expr_unary(sp, ast::UnDeref, e) } - fn expr_unary(&self, sp: Span, op: ast::UnOp, e: Gc) -> Gc { + fn expr_unary(&self, sp: Span, op: ast::UnOp, e: P) -> P { self.expr(sp, ast::ExprUnary(op, e)) } - fn expr_managed(&self, sp: Span, e: Gc) -> Gc { + fn expr_managed(&self, sp: Span, e: P) -> P { self.expr_unary(sp, ast::UnBox, e) } - fn expr_field_access(&self, sp: Span, expr: Gc, ident: ast::Ident) -> Gc { + fn expr_field_access(&self, sp: Span, expr: P, ident: ast::Ident) -> P { let field_name = token::get_ident(ident); let field_span = Span { lo: sp.lo - Pos::from_uint(field_name.get().len()), @@ -607,7 +579,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let id = Spanned { node: ident, span: field_span }; self.expr(sp, ast::ExprField(expr, id, Vec::new())) } - fn expr_tup_field_access(&self, sp: Span, expr: Gc, idx: uint) -> Gc { + fn expr_tup_field_access(&self, sp: Span, expr: P, idx: uint) -> P { let field_span = Span { lo: sp.lo - Pos::from_uint(idx.to_string().len()), hi: sp.hi, @@ -617,68 +589,67 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let id = Spanned { node: idx, span: field_span }; self.expr(sp, ast::ExprTupField(expr, id, Vec::new())) } - fn expr_addr_of(&self, sp: Span, e: Gc) -> Gc { + fn expr_addr_of(&self, sp: Span, e: P) -> P { self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e)) } - fn expr_mut_addr_of(&self, sp: Span, e: Gc) -> Gc { + fn expr_mut_addr_of(&self, sp: Span, e: P) -> P { self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e)) } - fn expr_call(&self, span: Span, expr: Gc, - args: Vec>) -> Gc { + fn expr_call(&self, span: Span, expr: P, args: Vec>) -> P { self.expr(span, ast::ExprCall(expr, args)) } fn expr_call_ident(&self, span: Span, id: ast::Ident, - args: Vec>) -> Gc { + args: Vec>) -> P { self.expr(span, ast::ExprCall(self.expr_ident(span, id), args)) } fn expr_call_global(&self, sp: Span, fn_path: Vec , - args: Vec> ) -> Gc { + args: Vec> ) -> P { let pathexpr = self.expr_path(self.path_global(sp, fn_path)); self.expr_call(sp, pathexpr, args) } fn expr_method_call(&self, span: Span, - expr: Gc, + expr: P, ident: ast::Ident, - mut args: Vec> ) -> Gc { + mut args: Vec> ) -> P { let id = Spanned { node: ident, span: span }; args.unshift(expr); self.expr(span, ast::ExprMethodCall(id, Vec::new(), args)) } - fn expr_block(&self, b: P) -> Gc { + fn expr_block(&self, b: P) -> P { self.expr(b.span, ast::ExprBlock(b)) } - fn field_imm(&self, span: Span, name: Ident, e: Gc) -> ast::Field { + fn 
field_imm(&self, span: Span, name: Ident, e: P) -> ast::Field { ast::Field { ident: respan(span, name), expr: e, span: span } } - fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec ) -> Gc { + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec) -> P { self.expr(span, ast::ExprStruct(path, fields, None)) } fn expr_struct_ident(&self, span: Span, - id: ast::Ident, fields: Vec ) -> Gc { + id: ast::Ident, fields: Vec) -> P { self.expr_struct(span, self.path_ident(span, id), fields) } - fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> Gc { - self.expr(sp, ast::ExprLit(box(GC) respan(sp, lit))) + fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> P { + self.expr(sp, ast::ExprLit(P(respan(sp, lit)))) } - fn expr_uint(&self, span: Span, i: uint) -> Gc { + fn expr_uint(&self, span: Span, i: uint) -> P { self.expr_lit(span, ast::LitInt(i as u64, ast::UnsignedIntLit(ast::TyU))) } - fn expr_int(&self, sp: Span, i: int) -> Gc { + fn expr_int(&self, sp: Span, i: int) -> P { self.expr_lit(sp, ast::LitInt(i as u64, ast::SignedIntLit(ast::TyI, ast::Sign::new(i)))) } - fn expr_u8(&self, sp: Span, u: u8) -> Gc { + fn expr_u8(&self, sp: Span, u: u8) -> P { self.expr_lit(sp, ast::LitInt(u as u64, ast::UnsignedIntLit(ast::TyU8))) } - fn expr_bool(&self, sp: Span, value: bool) -> Gc { + fn expr_bool(&self, sp: Span, value: bool) -> P { self.expr_lit(sp, ast::LitBool(value)) } - fn expr_vec(&self, sp: Span, exprs: Vec> ) -> Gc { + fn expr_vec(&self, sp: Span, exprs: Vec>) -> P { self.expr(sp, ast::ExprVec(exprs)) } - fn expr_vec_ng(&self, sp: Span) -> Gc { + fn expr_vec_ng(&self, sp: Span) -> P { self.expr_call_global(sp, vec!(self.ident_of("std"), self.ident_of("vec"), @@ -686,19 +657,19 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.ident_of("new")), Vec::new()) } - fn expr_vec_slice(&self, sp: Span, exprs: Vec> ) -> Gc { + fn expr_vec_slice(&self, sp: Span, exprs: Vec>) -> P { self.expr_addr_of(sp, self.expr_vec(sp, exprs)) } - fn expr_str(&self, sp: Span, s: InternedString) -> Gc { + fn expr_str(&self, sp: Span, s: InternedString) -> P { self.expr_lit(sp, ast::LitStr(s, ast::CookedStr)) } - fn expr_cast(&self, sp: Span, expr: Gc, ty: P) -> Gc { + fn expr_cast(&self, sp: Span, expr: P, ty: P) -> P { self.expr(sp, ast::ExprCast(expr, ty)) } - fn expr_some(&self, sp: Span, expr: Gc) -> Gc { + fn expr_some(&self, sp: Span, expr: P) -> P { let some = vec!( self.ident_of("std"), self.ident_of("option"), @@ -706,7 +677,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_call_global(sp, some, vec!(expr)) } - fn expr_none(&self, sp: Span) -> Gc { + fn expr_none(&self, sp: Span) -> P { let none = self.path_global(sp, vec!( self.ident_of("std"), self.ident_of("option"), @@ -714,11 +685,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_path(none) } - fn expr_tuple(&self, sp: Span, exprs: Vec>) -> Gc { + fn expr_tuple(&self, sp: Span, exprs: Vec>) -> P { self.expr(sp, ast::ExprTup(exprs)) } - fn expr_fail(&self, span: Span, msg: InternedString) -> Gc { + fn expr_fail(&self, span: Span, msg: InternedString) -> P { let loc = self.codemap().lookup_char_pos(span.lo); let expr_file = self.expr_str(span, token::intern_and_get_ident(loc.file @@ -738,13 +709,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { expr_file_line_ptr)) } - fn expr_unreachable(&self, span: Span) -> Gc { + fn expr_unreachable(&self, span: Span) -> P { self.expr_fail(span, InternedString::new( "internal error: entered unreachable code")) } - fn expr_ok(&self, sp: Span, expr: Gc) -> Gc { + fn expr_ok(&self, sp: Span, expr: P) -> P { let ok 
= vec!( self.ident_of("std"), self.ident_of("result"), @@ -752,7 +723,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_call_global(sp, ok, vec!(expr)) } - fn expr_err(&self, sp: Span, expr: Gc) -> Gc { + fn expr_err(&self, sp: Span, expr: P) -> P { let err = vec!( self.ident_of("std"), self.ident_of("result"), @@ -760,7 +731,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_call_global(sp, err, vec!(expr)) } - fn expr_try(&self, sp: Span, head: Gc) -> Gc { + fn expr_try(&self, sp: Span, head: P) -> P { let ok = self.ident_of("Ok"); let ok_path = self.path_ident(sp, ok); let err = self.ident_of("Err"); @@ -771,11 +742,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let binding_expr = self.expr_ident(sp, binding_variable); // Ok(__try_var) pattern - let ok_pat = self.pat_enum(sp, ok_path, vec!(binding_pat)); + let ok_pat = self.pat_enum(sp, ok_path, vec!(binding_pat.clone())); // Err(__try_var) (pattern and expression resp.) let err_pat = self.pat_enum(sp, err_path, vec!(binding_pat)); - let err_inner_expr = self.expr_call_ident(sp, err, vec!(binding_expr)); + let err_inner_expr = self.expr_call_ident(sp, err, vec!(binding_expr.clone())); // return Err(__try_var) let err_expr = self.expr(sp, ast::ExprRet(Some(err_inner_expr))); @@ -789,41 +760,41 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } - fn pat(&self, span: Span, pat: ast::Pat_) -> Gc { - box(GC) ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span } + fn pat(&self, span: Span, pat: ast::Pat_) -> P { + P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: span }) } - fn pat_wild(&self, span: Span) -> Gc { + fn pat_wild(&self, span: Span) -> P { self.pat(span, ast::PatWild(ast::PatWildSingle)) } - fn pat_lit(&self, span: Span, expr: Gc) -> Gc { + fn pat_lit(&self, span: Span, expr: P) -> P { self.pat(span, ast::PatLit(expr)) } - fn pat_ident(&self, span: Span, ident: ast::Ident) -> Gc { + fn pat_ident(&self, span: Span, ident: ast::Ident) -> P { self.pat_ident_binding_mode(span, ident, ast::BindByValue(ast::MutImmutable)) } fn pat_ident_binding_mode(&self, span: Span, ident: ast::Ident, - bm: ast::BindingMode) -> Gc { + bm: ast::BindingMode) -> P { let pat = ast::PatIdent(bm, Spanned{span: span, node: ident}, None); self.pat(span, pat) } - fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec> ) -> Gc { + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec>) -> P { let pat = ast::PatEnum(path, Some(subpats)); self.pat(span, pat) } fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: Vec ) -> Gc { + path: ast::Path, field_pats: Vec) -> P { let pat = ast::PatStruct(path, field_pats, false); self.pat(span, pat) } - fn pat_tuple(&self, span: Span, pats: Vec>) -> Gc { + fn pat_tuple(&self, span: Span, pats: Vec>) -> P { let pat = ast::PatTup(pats); self.pat(span, pat) } - fn pat_some(&self, span: Span, pat: Gc) -> Gc { + fn pat_some(&self, span: Span, pat: P) -> P { let some = vec!( self.ident_of("std"), self.ident_of("option"), @@ -832,7 +803,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.pat_enum(span, path, vec!(pat)) } - fn pat_none(&self, span: Span) -> Gc { + fn pat_none(&self, span: Span) -> P { let some = vec!( self.ident_of("std"), self.ident_of("option"), @@ -841,7 +812,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.pat_enum(span, path, vec!()) } - fn pat_ok(&self, span: Span, pat: Gc) -> Gc { + fn pat_ok(&self, span: Span, pat: P) -> P { let some = vec!( self.ident_of("std"), self.ident_of("result"), @@ -850,7 +821,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.pat_enum(span, path, vec!(pat)) } - 
fn pat_err(&self, span: Span, pat: Gc) -> Gc { + fn pat_err(&self, span: Span, pat: P) -> P { let some = vec!( self.ident_of("std"), self.ident_of("result"), @@ -859,7 +830,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.pat_enum(span, path, vec!(pat)) } - fn arm(&self, _span: Span, pats: Vec> , expr: Gc) -> ast::Arm { + fn arm(&self, _span: Span, pats: Vec>, expr: P) -> ast::Arm { ast::Arm { attrs: vec!(), pats: pats, @@ -872,64 +843,62 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span)) } - fn expr_match(&self, span: Span, arg: Gc, - arms: Vec) -> Gc { + fn expr_match(&self, span: Span, arg: P, arms: Vec) -> P { self.expr(span, ast::ExprMatch(arg, arms)) } - fn expr_if(&self, span: Span, - cond: Gc, then: Gc, - els: Option>) -> Gc { + fn expr_if(&self, span: Span, cond: P, + then: P, els: Option>) -> P { let els = els.map(|x| self.expr_block(self.block_expr(x))); self.expr(span, ast::ExprIf(cond, self.block_expr(then), els)) } - fn expr_loop(&self, span: Span, block: P) -> Gc { + fn expr_loop(&self, span: Span, block: P) -> P { self.expr(span, ast::ExprLoop(block, None)) } fn lambda_fn_decl(&self, span: Span, - fn_decl: P, blk: P) -> Gc { + fn_decl: P, blk: P) -> P { self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk)) } - fn lambda(&self, span: Span, ids: Vec , blk: P) -> Gc { + fn lambda(&self, span: Span, ids: Vec, blk: P) -> P { let fn_decl = self.fn_decl( ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(), self.ty_infer(span)); self.expr(span, ast::ExprFnBlock(ast::CaptureByRef, fn_decl, blk)) } - fn lambda0(&self, span: Span, blk: P) -> Gc { + fn lambda0(&self, span: Span, blk: P) -> P { self.lambda(span, Vec::new(), blk) } - fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> Gc { + fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> P { self.lambda(span, vec!(ident), blk) } - fn lambda_expr(&self, span: Span, ids: Vec , expr: Gc) -> Gc { + fn lambda_expr(&self, span: Span, ids: Vec, + expr: P) -> P { self.lambda(span, ids, self.block_expr(expr)) } - fn lambda_expr_0(&self, span: Span, expr: Gc) -> Gc { + fn lambda_expr_0(&self, span: Span, expr: P) -> P { self.lambda0(span, self.block_expr(expr)) } - fn lambda_expr_1(&self, span: Span, expr: Gc, ident: ast::Ident) -> Gc { + fn lambda_expr_1(&self, span: Span, expr: P, ident: ast::Ident) -> P { self.lambda1(span, self.block_expr(expr), ident) } fn lambda_stmts(&self, span: Span, ids: Vec, - stmts: Vec>) - -> Gc { + stmts: Vec>) + -> P { self.lambda(span, ids, self.block(span, stmts, None)) } - fn lambda_stmts_0(&self, span: Span, - stmts: Vec>) -> Gc { + fn lambda_stmts_0(&self, span: Span, stmts: Vec>) -> P { self.lambda0(span, self.block(span, stmts, None)) } - fn lambda_stmts_1(&self, span: Span, stmts: Vec>, - ident: ast::Ident) -> Gc { + fn lambda_stmts_1(&self, span: Span, stmts: Vec>, + ident: ast::Ident) -> P { self.lambda1(span, self.block(span, stmts, None), ident) } @@ -952,17 +921,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }) } - fn item(&self, span: Span, - name: Ident, attrs: Vec, - node: ast::Item_) -> Gc { + fn item(&self, span: Span, name: Ident, + attrs: Vec, node: ast::Item_) -> P { // FIXME: Would be nice if our generated code didn't violate // Rust coding conventions - box(GC) ast::Item { ident: name, - attrs: attrs, - id: ast::DUMMY_NODE_ID, - node: node, - vis: ast::Inherited, - span: span } + P(ast::Item { + ident: name, + attrs: attrs, + id: ast::DUMMY_NODE_ID, + node: node, + vis: 
ast::Inherited, + span: span + }) } fn item_fn_poly(&self, @@ -971,7 +941,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { inputs: Vec , output: P, generics: Generics, - body: P) -> Gc { + body: P) -> P { self.item(span, name, Vec::new(), @@ -988,7 +958,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { inputs: Vec , output: P, body: P - ) -> Gc { + ) -> P { self.item_fn_poly( span, name, @@ -1016,18 +986,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, - generics: Generics) -> Gc { + generics: Generics) -> P { self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics)) } fn item_enum(&self, span: Span, name: Ident, - enum_definition: ast::EnumDef) -> Gc { + enum_definition: ast::EnumDef) -> P { self.item_enum_poly(span, name, enum_definition, ast_util::empty_generics()) } fn item_struct(&self, span: Span, name: Ident, - struct_def: ast::StructDef) -> Gc { + struct_def: ast::StructDef) -> P { self.item_struct_poly( span, name, @@ -1037,14 +1007,14 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn item_struct_poly(&self, span: Span, name: Ident, - struct_def: ast::StructDef, generics: Generics) -> Gc { - self.item(span, name, Vec::new(), ast::ItemStruct(box(GC) struct_def, generics)) + struct_def: ast::StructDef, generics: Generics) -> P { + self.item(span, name, Vec::new(), ast::ItemStruct(P(struct_def), generics)) } fn item_mod(&self, span: Span, inner_span: Span, name: Ident, attrs: Vec , vi: Vec , - items: Vec>) -> Gc { + items: Vec> ) -> P { self.item( span, name, @@ -1062,21 +1032,21 @@ impl<'a> AstBuilder for ExtCtxt<'a> { name: Ident, ty: P, mutbl: ast::Mutability, - expr: Gc) - -> Gc { + expr: P) + -> P { self.item(span, name, Vec::new(), ast::ItemStatic(ty, mutbl, expr)) } fn item_ty_poly(&self, span: Span, name: Ident, ty: P, - generics: Generics) -> Gc { + generics: Generics) -> P { self.item(span, name, Vec::new(), ast::ItemTy(ty, generics)) } - fn item_ty(&self, span: Span, name: Ident, ty: P) -> Gc { + fn item_ty(&self, span: Span, name: Ident, ty: P) -> P { self.item_ty_poly(span, name, ty, ast_util::empty_generics()) } - fn attribute(&self, sp: Span, mi: Gc) -> ast::Attribute { + fn attribute(&self, sp: Span, mi: P) -> ast::Attribute { respan(sp, ast::Attribute_ { id: attr::mk_attr_id(), style: ast::AttrOuter, @@ -1085,26 +1055,26 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }) } - fn meta_word(&self, sp: Span, w: InternedString) -> Gc { - box(GC) respan(sp, ast::MetaWord(w)) + fn meta_word(&self, sp: Span, w: InternedString) -> P { + P(respan(sp, ast::MetaWord(w))) } fn meta_list(&self, sp: Span, name: InternedString, - mis: Vec> ) - -> Gc { - box(GC) respan(sp, ast::MetaList(name, mis)) + mis: Vec> ) + -> P { + P(respan(sp, ast::MetaList(name, mis))) } fn meta_name_value(&self, sp: Span, name: InternedString, value: ast::Lit_) - -> Gc { - box(GC) respan(sp, ast::MetaNameValue(name, respan(sp, value))) + -> P { + P(respan(sp, ast::MetaNameValue(name, respan(sp, value)))) } fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: Gc) -> ast::ViewItem { + vis: ast::Visibility, vp: P) -> ast::ViewItem { ast::ViewItem { node: ast::ViewItemUse(vp), attrs: Vec::new(), @@ -1121,10 +1091,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> ast::ViewItem { self.view_use(sp, vis, - box(GC) respan(sp, - ast::ViewPathSimple(ident, - path, - ast::DUMMY_NODE_ID))) + P(respan(sp, + ast::ViewPathSimple(ident, + path, + 
ast::DUMMY_NODE_ID)))) } fn view_use_list(&self, sp: Span, vis: ast::Visibility, @@ -1134,41 +1104,16 @@ impl<'a> AstBuilder for ExtCtxt<'a> { }).collect(); self.view_use(sp, vis, - box(GC) respan(sp, - ast::ViewPathList(self.path(sp, path), - imports, - ast::DUMMY_NODE_ID))) + P(respan(sp, + ast::ViewPathList(self.path(sp, path), + imports, + ast::DUMMY_NODE_ID)))) } fn view_use_glob(&self, sp: Span, vis: ast::Visibility, path: Vec ) -> ast::ViewItem { self.view_use(sp, vis, - box(GC) respan(sp, - ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID))) - } -} - -struct Duplicator<'a>; - -impl<'a> Folder for Duplicator<'a> { - fn new_id(&mut self, _: NodeId) -> NodeId { - ast::DUMMY_NODE_ID - } -} - -pub trait Duplicate { - // - // Duplication functions - // - // These functions just duplicate AST nodes. - // - - fn duplicate(&self, cx: &ExtCtxt) -> Self; -} - -impl Duplicate for Gc { - fn duplicate(&self, _: &ExtCtxt) -> Gc { - let mut folder = Duplicator; - folder.fold_expr(*self) + P(respan(sp, + ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID)))) } } diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs index 183675114954e..3e0f340ad7ff6 100644 --- a/src/libsyntax/ext/bytes.rs +++ b/src/libsyntax/ext/bytes.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, for expr in exprs.iter() { match expr.node { // expression is a literal - ast::ExprLit(lit) => match lit.node { + ast::ExprLit(ref lit) => match lit.node { // string literal, push each byte to vector expression ast::LitStr(ref s, _) => { for byte in s.get().bytes() { diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 0c3a951c98241..79cb47fee7b45 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -40,10 +40,10 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, } // test_cfg searches for meta items looking like `cfg(foo, ...)` - let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; + let in_cfg = Some(cx.meta_list(sp, InternedString::new("cfg"), cfgs)); let matches_cfg = attr::test_cfg(cx.cfg().as_slice(), - in_cfg.iter().map(|&x| x)); + in_cfg.iter()); let e = cx.expr_bool(sp, matches_cfg); MacExpr::new(e) } diff --git a/src/libsyntax/ext/concat.rs b/src/libsyntax/ext/concat.rs index ea7a4d061c0c5..455148bfedd50 100644 --- a/src/libsyntax/ext/concat.rs +++ b/src/libsyntax/ext/concat.rs @@ -27,7 +27,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, let mut accumulator = String::new(); for e in es.move_iter() { match e.node { - ast::ExprLit(lit) => { + ast::ExprLit(ref lit) => { match lit.node { ast::LitStr(ref s, _) | ast::LitFloat(ref s, _) | diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 0ac26a3a90490..145412caa0bfe 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -15,8 +15,7 @@ use ext::base; use owned_slice::OwnedSlice; use parse::token; use parse::token::{str_to_ident}; - -use std::gc::GC; +use ptr::P; pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { @@ -44,7 +43,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } let res = str_to_ident(res_str.as_slice()); - let e = box(GC) ast::Expr { + let e = P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath( ast::Path { @@ -60,6 +59,6 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree] } ), span: sp, - }; + }); MacExpr::new(e) } diff --git a/src/libsyntax/ext/deriving/bounds.rs 
b/src/libsyntax/ext/deriving/bounds.rs index 7cff6e8ff3c01..0595b0bc7f440 100644 --- a/src/libsyntax/ext/deriving/bounds.rs +++ b/src/libsyntax/ext/deriving/bounds.rs @@ -13,14 +13,13 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_bound(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let name = match mitem.node { MetaWord(ref tname) => { diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index bbe96018f4b3d..64607ffd5d4c9 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -15,14 +15,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_clone(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let inline = cx.meta_word(span, InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { @@ -52,12 +51,12 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, fn cs_clone( name: &str, cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { + substr: &Substructure) -> P { let clone_ident = substr.method_ident; let ctor_ident; let all_fields; let subcall = |field: &FieldInfo| - cx.expr_method_call(field.span, field.self_, clone_ident, Vec::new()); + cx.expr_method_call(field.span, field.self_.clone(), clone_ident, Vec::new()); match *substr.fields { Struct(ref af) => { diff --git a/src/libsyntax/ext/deriving/cmp/eq.rs b/src/libsyntax/ext/deriving/cmp/eq.rs index 19a979a5655ba..a27016fde6156 100644 --- a/src/libsyntax/ext/deriving/cmp/eq.rs +++ b/src/libsyntax/ext/deriving/cmp/eq.rs @@ -15,21 +15,20 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_eq(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { // structures are equal if all fields are equal, and non equal, if // any fields are not equal or if the enum variants are different - fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> Gc { + fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { cs_and(|cx, span, _, _| cx.expr_bool(span, false), cx, span, substr) } - fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> Gc { + fn cs_ne(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { cs_or(|cx, span, _, _| cx.expr_bool(span, true), cx, span, substr) } diff --git a/src/libsyntax/ext/deriving/cmp/ord.rs b/src/libsyntax/ext/deriving/cmp/ord.rs index dcf59ba820e4c..7cb61d295c0d4 100644 --- a/src/libsyntax/ext/deriving/cmp/ord.rs +++ b/src/libsyntax/ext/deriving/cmp/ord.rs @@ -16,14 +16,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_ord(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { macro_rules! 
md ( ($name:expr, $op:expr, $equal:expr) => { { let inline = cx.meta_word(span, InternedString::new("inline")); @@ -87,7 +86,7 @@ pub enum OrderingOp { pub fn some_ordering_collapsed(cx: &mut ExtCtxt, span: Span, op: OrderingOp, - self_arg_tags: &[ast::Ident]) -> Gc { + self_arg_tags: &[ast::Ident]) -> P { let lft = cx.expr_ident(span, self_arg_tags[0]); let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); let op_str = match op { @@ -99,7 +98,7 @@ pub fn some_ordering_collapsed(cx: &mut ExtCtxt, } pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, - substr: &Substructure) -> Gc { + substr: &Substructure) -> P { let test_id = cx.ident_of("__test"); let ordering = cx.path_global(span, vec!(cx.ident_of("std"), @@ -159,8 +158,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, } /// Strict inequality. -fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, - substr: &Substructure) -> Gc { +fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, + span: Span, substr: &Substructure) -> P { let op = if less {ast::BiLt} else {ast::BiGt}; cs_fold( false, // need foldr, @@ -183,14 +182,14 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, span: Span, layers of pointers, if the type includes pointers. */ let other_f = match other_fs { - [o_f] => o_f, + [ref o_f] => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `deriving(Ord)`") }; - let cmp = cx.expr_binary(span, op, self_f, other_f); + let cmp = cx.expr_binary(span, op, self_f.clone(), other_f.clone()); let not_cmp = cx.expr_unary(span, ast::UnNot, - cx.expr_binary(span, op, other_f, self_f)); + cx.expr_binary(span, op, other_f.clone(), self_f)); let and = cx.expr_binary(span, ast::BiAnd, not_cmp, subexpr); cx.expr_binary(span, ast::BiOr, cmp, and) diff --git a/src/libsyntax/ext/deriving/cmp/totaleq.rs b/src/libsyntax/ext/deriving/cmp/totaleq.rs index 42365936c9d4f..98c8885f7fa01 100644 --- a/src/libsyntax/ext/deriving/cmp/totaleq.rs +++ b/src/libsyntax/ext/deriving/cmp/totaleq.rs @@ -15,16 +15,14 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { - fn cs_total_eq_assert(cx: &mut ExtCtxt, span: Span, - substr: &Substructure) -> Gc { + mitem: &MetaItem, + item: &Item, + push: |P|) { + fn cs_total_eq_assert(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P { cs_same_method(|cx, span, exprs| { // create `a.(); b.(); c.(); ...` // (where method is `assert_receiver_is_total_eq`) diff --git a/src/libsyntax/ext/deriving/cmp/totalord.rs b/src/libsyntax/ext/deriving/cmp/totalord.rs index e010b635fe41a..9ef463f9c630e 100644 --- a/src/libsyntax/ext/deriving/cmp/totalord.rs +++ b/src/libsyntax/ext/deriving/cmp/totalord.rs @@ -16,14 +16,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_totalord(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let inline = cx.meta_word(span, InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { @@ -53,14 +52,14 @@ pub fn expand_deriving_totalord(cx: &mut ExtCtxt, pub fn ordering_collapsed(cx: &mut ExtCtxt, span: Span, - self_arg_tags: &[ast::Ident]) -> Gc { + self_arg_tags: &[ast::Ident]) -> P { let lft = 
cx.expr_ident(span, self_arg_tags[0]); let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1])); cx.expr_method_call(span, lft, cx.ident_of("cmp"), vec![rgt]) } pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, - substr: &Substructure) -> Gc { + substr: &Substructure) -> P { let test_id = cx.ident_of("__test"); let equals_path = cx.path_global(span, vec!(cx.ident_of("std"), diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index d909ffd2b49fb..fd24f5e35a446 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -21,14 +21,13 @@ use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; use parse::token; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_decodable(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let trait_def = TraitDef { span: span, attributes: Vec::new(), @@ -64,15 +63,15 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, } fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { - let decoder = substr.nonself_args[0]; + substr: &Substructure) -> P { + let decoder = substr.nonself_args[0].clone(); let recurse = vec!(cx.ident_of("serialize"), cx.ident_of("Decodable"), cx.ident_of("decode")); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_d"); let blkdecoder = cx.expr_ident(trait_span, blkarg); - let calldecode = cx.expr_call_global(trait_span, recurse, vec!(blkdecoder)); + let calldecode = cx.expr_call_global(trait_span, recurse, vec!(blkdecoder.clone())); let lambdadecode = cx.lambda_expr_1(trait_span, calldecode, blkarg); return match *substr.fields { @@ -89,10 +88,10 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, summary, |cx, span, name, field| { cx.expr_try(span, - cx.expr_method_call(span, blkdecoder, read_struct_field, + cx.expr_method_call(span, blkdecoder.clone(), read_struct_field, vec!(cx.expr_str(span, name), cx.expr_uint(span, field), - lambdadecode))) + lambdadecode.clone()))) }); let result = cx.expr_ok(trait_span, result); cx.expr_method_call(trait_span, @@ -121,8 +120,8 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, |cx, span, _, field| { let idx = cx.expr_uint(span, field); cx.expr_try(span, - cx.expr_method_call(span, blkdecoder, rvariant_arg, - vec!(idx, lambdadecode))) + cx.expr_method_call(span, blkdecoder.clone(), rvariant_arg, + vec!(idx, lambdadecode.clone()))) }); arms.push(cx.arm(v_span, @@ -159,8 +158,8 @@ fn decode_static_fields(cx: &mut ExtCtxt, trait_span: Span, outer_pat_ident: Ident, fields: &StaticFields, - getarg: |&mut ExtCtxt, Span, InternedString, uint| -> Gc) - -> Gc { + getarg: |&mut ExtCtxt, Span, InternedString, uint| -> P) + -> P { match *fields { Unnamed(ref fields) => { if fields.is_empty() { diff --git a/src/libsyntax/ext/deriving/default.rs b/src/libsyntax/ext/deriving/default.rs index f7d0308e1bd21..f4a66414d89bd 100644 --- a/src/libsyntax/ext/deriving/default.rs +++ b/src/libsyntax/ext/deriving/default.rs @@ -15,14 +15,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_default(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let inline = cx.meta_word(span, 
InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { @@ -47,8 +46,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { +fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { let default_ident = vec!( cx.ident_of("std"), cx.ident_of("default"), diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 02a748eed8e47..103253560df65 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -86,14 +86,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_encodable(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let trait_def = TraitDef { span: span, attributes: Vec::new(), @@ -131,8 +130,8 @@ pub fn expand_deriving_encodable(cx: &mut ExtCtxt, } fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { - let encoder = substr.nonself_args[0]; + substr: &Substructure) -> P { + let encoder = substr.nonself_args[0].clone(); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_e"); let blkencoder = cx.expr_ident(trait_span, blkarg); @@ -145,7 +144,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let last = fields.len() - 1; for (i, &FieldInfo { name, - self_, + ref self_, span, .. }) in fields.iter().enumerate() { @@ -156,9 +155,10 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, i).as_slice()) } }; - let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); + let enc = cx.expr_method_call(span, self_.clone(), + encode, vec!(blkencoder.clone())); let lambda = cx.lambda_expr_1(span, enc, blkarg); - let call = cx.expr_method_call(span, blkencoder, + let call = cx.expr_method_call(span, blkencoder.clone(), emit_struct_field, vec!(cx.expr_str(span, name), cx.expr_uint(span, i), @@ -202,10 +202,11 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let emit_variant_arg = cx.ident_of("emit_enum_variant_arg"); let mut stmts = Vec::new(); let last = fields.len() - 1; - for (i, &FieldInfo { self_, span, .. }) in fields.iter().enumerate() { - let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); + for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() { + let enc = cx.expr_method_call(span, self_.clone(), + encode, vec!(blkencoder.clone())); let lambda = cx.lambda_expr_1(span, enc, blkarg); - let call = cx.expr_method_call(span, blkencoder, + let call = cx.expr_method_call(span, blkencoder.clone(), emit_variant_arg, vec!(cx.expr_uint(span, i), lambda)); diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 50bdc296aad76..53af5a86ed2c6 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -181,12 +181,13 @@ //! 
~~~ use std::cell::RefCell; -use std::gc::{Gc, GC}; +use std::gc::GC; +use std::vec; use abi::Abi; use abi; use ast; -use ast::{P, EnumDef, Expr, Ident, Generics, StructDef}; +use ast::{EnumDef, Expr, Ident, Generics, StructDef}; use ast_util; use attr; use attr::AttrMetaMethods; @@ -194,9 +195,11 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use codemap; use codemap::Span; +use fold::MoveMap; use owned_slice::OwnedSlice; use parse::token::InternedString; use parse::token::special_idents; +use ptr::P; use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self, Ty}; @@ -251,9 +254,9 @@ pub struct Substructure<'a> { /// ident of the method pub method_ident: Ident, /// dereferenced access to any Self or Ptr(Self, _) arguments - pub self_args: &'a [Gc], + pub self_args: &'a [P], /// verbatim access to any other arguments - pub nonself_args: &'a [Gc], + pub nonself_args: &'a [P], pub fields: &'a SubstructureFields<'a> } @@ -265,10 +268,10 @@ pub struct FieldInfo { pub name: Option, /// The expression corresponding to this field of `self` /// (specifically, a reference to it). - pub self_: Gc, + pub self_: P, /// The expressions corresponding to references to this field in /// the other Self arguments. - pub other: Vec>, + pub other: Vec>, } /// Fields for a static method @@ -298,7 +301,7 @@ pub enum SubstructureFields<'a> { Idents bound to the variant index values for each of the actual input Self arguments. */ - EnumNonMatchingCollapsed(Vec, &'a [Gc], &'a [Ident]), + EnumNonMatchingCollapsed(Vec, &'a [P], &'a [Ident]), /// A static method where Self is a struct. StaticStruct(&'a ast::StructDef, StaticFields), @@ -313,7 +316,7 @@ Combine the values of all the fields together. The last argument is all the fields of all the structures, see above for details. */ pub type CombineSubstructureFunc<'a> = - |&mut ExtCtxt, Span, &Substructure|: 'a -> Gc; + |&mut ExtCtxt, Span, &Substructure|: 'a -> P; /** Deal with non-matching enum variants. The tuple is a list of @@ -324,10 +327,10 @@ last argument is all the non-Self args of the method being derived. 
*/ pub type EnumNonMatchCollapsedFunc<'a> = |&mut ExtCtxt, - Span, - (&[Ident], &[Ident]), - &[Gc]|: 'a - -> Gc; + Span, + (&[Ident], &[Ident]), + &[P]|: 'a + -> P; pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>) -> RefCell> { @@ -338,9 +341,9 @@ pub fn combine_substructure<'a>(f: CombineSubstructureFunc<'a>) impl<'a> TraitDef<'a> { pub fn expand(&self, cx: &mut ExtCtxt, - _mitem: Gc, - item: Gc, - push: |Gc|) { + _mitem: &ast::MetaItem, + item: &ast::Item, + push: |P|) { let newitem = match item.node { ast::ItemStruct(ref struct_def, ref generics) => { self.expand_struct_def(cx, @@ -365,10 +368,10 @@ impl<'a> TraitDef<'a> { _ => false, } }).map(|a| a.clone())); - push(box(GC) ast::Item { + push(P(ast::Item { attrs: attrs, ..(*newitem).clone() - }) + })) } /** @@ -387,7 +390,7 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt, type_ident: Ident, generics: &Generics, - methods: Vec> ) -> Gc { + methods: Vec>) -> P { let trait_path = self.path.to_path(cx, self.span, type_ident, generics); let Generics { mut lifetimes, ty_params, where_clause: _ } = @@ -475,7 +478,7 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt, struct_def: &StructDef, type_ident: Ident, - generics: &Generics) -> Gc { + generics: &Generics) -> P { let methods = self.methods.iter().map(|method_def| { let (explicit_self, self_args, nonself_args, tys) = method_def.split_self_nonself_args( @@ -515,7 +518,7 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt, enum_def: &EnumDef, type_ident: Ident, - generics: &Generics) -> Gc { + generics: &Generics) -> P { let methods = self.methods.iter().map(|method_def| { let (explicit_self, self_args, nonself_args, tys) = method_def.split_self_nonself_args(cx, self, @@ -534,7 +537,7 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args.as_slice(), + self_args, nonself_args.as_slice()) }; @@ -553,7 +556,7 @@ impl<'a> TraitDef<'a> { } fn variant_to_pat(cx: &mut ExtCtxt, sp: Span, variant: &ast::Variant) - -> Gc { + -> P { let ident = cx.path_ident(sp, variant.node.name); cx.pat(sp, match variant.node.kind { ast::TupleVariantKind(..) => ast::PatEnum(ident, None), @@ -566,10 +569,10 @@ impl<'a> MethodDef<'a> { cx: &mut ExtCtxt, trait_: &TraitDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc], + self_args: &[P], + nonself_args: &[P], fields: &SubstructureFields) - -> Gc { + -> P { let substructure = Substructure { type_ident: type_ident, method_ident: cx.ident_of(self.name), @@ -600,8 +603,7 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, type_ident: Ident, generics: &Generics) - -> (ast::ExplicitSelf, Vec>, Vec>, - Vec<(Ident, P)>) { + -> (ast::ExplicitSelf, Vec>, Vec>, Vec<(Ident, P)>) { let mut self_args = Vec::new(); let mut nonself_args = Vec::new(); @@ -654,8 +656,7 @@ impl<'a> MethodDef<'a> { abi: Abi, explicit_self: ast::ExplicitSelf, arg_types: Vec<(Ident, P)> , - body: Gc) - -> Gc { + body: P) -> P { // create the generics that aren't for Self let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); @@ -678,7 +679,7 @@ impl<'a> MethodDef<'a> { let body_block = cx.block_expr(body); // Create the method. 
- box(GC) ast::Method { + P(ast::Method { attrs: self.attributes.clone(), id: ast::DUMMY_NODE_ID, span: trait_.span, @@ -690,7 +691,7 @@ impl<'a> MethodDef<'a> { fn_decl, body_block, ast::Inherited) - } + }) } /** @@ -719,9 +720,9 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, struct_def: &StructDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc]) - -> Gc { + self_args: &[P], + nonself_args: &[P]) + -> P { let mut raw_fields = Vec::new(); // ~[[fields of self], // [fields of next Self arg], [etc]] @@ -740,20 +741,20 @@ impl<'a> MethodDef<'a> { // transpose raw_fields let fields = if raw_fields.len() > 0 { - raw_fields.get(0) - .iter() - .enumerate() - .map(|(i, &(span, opt_id, field))| { - let other_fields = raw_fields.tail().iter().map(|l| { - match l.get(i) { - &(_, _, ex) => ex - } - }).collect(); + let mut raw_fields = raw_fields.move_iter().map(|v| v.move_iter()); + let first_field = raw_fields.next().unwrap(); + let mut other_fields: Vec, P)>> + = raw_fields.collect(); + first_field.map(|(span, opt_id, field)| { FieldInfo { span: span, name: opt_id, self_: field, - other: other_fields + other: other_fields.mut_iter().map(|l| { + match l.next().unwrap() { + (_, _, ex) => ex + } + }).collect() } }).collect() } else { @@ -774,9 +775,9 @@ impl<'a> MethodDef<'a> { // make a series of nested matches, to destructure the // structs. This is actually right-to-left, but it shouldn't // matter. - for (&arg_expr, &pat) in self_args.iter().zip(patterns.iter()) { - body = cx.expr_match(trait_.span, arg_expr, - vec!( cx.arm(trait_.span, vec!(pat), body) )) + for (arg_expr, pat) in self_args.iter().zip(patterns.iter()) { + body = cx.expr_match(trait_.span, arg_expr.clone(), + vec!( cx.arm(trait_.span, vec!(pat.clone()), body) )) } body } @@ -786,9 +787,9 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, struct_def: &StructDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc]) - -> Gc { + self_args: &[P], + nonself_args: &[P]) + -> P { let summary = trait_.summarise_struct(cx, struct_def); self.call_substructure_method(cx, @@ -834,9 +835,9 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, enum_def: &EnumDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc]) - -> Gc { + self_args: Vec>, + nonself_args: &[P]) + -> P { self.build_enum_match_tuple( cx, trait_, enum_def, type_ident, self_args, nonself_args) } @@ -875,8 +876,8 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, enum_def: &EnumDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc]) -> Gc { + self_args: Vec>, + nonself_args: &[P]) -> P { let sp = trait_.span; let variants = &enum_def.variants; @@ -898,7 +899,7 @@ impl<'a> MethodDef<'a> { // The `vi_idents` will be bound, solely in the catch-all, to // a series of let statements mapping each self_arg to a uint // corresponding to its variant index. - let vi_idents : Vec = self_arg_names.iter() + let vi_idents: Vec = self_arg_names.iter() .map(|name| { let vi_suffix = format!("{:s}_vi", name.as_slice()); cx.ident_of(vi_suffix.as_slice()) }) .collect::>(); @@ -914,24 +915,29 @@ impl<'a> MethodDef<'a> { // (Variant2, Variant2, ...) => Body2 // ... // where each tuple has length = self_args.len() - let mut match_arms : Vec = variants.iter().enumerate() - .map(|(index, &variant)| { - - // These self_pats have form Variant1, Variant2, ... 
- let self_pats : Vec<(Gc, - Vec<(Span, Option, Gc)>)>; - self_pats = self_arg_names.iter() - .map(|self_arg_name| - trait_.create_enum_variant_pattern( - cx, &*variant, self_arg_name.as_slice(), - ast::MutImmutable)) - .collect(); + let mut match_arms: Vec = variants.iter().enumerate() + .map(|(index, variant)| { + let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| { + let (p, idents) = trait_.create_enum_variant_pattern(cx, &**variant, + self_arg_name, + ast::MutImmutable); + (cx.pat(sp, ast::PatRegion(p)), idents) + }; // A single arm has form (&VariantK, &VariantK, ...) => BodyK // (see "Final wrinkle" note below for why.) - let subpats = self_pats.iter() - .map(|&(p, ref _idents)| cx.pat(sp, ast::PatRegion(p))) - .collect::>>(); + let mut subpats = Vec::with_capacity(self_arg_names.len()); + let mut self_pats_idents = Vec::with_capacity(self_arg_names.len() - 1); + let first_self_pat_idents = { + let (p, idents) = mk_self_pat(cx, self_arg_names[0].as_slice()); + subpats.push(p); + idents + }; + for self_arg_name in self_arg_names.tail().iter() { + let (p, idents) = mk_self_pat(cx, self_arg_name.as_slice()); + subpats.push(p); + self_pats_idents.push(idents); + } // Here is the pat = `(&VariantK, &VariantK, ...)` let single_pat = cx.pat(sp, ast::PatTup(subpats)); @@ -941,39 +947,33 @@ impl<'a> MethodDef<'a> { // we are in. // All of the Self args have the same variant in these - // cases. So we transpose the info in self_pats to - // gather the getter expressions together, in the form - // that EnumMatching expects. + // cases. So we transpose the info in self_pats_idents + // to gather the getter expressions together, in the + // form that EnumMatching expects. // The transposition is driven by walking across the // arg fields of the variant for the first self pat. - let &(_, ref self_arg_fields) = self_pats.get(0); - - let field_tuples : Vec; - - field_tuples = self_arg_fields.iter().enumerate() + let field_tuples = first_self_pat_idents.move_iter().enumerate() // For each arg field of self, pull out its getter expr ... - .map(|(field_index, &(sp, opt_ident, self_getter_expr))| { + .map(|(field_index, (sp, opt_ident, self_getter_expr))| { // ... but FieldInfo also wants getter expr // for matching other arguments of Self type; - // so walk across the *other* self_pats and - // pull out getter for same field in each of - // them (using `field_index` tracked above). + // so walk across the *other* self_pats_idents + // and pull out getter for same field in each + // of them (using `field_index` tracked above). // That is the heart of the transposition. - let others = self_pats.tail().iter() - .map(|&(_pat, ref fields)| { + let others = self_pats_idents.iter().map(|fields| { + let &(_, _opt_ident, ref other_getter_expr) = + fields.get(field_index); - let &(_, _opt_ident, other_getter_expr) = - fields.get(field_index); + // All Self args have same variant, so + // opt_idents are the same. (Assert + // here to make it self-evident that + // it is okay to ignore `_opt_ident`.) + assert!(opt_ident == _opt_ident); - // All Self args have same variant, so - // opt_idents are the same. (Assert - // here to make it self-evident that - // it is okay to ignore `_opt_ident`.) - assert!(opt_ident == _opt_ident); - - other_getter_expr - }).collect::>>(); + other_getter_expr.clone() + }).collect::>>(); FieldInfo { span: sp, name: opt_ident, @@ -987,10 +987,10 @@ impl<'a> MethodDef<'a> { // Self arg, assuming all are instances of VariantK. // Build up code associated with such a case. 
let substructure = EnumMatching(index, - &*variant, + &**variant, field_tuples); let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args, nonself_args, + cx, trait_, type_ident, self_args.as_slice(), nonself_args, &substructure); cx.arm(sp, vec![single_pat], arm_expr) @@ -1012,9 +1012,9 @@ impl<'a> MethodDef<'a> { // unreachable-pattern error. // if variants.len() > 1 && self_args.len() > 1 { - let arms : Vec = variants.iter().enumerate() - .map(|(index, &variant)| { - let pat = variant_to_pat(cx, sp, &*variant); + let arms: Vec = variants.iter().enumerate() + .map(|(index, variant)| { + let pat = variant_to_pat(cx, sp, &**variant); let lit = ast::LitInt(index as u64, ast::UnsignedIntLit(ast::TyU)); cx.arm(sp, vec![pat], cx.expr_lit(sp, lit)) }).collect(); @@ -1035,15 +1035,15 @@ impl<'a> MethodDef<'a> { // A => 0u, B(..) => 1u, C(..) => 2u // }; // ``` - let mut index_let_stmts : Vec> = Vec::new(); - for (&ident, &self_arg) in vi_idents.iter().zip(self_args.iter()) { - let variant_idx = cx.expr_match(sp, self_arg, arms.clone()); + let mut index_let_stmts: Vec> = Vec::new(); + for (&ident, self_arg) in vi_idents.iter().zip(self_args.iter()) { + let variant_idx = cx.expr_match(sp, self_arg.clone(), arms.clone()); let let_stmt = cx.stmt_let(sp, false, ident, variant_idx); index_let_stmts.push(let_stmt); } let arm_expr = self.call_substructure_method( - cx, trait_, type_ident, self_args, nonself_args, + cx, trait_, type_ident, self_args.as_slice(), nonself_args, &catch_all_substructure); // Builds the expression: @@ -1124,9 +1124,7 @@ impl<'a> MethodDef<'a> { // them when they are fed as r-values into a tuple // expression; here add a layer of borrowing, turning // `(*self, *__arg_0, ...)` into `(&*self, &*__arg_0, ...)`. - let borrowed_self_args = self_args.iter() - .map(|&self_arg| cx.expr_addr_of(sp, self_arg)) - .collect::>>(); + let borrowed_self_args = self_args.move_map(|self_arg| cx.expr_addr_of(sp, self_arg)); let match_arg = cx.expr(sp, ast::ExprTup(borrowed_self_args)); cx.expr_match(sp, match_arg, match_arms) } @@ -1136,9 +1134,9 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, enum_def: &EnumDef, type_ident: Ident, - self_args: &[Gc], - nonself_args: &[Gc]) - -> Gc { + self_args: &[P], + nonself_args: &[P]) + -> P { let summary = enum_def.variants.iter().map(|v| { let ident = v.node.name; let summary = match v.node.kind { @@ -1210,11 +1208,11 @@ impl<'a> TraitDef<'a> { cx: &mut ExtCtxt, field_paths: Vec , mutbl: ast::Mutability) - -> Vec> { + -> Vec> { field_paths.iter().map(|path| { cx.pat(path.span, ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None)) - }).collect() + }).collect() } fn create_struct_pattern(&self, @@ -1223,7 +1221,7 @@ impl<'a> TraitDef<'a> { struct_def: &StructDef, prefix: &str, mutbl: ast::Mutability) - -> (Gc, Vec<(Span, Option, Gc)>) { + -> (P, Vec<(Span, Option, P)>) { if struct_def.fields.is_empty() { return ( cx.pat_ident_binding_mode( @@ -1266,7 +1264,7 @@ impl<'a> TraitDef<'a> { // struct_type is definitely not Unknown, since struct_def.fields // must be nonempty to reach here let pattern = if struct_type == Record { - let field_pats = subpats.iter().zip(ident_expr.iter()).map(|(&pat, &(_, id, _))| { + let field_pats = subpats.move_iter().zip(ident_expr.iter()).map(|(pat, &(_, id, _))| { // id is guaranteed to be Some ast::FieldPat { ident: id.unwrap(), pat: pat } }).collect(); @@ -1283,7 +1281,7 @@ impl<'a> TraitDef<'a> { variant: &ast::Variant, prefix: &str, mutbl: ast::Mutability) - -> (Gc, Vec<(Span, Option, Gc)> ) { 
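`move_map`, used above for `borrowed_self_args`, maps a vector by value so that the owned `P<Expr>` nodes can be consumed rather than cloned. A minimal sketch of the idea in current Rust (the real helper in libsyntax may differ in signature and in how it reuses the allocation):

```rust
// Sketch: map a Vec element-by-element, taking each element by value.
fn move_map<T, F: FnMut(T) -> T>(v: Vec<T>, f: F) -> Vec<T> {
    v.into_iter().map(f).collect()
}

fn main() {
    // With an owned pointer type like P<Expr>, mapping by value avoids the
    // clone that an `iter().map(|x| f(x.clone()))` formulation would need.
    let exprs = vec![String::from("self"), String::from("__arg_0")];
    let borrowed = move_map(exprs, |e| format!("&{}", e));
    assert_eq!(borrowed, ["&self", "&__arg_0"]);
}
```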
+ -> (P, Vec<(Span, Option, P)>) { let variant_ident = variant.node.name; match variant.node.kind { ast::TupleVariantKind(ref variant_args) => { @@ -1327,13 +1325,13 @@ Fold the fields. `use_foldl` controls whether this is done left-to-right (`true`) or right-to-left (`false`). */ pub fn cs_fold(use_foldl: bool, - f: |&mut ExtCtxt, Span, Gc, Gc, &[Gc]| -> Gc, - base: Gc, + f: |&mut ExtCtxt, Span, P, P, &[P]| -> P, + base: P, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) - -> Gc { + -> P { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { @@ -1341,7 +1339,7 @@ pub fn cs_fold(use_foldl: bool, f(cx, field.span, old, - field.self_, + field.self_.clone(), field.other.as_slice()) }) } else { @@ -1349,7 +1347,7 @@ pub fn cs_fold(use_foldl: bool, f(cx, field.span, old, - field.self_, + field.self_.clone(), field.other.as_slice()) }) } @@ -1374,21 +1372,21 @@ f(cx, span, ~[self_1.method(__arg_1_1, __arg_2_1), ~~~ */ #[inline] -pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec>| -> Gc, +pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec>| -> P, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) - -> Gc { + -> P { match *substructure.fields { EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { // call self_n.method(other_1_n, other_2_n, ...) let called = all_fields.iter().map(|field| { cx.expr_method_call(field.span, - field.self_, + field.self_.clone(), substructure.method_ident, field.other.iter() - .map(|e| cx.expr_addr_of(field.span, *e)) + .map(|e| cx.expr_addr_of(field.span, e.clone())) .collect()) }).collect(); @@ -1410,21 +1408,21 @@ fields. `use_foldl` controls whether this is done left-to-right */ #[inline] pub fn cs_same_method_fold(use_foldl: bool, - f: |&mut ExtCtxt, Span, Gc, Gc| -> Gc, - base: Gc, + f: |&mut ExtCtxt, Span, P, P| -> P, + base: P, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, substructure: &Substructure) - -> Gc { + -> P { cs_same_method( |cx, span, vals| { if use_foldl { - vals.iter().fold(base, |old, &new| { + vals.move_iter().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } else { - vals.iter().rev().fold(base, |old, &new| { + vals.move_iter().rev().fold(base.clone(), |old, new| { f(cx, span, old, new) }) } @@ -1438,10 +1436,10 @@ Use a given binop to combine the result of calling the derived method on all the fields. 
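The `cs_fold` family above threads a base expression through one combining step per field, left-to-right or right-to-left. A simplified model of that control flow, with strings standing in for expression nodes; the names here are invented for the example:

```rust
// Builds roughly the body a derived `eq` would fold together over two fields.
fn combine(old: String, field: &str) -> String {
    format!("({} && self.{} == other.{})", old, field, field)
}

fn cs_fold_model(
    use_foldl: bool,
    base: String,
    fields: &[&str],
    f: fn(String, &str) -> String,
) -> String {
    if use_foldl {
        fields.iter().fold(base, |old, field| f(old, *field))
    } else {
        fields.iter().rev().fold(base, |old, field| f(old, *field))
    }
}

fn main() {
    let body = cs_fold_model(true, "true".to_string(), &["x", "y"], combine);
    assert_eq!(body, "((true && self.x == other.x) && self.y == other.y)");
}
```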
*/ #[inline] -pub fn cs_binop(binop: ast::BinOp, base: Gc, +pub fn cs_binop(binop: ast::BinOp, base: P, enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, trait_span: Span, - substructure: &Substructure) -> Gc { + substructure: &Substructure) -> P { cs_same_method_fold( true, // foldl is good enough |cx, span, old, new| { @@ -1459,7 +1457,7 @@ pub fn cs_binop(binop: ast::BinOp, base: Gc, #[inline] pub fn cs_or(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, - substructure: &Substructure) -> Gc { + substructure: &Substructure) -> P { cs_binop(ast::BiOr, cx.expr_bool(span, false), enum_nonmatch_f, cx, span, substructure) @@ -1469,7 +1467,7 @@ pub fn cs_or(enum_nonmatch_f: EnumNonMatchCollapsedFunc, #[inline] pub fn cs_and(enum_nonmatch_f: EnumNonMatchCollapsedFunc, cx: &mut ExtCtxt, span: Span, - substructure: &Substructure) -> Gc { + substructure: &Substructure) -> P { cs_binop(ast::BiAnd, cx.expr_bool(span, true), enum_nonmatch_f, cx, span, substructure) diff --git a/src/libsyntax/ext/deriving/generic/ty.rs b/src/libsyntax/ext/deriving/generic/ty.rs index 8b4a9c51cf09d..a90618a30b6eb 100644 --- a/src/libsyntax/ext/deriving/generic/ty.rs +++ b/src/libsyntax/ext/deriving/generic/ty.rs @@ -14,14 +14,13 @@ explicit `Self` type to use when specifying impls to be derived. */ use ast; -use ast::{P,Expr,Generics,Ident}; +use ast::{Expr,Generics,Ident}; use ext::base::ExtCtxt; use ext::build::AstBuilder; use codemap::{Span,respan}; use owned_slice::OwnedSlice; use parse::token::special_idents; - -use std::gc::Gc; +use ptr::P; /// The types of pointers #[deriving(Clone)] @@ -260,7 +259,7 @@ impl<'a> LifetimeBounds<'a> { } pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option) - -> (Gc, ast::ExplicitSelf) { + -> (P, ast::ExplicitSelf) { // this constructs a fresh `self` path, which will match the fresh `self` binding // created below. let self_path = cx.expr_self(span); diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index f469139177a0b..b7f11c2582548 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -15,14 +15,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_hash(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let (path, generics, args) = if cx.ecfg.deriving_hash_type_parameter { (Path::new_(vec!("std", "hash", "Hash"), None, @@ -64,15 +63,14 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, hash_trait_def.expand(cx, mitem, item, push); } -fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { +fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { let state_expr = match substr.nonself_args { - [state_expr] => state_expr, + [ref state_expr] => state_expr, _ => cx.span_bug(trait_span, "incorrect number of arguments in `deriving(Hash)`") }; let hash_ident = substr.method_ident; let call_hash = |span, thing_expr| { - let expr = cx.expr_method_call(span, thing_expr, hash_ident, vec!(state_expr)); + let expr = cx.expr_method_call(span, thing_expr, hash_ident, vec!(state_expr.clone())); cx.stmt_expr(expr) }; let mut stmts = Vec::new(); @@ -83,7 +81,7 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, // Determine the discriminant. We will feed this value to the byte // iteration function. 
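The `[ref state_expr]` and `[ref n]` slice patterns in these hunks exist because the non-self arguments are now owned `P<Expr>` values: they cannot be copied out of a borrowed slice the way `Gc` pointers could, so the single element is borrowed and cloned only where needed. A small standalone example of the same pattern (current Rust's match ergonomics insert the `ref` implicitly):

```rust
// Borrow the single element of a slice of owned values instead of moving it.
fn single_arg(args: &[String]) -> &String {
    match args {
        [state_expr] => state_expr, // borrows; does not move out of `args`
        _ => panic!("incorrect number of arguments"),
    }
}

fn main() {
    let args = vec!["__arg_0".to_string()];
    assert_eq!(single_arg(&args), "__arg_0");
}
```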
let discriminant = match variant.node.disr_expr { - Some(d) => d, + Some(ref d) => d.clone(), None => cx.expr_uint(trait_span, index) }; @@ -94,8 +92,8 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, _ => cx.span_bug(trait_span, "impossible substructure in `deriving(Hash)`") }; - for &FieldInfo { self_, span, .. } in fields.iter() { - stmts.push(call_hash(span, self_)); + for &FieldInfo { ref self_, span, .. } in fields.iter() { + stmts.push(call_hash(span, self_.clone())); } if stmts.len() == 0 { diff --git a/src/libsyntax/ext/deriving/mod.rs b/src/libsyntax/ext/deriving/mod.rs index a9b5c8a413463..b8cebd8ea201c 100644 --- a/src/libsyntax/ext/deriving/mod.rs +++ b/src/libsyntax/ext/deriving/mod.rs @@ -21,8 +21,7 @@ library. use ast::{Item, MetaItem, MetaList, MetaNameValue, MetaWord}; use ext::base::ExtCtxt; use codemap::Span; - -use std::gc::Gc; +use ptr::P; pub mod bounds; pub mod clone; @@ -49,9 +48,9 @@ pub mod generic; pub fn expand_meta_deriving(cx: &mut ExtCtxt, _span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { match mitem.node { MetaNameValue(_, ref l) => { cx.span_err(l.span, "unexpected value in `deriving`"); @@ -63,13 +62,13 @@ pub fn expand_meta_deriving(cx: &mut ExtCtxt, cx.span_warn(mitem.span, "empty trait list in `deriving`"); } MetaList(_, ref titems) => { - for &titem in titems.iter().rev() { + for titem in titems.iter().rev() { match titem.node { MetaNameValue(ref tname, _) | MetaList(ref tname, _) | MetaWord(ref tname) => { macro_rules! expand(($func:path) => ($func(cx, titem.span, - titem, item, + &**titem, item, |i| push(i)))); match tname.get() { "Clone" => expand!(clone::expand_deriving_clone), diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index 30dd8e9683ad5..044a2812c0003 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -16,14 +16,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let inline = cx.meta_word(span, InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { @@ -70,10 +69,9 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { +fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { let n = match substr.nonself_args { - [n] => n, + [ref n] => n, _ => cx.span_bug(trait_span, "incorrect number of arguments in `deriving(FromPrimitive)`") }; @@ -106,8 +104,8 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, // expr for `$n == $variant as $name` let variant = cx.expr_ident(span, variant.node.name); let ty = cx.ty_ident(span, cx.ident_of(name)); - let cast = cx.expr_cast(span, variant, ty); - let guard = cx.expr_binary(span, ast::BiEq, n, cast); + let cast = cx.expr_cast(span, variant.clone(), ty); + let guard = cx.expr_binary(span, ast::BiEq, n.clone(), cast); // expr for `Some($variant)` let body = cx.expr_some(span, variant); @@ -141,7 +139,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, }; arms.push(arm); - cx.expr_match(trait_span, n, arms) + cx.expr_match(trait_span, 
n.clone(), arms) } _ => cx.span_bug(trait_span, "expected StaticEnum in deriving(FromPrimitive)") } diff --git a/src/libsyntax/ext/deriving/rand.rs b/src/libsyntax/ext/deriving/rand.rs index c652b5a5bed9a..584645bb30639 100644 --- a/src/libsyntax/ext/deriving/rand.rs +++ b/src/libsyntax/ext/deriving/rand.rs @@ -15,14 +15,13 @@ use ext::base::ExtCtxt; use ext::build::{AstBuilder}; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_rand(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let trait_def = TraitDef { span: span, attributes: Vec::new(), @@ -54,10 +53,9 @@ pub fn expand_deriving_rand(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { +fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { let rng = match substr.nonself_args { - [rng] => vec!( rng ), + [ref rng] => rng, _ => cx.bug("Incorrect number of arguments to `rand` in `deriving(Rand)`") }; let rand_ident = vec!( @@ -69,7 +67,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, let rand_call = |cx: &mut ExtCtxt, span| { cx.expr_call_global(span, rand_ident.clone(), - vec!( *rng.get(0) )) + vec!(rng.clone())) }; return match *substr.fields { @@ -95,7 +93,7 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, // ::rand::Rand::rand(rng) let rv_call = cx.expr_call(trait_span, rand_name, - vec!( *rng.get(0) )); + vec!(rng.clone())); // need to specify the uint-ness of the random number let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint")); @@ -136,8 +134,8 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, trait_span: Span, ctor_ident: Ident, summary: &StaticFields, - rand_call: |&mut ExtCtxt, Span| -> Gc) - -> Gc { + rand_call: |&mut ExtCtxt, Span| -> P) + -> P { match *summary { Unnamed(ref fields) => { if fields.is_empty() { diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index e0dfbb232f554..16ce264fe712d 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -9,7 +9,7 @@ // except according to those terms. use ast; -use ast::{MetaItem, Item, Expr}; +use ast::{MetaItem, Item, Expr,}; use codemap::Span; use ext::format; use ext::base::ExtCtxt; @@ -17,16 +17,15 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token; +use ptr::P; use std::collections::HashMap; -use std::string::String; -use std::gc::Gc; pub fn expand_deriving_show(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { // &mut ::std::fmt::Formatter let fmtr = Ptr(box Literal(Path::new(vec!("std", "fmt", "Formatter"))), Borrowed(None, ast::MutMutable)); @@ -57,7 +56,7 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt, /// We construct a format string and then defer to std::fmt, since that /// knows what's up with formatting and so on. fn show_substructure(cx: &mut ExtCtxt, span: Span, - substr: &Substructure) -> Gc { + substr: &Substructure) -> P { // build ``, `({}, {}, ...)` or ` { : {}, // : {}, ... }` based on the "shape". 
// @@ -91,7 +90,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, format_string.push_str("{}"); - exprs.push(field.self_); + exprs.push(field.self_.clone()); } format_string.push_str(")"); @@ -108,7 +107,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, format_string.push_str(name.get()); format_string.push_str(": {}"); - exprs.push(field.self_); + exprs.push(field.self_.clone()); } format_string.push_str(" }}"); @@ -123,7 +122,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // format_arg_method!(fmt, write_fmt, "", exprs...) // // but doing it directly via ext::format. - let formatter = substr.nonself_args[0]; + let formatter = substr.nonself_args[0].clone(); let meth = cx.ident_of("write_fmt"); let s = token::intern_and_get_ident(format_string.as_slice()); diff --git a/src/libsyntax/ext/deriving/zero.rs b/src/libsyntax/ext/deriving/zero.rs index 973f9d518cd70..7f265b529ffea 100644 --- a/src/libsyntax/ext/deriving/zero.rs +++ b/src/libsyntax/ext/deriving/zero.rs @@ -15,14 +15,13 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use ext::deriving::generic::ty::*; use parse::token::InternedString; - -use std::gc::Gc; +use ptr::P; pub fn expand_deriving_zero(cx: &mut ExtCtxt, span: Span, - mitem: Gc, - item: Gc, - push: |Gc|) { + mitem: &MetaItem, + item: &Item, + push: |P|) { let inline = cx.meta_word(span, InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { @@ -63,8 +62,7 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, trait_def.expand(cx, mitem, item, push) } -fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> Gc { +fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { let zero_ident = vec!( cx.ident_of("std"), cx.ident_of("num"), diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index aae92ae85fc5b..69574ee669678 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -61,38 +61,42 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenT pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> Box { - let exprs = match get_exprs_from_tts(cx, sp, tts) { + let mut exprs = match get_exprs_from_tts(cx, sp, tts) { Some(ref exprs) if exprs.len() == 0 => { cx.span_err(sp, "env! takes 1 or 2 arguments"); return DummyResult::expr(sp); } None => return DummyResult::expr(sp), - Some(exprs) => exprs + Some(exprs) => exprs.move_iter() }; let var = match expr_to_string(cx, - *exprs.get(0), + exprs.next().unwrap(), "expected string literal") { None => return DummyResult::expr(sp), Some((v, _style)) => v }; - let msg = match exprs.len() { - 1 => { + let msg = match exprs.next() { + None => { token::intern_and_get_ident(format!("environment variable `{}` \ not defined", var).as_slice()) } - 2 => { - match expr_to_string(cx, *exprs.get(1), "expected string literal") { + Some(second) => { + match expr_to_string(cx, second, "expected string literal") { None => return DummyResult::expr(sp), Some((s, _style)) => s } } - _ => { + }; + + match exprs.next() { + None => {} + Some(_) => { cx.span_err(sp, "env! takes 1 or 2 arguments"); return DummyResult::expr(sp); } - }; + } let e = match os::getenv(var.get()) { None => { diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index d15d6b3f8f127..310f7c4f3036f 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,7 +8,7 @@ // option. 
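The rewritten `expand_env` above drives its argument handling off a consuming iterator: take the required first expression, optionally a second, and reject anything beyond that. A sketch of the same control flow with plain strings in place of expression nodes and `Result` in place of `DummyResult`:

```rust
// Sketch only: the 1-or-2 argument rule of env!, expressed over owned values.
fn handle_env_args(exprs: Vec<String>) -> Result<(String, String), &'static str> {
    let mut exprs = exprs.into_iter();
    let var = exprs.next().ok_or("env! takes 1 or 2 arguments")?;
    let msg = match exprs.next() {
        None => format!("environment variable `{}` not defined", var),
        Some(second) => second,
    };
    if exprs.next().is_some() {
        return Err("env! takes 1 or 2 arguments");
    }
    Ok((var, msg))
}

fn main() {
    assert!(handle_env_args(vec!["PATH".into()]).is_ok());
    assert!(handle_env_args(vec!["A".into(), "msg".into(), "extra".into()]).is_err());
}
```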
This file may not be copied, modified, or distributed // except according to those terms. -use ast::{P, Block, Crate, DeclLocal, ExprMac, PatMac}; +use ast::{Block, Crate, DeclLocal, ExprMac, PatMac}; use ast::{Local, Ident, MacInvocTT}; use ast::{ItemMac, Mrk, Stmt, StmtDecl, StmtMac, StmtExpr, StmtSemi}; use ast::TokenTree; @@ -25,103 +25,106 @@ use fold::*; use parse; use parse::token::{fresh_mark, fresh_name, intern}; use parse::token; +use ptr::P; +use util::small_vector::SmallVector; use visit; use visit::Visitor; -use util::small_vector::SmallVector; -use std::gc::{Gc, GC}; +use std::gc::Gc; enum Either { Left(L), Right(R) } -fn expand_expr(e: Gc, fld: &mut MacroExpander) -> Gc { - match e.node { +pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { + e.and_then(|ast::Expr {id, node, span}| match node { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. - ExprMac(ref mac) => { - let expanded_expr = match expand_mac_invoc(mac,&e.span, - |r|{r.make_expr()}, - |expr,fm|{mark_expr(expr,fm)}, - fld) { + ExprMac(mac) => { + let expanded_expr = match expand_mac_invoc(mac, span, + |r| r.make_expr(), + mark_expr, fld) { Some(expr) => expr, None => { - return DummyResult::raw_expr(e.span); + return DummyResult::raw_expr(span); } }; // Keep going, outside-in. // - // FIXME(pcwalton): Is it necessary to clone the - // node here? - let fully_expanded = - fld.fold_expr(expanded_expr).node.clone(); + let fully_expanded = fld.fold_expr(expanded_expr); fld.cx.bt_pop(); - box(GC) ast::Expr { + fully_expanded.map(|e| ast::Expr { id: ast::DUMMY_NODE_ID, - node: fully_expanded, - span: e.span, - } + node: e.node, + span: span, + }) } ast::ExprWhile(cond, body, opt_ident) => { let cond = fld.fold_expr(cond); let (body, opt_ident) = expand_loop_block(body, opt_ident, fld); - fld.cx.expr(e.span, ast::ExprWhile(cond, body, opt_ident)) + fld.cx.expr(span, ast::ExprWhile(cond, body, opt_ident)) } ast::ExprLoop(loop_block, opt_ident) => { let (loop_block, opt_ident) = expand_loop_block(loop_block, opt_ident, fld); - fld.cx.expr(e.span, ast::ExprLoop(loop_block, opt_ident)) + fld.cx.expr(span, ast::ExprLoop(loop_block, opt_ident)) } ast::ExprForLoop(pat, head, body, opt_ident) => { let pat = fld.fold_pat(pat); let head = fld.fold_expr(head); let (body, opt_ident) = expand_loop_block(body, opt_ident, fld); - fld.cx.expr(e.span, ast::ExprForLoop(pat, head, body, opt_ident)) + fld.cx.expr(span, ast::ExprForLoop(pat, head, body, opt_ident)) } ast::ExprFnBlock(capture_clause, fn_decl, block) => { let (rewritten_fn_decl, rewritten_block) - = expand_and_rename_fn_decl_and_block(&*fn_decl, block, fld); + = expand_and_rename_fn_decl_and_block(fn_decl, block, fld); let new_node = ast::ExprFnBlock(capture_clause, rewritten_fn_decl, rewritten_block); - box(GC) ast::Expr{id:e.id, node: new_node, span: fld.new_span(e.span)} + P(ast::Expr{id:id, node: new_node, span: fld.new_span(span)}) } ast::ExprProc(fn_decl, block) => { let (rewritten_fn_decl, rewritten_block) - = expand_and_rename_fn_decl_and_block(&*fn_decl, block, fld); + = expand_and_rename_fn_decl_and_block(fn_decl, block, fld); let new_node = ast::ExprProc(rewritten_fn_decl, rewritten_block); - box(GC) ast::Expr{id:e.id, node: new_node, span: fld.new_span(e.span)} + P(ast::Expr{id:id, node: new_node, span: fld.new_span(span)}) } - _ => noop_fold_expr(e, fld) - } + _ => { + P(noop_fold_expr(ast::Expr { + id: id, + node: node, + span: span + }, fld)) + } + }) } /// Expand a (not-ident-style) macro invocation. 
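Most of the changes in this file follow one pattern: `Gc<T>` nodes that used to be copied freely become owned `P<T>` values that are taken apart with `and_then` or rebuilt in place with `map`, as in `e.and_then(|ast::Expr {id, node, span}| ...)` below. A minimal stand-in for that API in current Rust; the real `syntax::ptr::P` is richer than this sketch:

```rust
// Sketch of an owned, uniquely-allocated AST pointer.
struct P<T>(Box<T>);

impl<T> P<T> {
    fn new(value: T) -> P<T> {
        P(Box::new(value))
    }
    // Rebuild the node in place.
    fn map<F: FnOnce(T) -> T>(self, f: F) -> P<T> {
        P(Box::new(f(*self.0)))
    }
    // Take the node apart and produce something else entirely.
    fn and_then<U, F: FnOnce(T) -> U>(self, f: F) -> U {
        f(*self.0)
    }
}

struct Expr {
    id: u32,
    node: String,
}

fn main() {
    let e = P::new(Expr { id: 7, node: "a + b".to_string() });
    // Destructure the owned node, transform it, rebuild: no Gc, no clone.
    let expanded: P<Expr> = e.and_then(|Expr { id, node }| {
        P::new(Expr { id, node: format!("({})", node) })
    });
    let renumbered = expanded.map(|mut expr| {
        expr.id += 1;
        expr
    });
    assert_eq!(renumbered.0.node, "(a + b)");
    assert_eq!(renumbered.0.id, 8);
}
```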
Returns the result /// of expansion and the mark which must be applied to the result. /// Our current interface doesn't allow us to apply the mark to the /// result until after calling make_expr, make_items, etc. -fn expand_mac_invoc(mac: &ast::Mac, span: &codemap::Span, +fn expand_mac_invoc(mac: ast::Mac, span: codemap::Span, parse_thunk: |Box|->Option, mark_thunk: |T,Mrk|->T, fld: &mut MacroExpander) -> Option { - match (*mac).node { + match mac.node { // it would almost certainly be cleaner to pass the whole // macro invocation in, rather than pulling it apart and // marking the tts and the ctxt separately. This also goes // for the other three macro invocation chunks of code // in this file. // Token-tree macros: - MacInvocTT(ref pth, ref tts, _) => { + MacInvocTT(pth, tts, _) => { if pth.segments.len() > 1u { fld.cx.span_err(pth.span, "expected macro name without module \ @@ -144,7 +147,7 @@ fn expand_mac_invoc(mac: &ast::Mac, span: &codemap::Span, Some(rc) => match *rc { NormalTT(ref expandfun, exp_span) => { fld.cx.bt_push(ExpnInfo { - call_site: *span, + call_site: span, callee: NameAndSpan { name: extnamestr.get().to_string(), format: MacroBang, @@ -218,7 +221,7 @@ fn expand_loop_block(loop_block: P, // in a block enclosed by loop head. fld.cx.syntax_env.push_frame(); fld.cx.syntax_env.info().pending_renames.push(rename); - let expanded_block = expand_block_elts(&*loop_block, fld); + let expanded_block = expand_block_elts(loop_block, fld); fld.cx.syntax_env.pop_frame(); (expanded_block, Some(renamed_ident)) @@ -240,8 +243,8 @@ macro_rules! with_exts_frame ( ) // When we enter a module, record it, for the sake of `module!` -fn expand_item(it: Gc, fld: &mut MacroExpander) - -> SmallVector> { +pub fn expand_item(it: P, fld: &mut MacroExpander) + -> SmallVector> { let it = expand_item_modifiers(it, fld); let mut decorator_items = SmallVector::zero(); @@ -265,8 +268,9 @@ fn expand_item(it: Gc, fld: &mut MacroExpander) // we'd ideally decorator_items.push_all(expand_item(item, fld)), // but that double-mut-borrows fld - let mut items: SmallVector> = SmallVector::zero(); - dec.expand(fld.cx, attr.span, attr.node.value, it, |item| items.push(item)); + let mut items: SmallVector> = SmallVector::zero(); + dec.expand(fld.cx, attr.span, &*attr.node.value, &*it, + |item| items.push(item)); decorator_items.extend(items.move_iter() .flat_map(|item| expand_item(item, fld).move_iter())); @@ -285,17 +289,16 @@ fn expand_item(it: Gc, fld: &mut MacroExpander) let macro_escape = contains_macro_escape(new_attrs.as_slice()); let result = with_exts_frame!(fld.cx.syntax_env, macro_escape, - noop_fold_item(&*it, fld)); + noop_fold_item(it, fld)); fld.cx.mod_pop(); result }, _ => { - let it = box(GC) ast::Item { + let it = P(ast::Item { attrs: new_attrs, ..(*it).clone() - - }; - noop_fold_item(&*it, fld) + }); + noop_fold_item(it, fld) } }; @@ -303,8 +306,8 @@ fn expand_item(it: Gc, fld: &mut MacroExpander) new_items } -fn expand_item_modifiers(mut it: Gc, fld: &mut MacroExpander) - -> Gc { +fn expand_item_modifiers(mut it: P, fld: &mut MacroExpander) + -> P { // partition the attributes into ItemModifiers and others let (modifiers, other_attrs) = it.attrs.partitioned(|attr| { match fld.cx.syntax_env.find(&intern(attr.name().get())) { @@ -313,10 +316,10 @@ fn expand_item_modifiers(mut it: Gc, fld: &mut MacroExpander) } }); // update the attrs, leave everything else alone. Is this mutation really a good idea? 
- it = box(GC) ast::Item { + it = P(ast::Item { attrs: other_attrs, ..(*it).clone() - }; + }); if modifiers.is_empty() { return it; @@ -337,7 +340,7 @@ fn expand_item_modifiers(mut it: Gc, fld: &mut MacroExpander) span: None, } }); - it = mac.expand(fld.cx, attr.span, attr.node.value, it); + it = mac.expand(fld.cx, attr.span, &*attr.node.value, it); fld.cx.bt_pop(); } _ => unreachable!() @@ -351,15 +354,15 @@ fn expand_item_modifiers(mut it: Gc, fld: &mut MacroExpander) } /// Expand item_underscore -fn expand_item_underscore(item: &ast::Item_, fld: &mut MacroExpander) -> ast::Item_ { - match *item { - ast::ItemFn(decl, fn_style, abi, ref generics, body) => { +fn expand_item_underscore(item: ast::Item_, fld: &mut MacroExpander) -> ast::Item_ { + match item { + ast::ItemFn(decl, fn_style, abi, generics, body) => { let (rewritten_fn_decl, rewritten_body) - = expand_and_rename_fn_decl_and_block(&*decl, body, fld); + = expand_and_rename_fn_decl_and_block(decl, body, fld); let expanded_generics = fold::noop_fold_generics(generics,fld); ast::ItemFn(rewritten_fn_decl, fn_style, abi, expanded_generics, rewritten_body) } - _ => noop_fold_item_underscore(&*item, fld) + _ => noop_fold_item_underscore(item, fld) } } @@ -370,26 +373,24 @@ fn contains_macro_escape(attrs: &[ast::Attribute]) -> bool { // Support for item-position macro invocations, exactly the same // logic as for expression-position macro invocations. -fn expand_item_mac(it: Gc, fld: &mut MacroExpander) - -> SmallVector> -{ - let (pth, tts) = match it.node { +pub fn expand_item_mac(it: P, fld: &mut MacroExpander) + -> SmallVector> { + let (extname, path_span, tts) = match it.node { ItemMac(codemap::Spanned { node: MacInvocTT(ref pth, ref tts, _), .. }) => { - (pth, (*tts).clone()) + (pth.segments.get(0).identifier, pth.span, (*tts).clone()) } _ => fld.cx.span_bug(it.span, "invalid item macro invocation") }; - let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); let fm = fresh_mark(); let def_or_items = { - let expanded = match fld.cx.syntax_env.find(&extname.name) { + let mut expanded = match fld.cx.syntax_env.find(&extname.name) { None => { - fld.cx.span_err(pth.span, + fld.cx.span_err(path_span, format!("macro undefined: '{}!'", extnamestr).as_slice()); // let compilation continue @@ -400,7 +401,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) NormalTT(ref expander, span) => { if it.ident.name != parse::token::special_idents::invalid.name { fld.cx - .span_err(pth.span, + .span_err(path_span, format!("macro {}! expects no ident argument, \ given '{}'", extnamestr, @@ -421,7 +422,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) } IdentTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { - fld.cx.span_err(pth.span, + fld.cx.span_err(path_span, format!("macro {}! expects an ident argument", extnamestr.get()).as_slice()); return SmallVector::zero(); @@ -440,7 +441,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) } LetSyntaxTT(ref expander, span) => { if it.ident.name == parse::token::special_idents::invalid.name { - fld.cx.span_err(pth.span, + fld.cx.span_err(path_span, format!("macro {}! 
expects an ident argument", extnamestr.get()).as_slice()); return SmallVector::zero(); @@ -490,7 +491,7 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) .collect() } Right(None) => { - fld.cx.span_err(pth.span, + fld.cx.span_err(path_span, format!("non-item macro in item position: {}", extnamestr.get()).as_slice()); return SmallVector::zero(); @@ -498,24 +499,21 @@ fn expand_item_mac(it: Gc, fld: &mut MacroExpander) }; fld.cx.bt_pop(); - return items; + items } /// Expand a stmt // // I don't understand why this returns a vector... it looks like we're // half done adding machinery to allow macros to expand into multiple statements. -fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector> { +fn expand_stmt(s: Stmt, fld: &mut MacroExpander) -> SmallVector> { let (mac, semi) = match s.node { - StmtMac(ref mac, semi) => (mac, semi), + StmtMac(mac, semi) => (mac, semi), _ => return expand_non_macro_stmt(s, fld) }; - let expanded_stmt = match expand_mac_invoc(mac,&s.span, - |r|{r.make_stmt()}, - |sts,mrk| { - mark_stmt(&*sts,mrk) - }, - fld) { + let expanded_stmt = match expand_mac_invoc(mac, s.span, + |r| r.make_stmt(), + mark_stmt, fld) { Some(stmt) => stmt, None => { return SmallVector::zero(); @@ -523,46 +521,34 @@ fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector> { }; // Keep going, outside-in. - let fully_expanded = fld.fold_stmt(&*expanded_stmt); + let fully_expanded = fld.fold_stmt(expanded_stmt); fld.cx.bt_pop(); - let fully_expanded: SmallVector> = fully_expanded.move_iter() - .map(|s| box(GC) Spanned { span: s.span, node: s.node.clone() }) - .collect(); - - fully_expanded.move_iter().map(|s| { - match s.node { - StmtExpr(e, stmt_id) if semi => { - box(GC) Spanned { - span: s.span, - node: StmtSemi(e, stmt_id) - } + + if semi { + fully_expanded.move_iter().map(|s| s.map(|Spanned {node, span}| { + Spanned { + node: match node { + StmtExpr(e, stmt_id) => StmtSemi(e, stmt_id), + _ => node /* might already have a semi */ + }, + span: span } - _ => s /* might already have a semi */ - } - }).collect() + })).collect() + } else { + fully_expanded + } } // expand a non-macro stmt. this is essentially the fallthrough for // expand_stmt, above. -fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) - -> SmallVector> { +fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroExpander) + -> SmallVector> { // is it a let? 
- match s.node { - StmtDecl(decl, node_id) => { - match *decl { - Spanned { - node: DeclLocal(ref local), - span: stmt_span - } => { - // take it apart: - let Local { - ty: ty, - pat: pat, - init: init, - id: id, - span: span, - source: source, - } = **local; + match node { + StmtDecl(decl, node_id) => decl.and_then(|Spanned {node: decl, span}| match decl { + DeclLocal(local) => { + // take it apart: + let rewritten_local = local.map(|Local {id, pat, ty, init, source, span}| { // expand the ty since TyFixedLengthVec contains an Expr // and thus may have a macro use let expanded_ty = fld.fold_ty(ty); @@ -585,57 +571,66 @@ fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) }; // add them to the existing pending renames: fld.cx.syntax_env.info().pending_renames.push_all_move(new_pending_renames); - // also, don't forget to expand the init: - let new_init_opt = init.map(|e| fld.fold_expr(e)); - let rewritten_local = - box(GC) Local { - ty: expanded_ty, - pat: rewritten_pat, - init: new_init_opt, - id: id, - span: span, - source: source - }; - SmallVector::one(box(GC) Spanned { - node: StmtDecl(box(GC) Spanned { - node: DeclLocal(rewritten_local), - span: stmt_span - }, - node_id), + Local { + id: id, + ty: expanded_ty, + pat: rewritten_pat, + // also, don't forget to expand the init: + init: init.map(|e| fld.fold_expr(e)), + source: source, span: span - }) - } - _ => noop_fold_stmt(s, fld), + } + }); + SmallVector::one(P(Spanned { + node: StmtDecl(P(Spanned { + node: DeclLocal(rewritten_local), + span: span + }), + node_id), + span: stmt_span + })) } - }, - _ => noop_fold_stmt(s, fld), + _ => { + noop_fold_stmt(Spanned { + node: StmtDecl(P(Spanned { + node: decl, + span: span + }), + node_id), + span: stmt_span + }, fld) + } + }), + _ => { + noop_fold_stmt(Spanned { + node: node, + span: stmt_span + }, fld) + } } } // expand the arm of a 'match', renaming for macro hygiene -fn expand_arm(arm: &ast::Arm, fld: &mut MacroExpander) -> ast::Arm { +fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm { // expand pats... 
they might contain macro uses: - let expanded_pats : Vec> = arm.pats.iter().map(|pat| fld.fold_pat(*pat)).collect(); + let expanded_pats = arm.pats.move_map(|pat| fld.fold_pat(pat)); if expanded_pats.len() == 0 { fail!("encountered match arm with 0 patterns"); } // all of the pats must have the same set of bindings, so use the // first one to extract them and generate new names: - let first_pat = expanded_pats.get(0); - let idents = pattern_bindings(&**first_pat); - let new_renames = - idents.iter().map(|id| (*id,fresh_name(id))).collect(); + let idents = pattern_bindings(&**expanded_pats.get(0)); + let new_renames = idents.move_iter().map(|id| (id, fresh_name(&id))).collect(); // apply the renaming, but only to the PatIdents: let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames}; - let rewritten_pats = - expanded_pats.iter().map(|pat| rename_pats_fld.fold_pat(*pat)).collect(); + let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat)); // apply renaming and then expansion to the guard and the body: let mut rename_fld = IdentRenamer{renames:&new_renames}; let rewritten_guard = arm.guard.map(|g| fld.fold_expr(rename_fld.fold_expr(g))); let rewritten_body = fld.fold_expr(rename_fld.fold_expr(arm.body)); ast::Arm { - attrs: arm.attrs.iter().map(|x| fld.fold_attribute(*x)).collect(), + attrs: arm.attrs.move_map(|x| fld.fold_attribute(x)), pats: rewritten_pats, guard: rewritten_guard, body: rewritten_body, @@ -683,121 +678,126 @@ fn fn_decl_arg_bindings(fn_decl: &ast::FnDecl) -> Vec { } // expand a block. pushes a new exts_frame, then calls expand_block_elts -fn expand_block(blk: &Block, fld: &mut MacroExpander) -> P { +pub fn expand_block(blk: P, fld: &mut MacroExpander) -> P { // see note below about treatment of exts table with_exts_frame!(fld.cx.syntax_env,false, expand_block_elts(blk, fld)) } // expand the elements of a block. 
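`expand_arm` now builds its rename table by consuming the collected identifiers directly (`idents.move_iter().map(|id| (id, fresh_name(&id)))`) and applies the same table to the patterns, the guard, and the body. A toy model of that data flow in current Rust, with strings instead of hygienic identifiers and syntax contexts:

```rust
use std::collections::HashMap;

// Sketch only: real hygiene renames syntax contexts, not string names.
fn fresh_name(id: &str, counter: &mut u32) -> String {
    *counter += 1;
    format!("{}__{}", id, counter)
}

fn main() {
    let mut counter = 0;
    let idents = vec!["x".to_string(), "y".to_string()];
    let renames: HashMap<String, String> = idents
        .into_iter()
        .map(|id| {
            let fresh = fresh_name(&id, &mut counter);
            (id, fresh)
        })
        .collect();
    // The one map is then applied everywhere the arm mentions those names.
    let body = "x + y";
    let rewritten: String = body
        .split_whitespace()
        .map(|tok| renames.get(tok).map_or(tok.to_string(), |r| r.clone()))
        .collect::<Vec<_>>()
        .join(" ");
    assert_eq!(rewritten, "x__1 + y__2");
}
```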
-fn expand_block_elts(b: &Block, fld: &mut MacroExpander) -> P { - let new_view_items = b.view_items.iter().map(|x| fld.fold_view_item(x)).collect(); - let new_stmts = - b.stmts.iter().flat_map(|x| { +pub fn expand_block_elts(b: P, fld: &mut MacroExpander) -> P { + b.map(|Block {id, view_items, stmts, expr, rules, span}| { + let new_view_items = view_items.move_iter().map(|x| fld.fold_view_item(x)).collect(); + let new_stmts = stmts.move_iter().flat_map(|x| { // perform all pending renames let renamed_stmt = { let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; let mut rename_fld = IdentRenamer{renames:pending_renames}; - rename_fld.fold_stmt(&**x).expect_one("rename_fold didn't return one value") + rename_fld.fold_stmt(x).expect_one("rename_fold didn't return one value") }; // expand macros in the statement - fld.fold_stmt(&*renamed_stmt).move_iter() + fld.fold_stmt(renamed_stmt).move_iter() }).collect(); - let new_expr = b.expr.map(|x| { - let expr = { - let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; - let mut rename_fld = IdentRenamer{renames:pending_renames}; - rename_fld.fold_expr(x) - }; - fld.fold_expr(expr) - }); - P(Block { - view_items: new_view_items, - stmts: new_stmts, - expr: new_expr, - id: fld.new_id(b.id), - rules: b.rules, - span: b.span, + let new_expr = expr.map(|x| { + let expr = { + let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; + let mut rename_fld = IdentRenamer{renames:pending_renames}; + rename_fld.fold_expr(x) + }; + fld.fold_expr(expr) + }); + Block { + id: fld.new_id(id), + view_items: new_view_items, + stmts: new_stmts, + expr: new_expr, + rules: rules, + span: span + } }) } -fn expand_pat(p: Gc, fld: &mut MacroExpander) -> Gc { - let (pth, tts) = match p.node { - PatMac(ref mac) => { - match mac.node { - MacInvocTT(ref pth, ref tts, _) => { - (pth, (*tts).clone()) - } - } - } - _ => return noop_fold_pat(p, fld), - }; - if pth.segments.len() > 1u { - fld.cx.span_err(pth.span, "expected macro name without module separators"); - return DummyResult::raw_pat(p.span); +fn expand_pat(p: P, fld: &mut MacroExpander) -> P { + match p.node { + PatMac(_) => {} + _ => return noop_fold_pat(p, fld) } - let extname = pth.segments.get(0).identifier; - let extnamestr = token::get_ident(extname); - let marked_after = match fld.cx.syntax_env.find(&extname.name) { - None => { - fld.cx.span_err(pth.span, - format!("macro undefined: '{}!'", - extnamestr).as_slice()); - // let compilation continue - return DummyResult::raw_pat(p.span); + p.map(|ast::Pat {node, span, ..}| { + let (pth, tts) = match node { + PatMac(mac) => match mac.node { + MacInvocTT(pth, tts, _) => { + (pth, tts) + } + }, + _ => unreachable!() + }; + if pth.segments.len() > 1u { + fld.cx.span_err(pth.span, "expected macro name without module separators"); + return DummyResult::raw_pat(span); } + let extname = pth.segments.get(0).identifier; + let extnamestr = token::get_ident(extname); + let marked_after = match fld.cx.syntax_env.find(&extname.name) { + None => { + fld.cx.span_err(pth.span, + format!("macro undefined: '{}!'", + extnamestr).as_slice()); + // let compilation continue + return DummyResult::raw_pat(span); + } - Some(rc) => match *rc { - NormalTT(ref expander, span) => { - fld.cx.bt_push(ExpnInfo { - call_site: p.span, - callee: NameAndSpan { - name: extnamestr.get().to_string(), - format: MacroBang, - span: span - } - }); + Some(rc) => match *rc { + NormalTT(ref expander, tt_span) => { + fld.cx.bt_push(ExpnInfo { + call_site: span, + callee: 
NameAndSpan { + name: extnamestr.get().to_string(), + format: MacroBang, + span: tt_span + } + }); - let fm = fresh_mark(); - let marked_before = mark_tts(tts.as_slice(), fm); - let mac_span = original_span(fld.cx); - let expanded = match expander.expand(fld.cx, - mac_span.call_site, - marked_before.as_slice()).make_pat() { - Some(e) => e, - None => { - fld.cx.span_err( - pth.span, - format!( - "non-pattern macro in pattern position: {}", - extnamestr.get() - ).as_slice() - ); - return DummyResult::raw_pat(p.span); - } - }; + let fm = fresh_mark(); + let marked_before = mark_tts(tts.as_slice(), fm); + let mac_span = original_span(fld.cx); + let expanded = match expander.expand(fld.cx, + mac_span.call_site, + marked_before.as_slice()).make_pat() { + Some(e) => e, + None => { + fld.cx.span_err( + pth.span, + format!( + "non-pattern macro in pattern position: {}", + extnamestr.get() + ).as_slice() + ); + return DummyResult::raw_pat(span); + } + }; - // mark after: - mark_pat(expanded,fm) - } - _ => { - fld.cx.span_err(p.span, - format!("{}! is not legal in pattern position", - extnamestr.get()).as_slice()); - return DummyResult::raw_pat(p.span); + // mark after: + mark_pat(expanded,fm) + } + _ => { + fld.cx.span_err(span, + format!("{}! is not legal in pattern position", + extnamestr.get()).as_slice()); + return DummyResult::raw_pat(span); + } } - } - }; + }; - let fully_expanded = - fld.fold_pat(marked_after).node.clone(); - fld.cx.bt_pop(); + let fully_expanded = + fld.fold_pat(marked_after).node.clone(); + fld.cx.bt_pop(); - box(GC) ast::Pat { - id: ast::DUMMY_NODE_ID, - node: fully_expanded, - span: p.span, - } + ast::Pat { + id: ast::DUMMY_NODE_ID, + node: fully_expanded, + span: span + } + }) } /// A tree-folder that applies every rename in its (mutable) list @@ -814,7 +814,7 @@ impl<'a> Folder for IdentRenamer<'a> { ctxt: mtwt::apply_renames(self.renames, id.ctxt), } } - fn fold_mac(&mut self, macro: &ast::Mac) -> ast::Mac { + fn fold_mac(&mut self, macro: ast::Mac) -> ast::Mac { fold::noop_fold_mac(macro, self) } } @@ -828,45 +828,50 @@ pub struct PatIdentRenamer<'a> { } impl<'a> Folder for PatIdentRenamer<'a> { - fn fold_pat(&mut self, pat: Gc) -> Gc { + fn fold_pat(&mut self, pat: P) -> P { match pat.node { - ast::PatIdent(binding_mode, Spanned{span: ref sp, node: id}, ref sub) => { - let new_ident = Ident{name: id.name, - ctxt: mtwt::apply_renames(self.renames, id.ctxt)}; + ast::PatIdent(..) 
=> {}, + _ => return noop_fold_pat(pat, self) + } + + pat.map(|ast::Pat {id, node, span}| match node { + ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => { + let new_ident = Ident{name: ident.name, + ctxt: mtwt::apply_renames(self.renames, ident.ctxt)}; let new_node = ast::PatIdent(binding_mode, - Spanned{span: self.new_span(*sp), node: new_ident}, + Spanned{span: self.new_span(sp), node: new_ident}, sub.map(|p| self.fold_pat(p))); - box(GC) ast::Pat { - id: pat.id, - span: self.new_span(pat.span), + ast::Pat { + id: id, node: new_node, + span: self.new_span(span) } }, - _ => noop_fold_pat(pat, self) - } + _ => unreachable!() + }) } - fn fold_mac(&mut self, macro: &ast::Mac) -> ast::Mac { + fn fold_mac(&mut self, macro: ast::Mac) -> ast::Mac { fold::noop_fold_mac(macro, self) } } // expand a method -fn expand_method(m: &ast::Method, fld: &mut MacroExpander) -> SmallVector> { - let id = fld.new_id(m.id); - match m.node { +fn expand_method(m: P, fld: &mut MacroExpander) -> SmallVector> { + m.and_then(|m| match m.node { ast::MethDecl(ident, - ref generics, + generics, abi, - ref explicit_self, + explicit_self, fn_style, decl, body, vis) => { + let id = fld.new_id(m.id); let (rewritten_fn_decl, rewritten_body) - = expand_and_rename_fn_decl_and_block(&*decl,body,fld); - SmallVector::one(box(GC) ast::Method { - attrs: m.attrs.iter().map(|a| fld.fold_attribute(*a)).collect(), + = expand_and_rename_fn_decl_and_block(decl,body,fld); + SmallVector::one(P(ast::Method { + attrs: m.attrs.move_map(|a| fld.fold_attribute(a)), id: id, span: fld.new_span(m.span), node: ast::MethDecl(fld.fold_ident(ident), @@ -877,15 +882,13 @@ fn expand_method(m: &ast::Method, fld: &mut MacroExpander) -> SmallVector { + ast::MethMac(mac) => { let maybe_new_methods = - expand_mac_invoc(mac, &m.span, - |r|{r.make_methods()}, - |meths,mark|{ - meths.move_iter().map(|m|{mark_method(m,mark)}) - .collect()}, + expand_mac_invoc(mac, m.span, + |r| r.make_methods(), + |meths, mark| meths.move_map(|m| mark_method(m, mark)), fld); let new_methods = match maybe_new_methods { @@ -896,22 +899,22 @@ fn expand_method(m: &ast::Method, fld: &mut MacroExpander) -> SmallVector, +fn expand_and_rename_fn_decl_and_block(fn_decl: P, block: P, fld: &mut MacroExpander) - -> (Gc, Gc) { + -> (P, P) { let expanded_decl = fld.fold_fn_decl(fn_decl); let idents = fn_decl_arg_bindings(&*expanded_decl); let renames = idents.iter().map(|id : &ast::Ident| (*id,fresh_name(id))).collect(); // first, a renamer for the PatIdents, for the fn_decl: let mut rename_pat_fld = PatIdentRenamer{renames: &renames}; - let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(&*expanded_decl); + let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl); // now, a renamer for *all* idents, for the body: let mut rename_fld = IdentRenamer{renames: &renames}; let rewritten_body = fld.fold_block(rename_fld.fold_block(block)); @@ -924,36 +927,36 @@ pub struct MacroExpander<'a, 'b:'a> { } impl<'a, 'b> Folder for MacroExpander<'a, 'b> { - fn fold_expr(&mut self, expr: Gc) -> Gc { + fn fold_expr(&mut self, expr: P) -> P { expand_expr(expr, self) } - fn fold_pat(&mut self, pat: Gc) -> Gc { + fn fold_pat(&mut self, pat: P) -> P { expand_pat(pat, self) } - fn fold_item(&mut self, item: Gc) -> SmallVector> { + fn fold_item(&mut self, item: P) -> SmallVector> { expand_item(item, self) } - fn fold_item_underscore(&mut self, item: &ast::Item_) -> ast::Item_ { + fn fold_item_underscore(&mut self, item: ast::Item_) -> ast::Item_ { expand_item_underscore(item, self) } - 
fn fold_stmt(&mut self, stmt: &ast::Stmt) -> SmallVector> { - expand_stmt(stmt, self) + fn fold_stmt(&mut self, stmt: P) -> SmallVector> { + stmt.and_then(|stmt| expand_stmt(stmt, self)) } fn fold_block(&mut self, block: P) -> P { - expand_block(&*block, self) + expand_block(block, self) } - fn fold_arm(&mut self, arm: &ast::Arm) -> ast::Arm { + fn fold_arm(&mut self, arm: ast::Arm) -> ast::Arm { expand_arm(arm, self) } - fn fold_method(&mut self, method: Gc) -> SmallVector> { - expand_method(&*method, self) + fn fold_method(&mut self, method: P) -> SmallVector> { + expand_method(method, self) } fn new_span(&mut self, span: Span) -> Span { @@ -1033,17 +1036,16 @@ impl Folder for Marker { ctxt: mtwt::apply_mark(self.mark, id.ctxt) } } - fn fold_mac(&mut self, m: &ast::Mac) -> ast::Mac { - let macro = match m.node { - MacInvocTT(ref path, ref tts, ctxt) => { - MacInvocTT(self.fold_path(path), - self.fold_tts(tts.as_slice()), - mtwt::apply_mark(self.mark, ctxt)) - } - }; + fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac { Spanned { - node: macro, - span: m.span, + node: match node { + MacInvocTT(path, tts, ctxt) => { + MacInvocTT(self.fold_path(path), + self.fold_tts(tts.as_slice()), + mtwt::apply_mark(self.mark, ctxt)) + } + }, + span: span, } } } @@ -1054,29 +1056,29 @@ fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec { } // apply a given mark to the given expr. Used following the expansion of a macro. -fn mark_expr(expr: Gc, m: Mrk) -> Gc { +fn mark_expr(expr: P, m: Mrk) -> P { Marker{mark:m}.fold_expr(expr) } // apply a given mark to the given pattern. Used following the expansion of a macro. -fn mark_pat(pat: Gc, m: Mrk) -> Gc { +fn mark_pat(pat: P, m: Mrk) -> P { Marker{mark:m}.fold_pat(pat) } // apply a given mark to the given stmt. Used following the expansion of a macro. -fn mark_stmt(expr: &ast::Stmt, m: Mrk) -> Gc { +fn mark_stmt(expr: P, m: Mrk) -> P { Marker{mark:m}.fold_stmt(expr) .expect_one("marking a stmt didn't return exactly one stmt") } // apply a given mark to the given item. Used following the expansion of a macro. -fn mark_item(expr: Gc, m: Mrk) -> Gc { +fn mark_item(expr: P, m: Mrk) -> P { Marker{mark:m}.fold_item(expr) .expect_one("marking an item didn't return exactly one item") } // apply a given mark to the given item. Used following the expansion of a macro. 
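`expand_pat`, `PatIdentRenamer::fold_pat`, and `expand_method` in the hunks above all share one shape: inspect the node through a borrow, return the untouched pointer early for the uninteresting cases, and only then consume the pointer and rebuild the node by value. Sketched here in current Rust with `Box` standing in for `P`:

```rust
// Sketch of the "peek, early-return, then consume" shape.
enum Pat {
    Wild,
    Ident(String),
}

fn rename_ident_pat(pat: Box<Pat>, suffix: &str) -> Box<Pat> {
    // Cheap check by reference; the early return hands back the original
    // allocation untouched.
    match *pat {
        Pat::Ident(_) => {}
        _ => return pat,
    }
    // Now it is safe to take the node apart by value.
    Box::new(match *pat {
        Pat::Ident(name) => Pat::Ident(format!("{}{}", name, suffix)),
        _ => unreachable!(),
    })
}

fn main() {
    match *rename_ident_pat(Box::new(Pat::Ident("x".into())), "_renamed") {
        Pat::Ident(ref name) => assert_eq!(name, "x_renamed"),
        _ => panic!("expected an ident pattern"),
    }
    // Non-ident patterns pass through unchanged.
    assert!(matches!(*rename_ident_pat(Box::new(Pat::Wild), "_renamed"), Pat::Wild));
}
```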
-fn mark_method(expr: Gc, m: Mrk) -> Gc { +fn mark_method(expr: P, m: Mrk) -> P { Marker{mark:m}.fold_method(expr) .expect_one("marking an item didn't return exactly one method") } @@ -1128,13 +1130,12 @@ mod test { use fold::Folder; use parse; use parse::token; + use ptr::P; use util::parser_testing::{string_to_parser}; use util::parser_testing::{string_to_pat, string_to_crate, strs_to_idents}; use visit; use visit::Visitor; - use std::gc::GC; - // a visitor that extracts the paths // from a given thingy and puts them in a mutable // array (passed in to the traversal) @@ -1239,7 +1240,7 @@ mod test { let attr1 = make_dummy_attr ("foo"); let attr2 = make_dummy_attr ("bar"); let escape_attr = make_dummy_attr ("macro_escape"); - let attrs1 = vec!(attr1, escape_attr, attr2); + let attrs1 = vec!(attr1.clone(), escape_attr, attr2.clone()); assert_eq!(contains_macro_escape(attrs1.as_slice()),true); let attrs2 = vec!(attr1,attr2); assert_eq!(contains_macro_escape(attrs2.as_slice()),false); @@ -1252,10 +1253,10 @@ mod test { node: Attribute_ { id: attr::mk_attr_id(), style: AttrOuter, - value: box(GC) Spanned { + value: P(Spanned { node: MetaWord(token::intern_and_get_ident(s)), span: codemap::DUMMY_SP, - }, + }), is_sugared_doc: false, } } diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 0bb32c73ca264..271a5137bbf36 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -9,7 +9,6 @@ // except according to those terms. use ast; -use ast::P; use codemap::{Span, respan}; use ext::base::*; use ext::base; @@ -17,9 +16,9 @@ use ext::build::AstBuilder; use fmt_macros as parse; use parse::token::InternedString; use parse::token; +use ptr::P; use std::collections::HashMap; -use std::gc::{Gc, GC}; #[deriving(PartialEq)] enum ArgumentType { @@ -39,13 +38,13 @@ struct Context<'a, 'b:'a> { /// Parsed argument expressions and the types that we've found so far for /// them. - args: Vec>, + args: Vec>, arg_types: Vec>, /// Parsed named expressions and the types that we've found for them so far. /// Note that we keep a side-array of the ordering of the named arguments /// found to be sure that we can translate them in the same order that they /// were declared in. - names: HashMap>, + names: HashMap>, name_types: HashMap, name_ordering: Vec, @@ -53,14 +52,14 @@ struct Context<'a, 'b:'a> { literal: String, /// Collection of the compiled `rt::Argument` structures - pieces: Vec>, + pieces: Vec>, /// Collection of string literals - str_pieces: Vec>, + str_pieces: Vec>, /// Stays `true` if all formatting parameters are default (as in "{}{}"). 
all_pieces_simple: bool, name_positions: HashMap, - method_statics: Vec>, + method_statics: Vec>, /// Updated as arguments are consumed or methods are entered nest_level: uint, @@ -68,8 +67,8 @@ struct Context<'a, 'b:'a> { } pub enum Invocation { - Call(Gc), - MethodCall(Gc, ast::Ident), + Call(P), + MethodCall(P, ast::Ident), } /// Parses the arguments from the given list of tokens, returning None @@ -82,10 +81,10 @@ pub enum Invocation { /// named arguments)) fn parse_args(ecx: &mut ExtCtxt, sp: Span, allow_method: bool, tts: &[ast::TokenTree]) - -> (Invocation, Option<(Gc, Vec>, Vec, - HashMap>)>) { + -> (Invocation, Option<(P, Vec>, Vec, + HashMap>)>) { let mut args = Vec::new(); - let mut names = HashMap::>::new(); + let mut names = HashMap::>::new(); let mut order = Vec::new(); let mut p = ecx.new_parser_from_tts(tts); @@ -323,44 +322,44 @@ impl<'a, 'b> Context<'a, 'b> { /// These attributes are applied to all statics that this syntax extension /// will generate. - fn static_attrs(&self) -> Vec { + fn static_attrs(ecx: &ExtCtxt, fmtsp: Span) -> Vec { // Flag statics as `inline` so LLVM can merge duplicate globals as much // as possible (which we're generating a whole lot of). - let unnamed = self.ecx.meta_word(self.fmtsp, InternedString::new("inline")); - let unnamed = self.ecx.attribute(self.fmtsp, unnamed); + let unnamed = ecx.meta_word(fmtsp, InternedString::new("inline")); + let unnamed = ecx.attribute(fmtsp, unnamed); // Do not warn format string as dead code - let dead_code = self.ecx.meta_word(self.fmtsp, - InternedString::new("dead_code")); - let allow_dead_code = self.ecx.meta_list(self.fmtsp, - InternedString::new("allow"), - vec!(dead_code)); - let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code); - return vec!(unnamed, allow_dead_code); + let dead_code = ecx.meta_word(fmtsp, InternedString::new("dead_code")); + let allow_dead_code = ecx.meta_list(fmtsp, + InternedString::new("allow"), + vec![dead_code]); + let allow_dead_code = ecx.attribute(fmtsp, allow_dead_code); + vec![unnamed, allow_dead_code] } - fn rtpath(&self, s: &str) -> Vec { - vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("rt"), self.ecx.ident_of(s)) + fn rtpath(ecx: &ExtCtxt, s: &str) -> Vec { + vec![ecx.ident_of("std"), ecx.ident_of("fmt"), ecx.ident_of("rt"), ecx.ident_of(s)] } - fn trans_count(&self, c: parse::Count) -> Gc { + fn trans_count(&self, c: parse::Count) -> P { let sp = self.fmtsp; match c { parse::CountIs(i) => { - self.ecx.expr_call_global(sp, self.rtpath("CountIs"), + self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIs"), vec!(self.ecx.expr_uint(sp, i))) } parse::CountIsParam(i) => { - self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), + self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "CountIsParam"), vec!(self.ecx.expr_uint(sp, i))) } parse::CountImplied => { - let path = self.ecx.path_global(sp, self.rtpath("CountImplied")); + let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, + "CountImplied")); self.ecx.expr_path(path) } parse::CountIsNextParam => { - let path = self.ecx.path_global(sp, self.rtpath("CountIsNextParam")); + let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, + "CountIsNextParam")); self.ecx.expr_path(path) } parse::CountIsName(n) => { @@ -369,14 +368,14 @@ impl<'a, 'b> Context<'a, 'b> { None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); - self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), + self.ecx.expr_call_global(sp, 
Context::rtpath(self.ecx, "CountIsParam"), vec!(self.ecx.expr_uint(sp, i))) } } } /// Translate the accumulated string literals to a literal expression - fn trans_literal_string(&mut self) -> Gc { + fn trans_literal_string(&mut self) -> P { let sp = self.fmtsp; let s = token::intern_and_get_ident(self.literal.as_slice()); self.literal.clear(); @@ -385,7 +384,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate a `parse::Piece` to a static `rt::Argument` or append /// to the `literal` string. - fn trans_piece(&mut self, piece: &parse::Piece) -> Option> { + fn trans_piece(&mut self, piece: &parse::Piece) -> Option> { let sp = self.fmtsp; match *piece { parse::String(s) => { @@ -397,12 +396,12 @@ impl<'a, 'b> Context<'a, 'b> { let pos = match arg.position { // These two have a direct mapping parse::ArgumentNext => { - let path = self.ecx.path_global(sp, - self.rtpath("ArgumentNext")); + let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, + "ArgumentNext")); self.ecx.expr_path(path) } parse::ArgumentIs(i) => { - self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), + self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"), vec!(self.ecx.expr_uint(sp, i))) } // Named arguments are converted to positional arguments at @@ -413,7 +412,7 @@ impl<'a, 'b> Context<'a, 'b> { None => 0, // error already emitted elsewhere }; let i = i + self.args.len(); - self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), + self.ecx.expr_call_global(sp, Context::rtpath(self.ecx, "ArgumentIs"), vec!(self.ecx.expr_uint(sp, i))) } }; @@ -440,23 +439,23 @@ impl<'a, 'b> Context<'a, 'b> { let fill = self.ecx.expr_lit(sp, ast::LitChar(fill)); let align = match arg.format.align { parse::AlignLeft => { - self.ecx.path_global(sp, self.rtpath("AlignLeft")) + self.ecx.path_global(sp, Context::rtpath(self.ecx, "AlignLeft")) } parse::AlignRight => { - self.ecx.path_global(sp, self.rtpath("AlignRight")) + self.ecx.path_global(sp, Context::rtpath(self.ecx, "AlignRight")) } parse::AlignCenter => { - self.ecx.path_global(sp, self.rtpath("AlignCenter")) + self.ecx.path_global(sp, Context::rtpath(self.ecx, "AlignCenter")) } parse::AlignUnknown => { - self.ecx.path_global(sp, self.rtpath("AlignUnknown")) + self.ecx.path_global(sp, Context::rtpath(self.ecx, "AlignUnknown")) } }; let align = self.ecx.expr_path(align); let flags = self.ecx.expr_uint(sp, arg.format.flags); let prec = self.trans_count(arg.format.precision); let width = self.trans_count(arg.format.width); - let path = self.ecx.path_global(sp, self.rtpath("FormatSpec")); + let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec")); let fmt = self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("fill"), fill), self.ecx.field_imm(sp, self.ecx.ident_of("align"), align), @@ -464,7 +463,7 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.field_imm(sp, self.ecx.ident_of("precision"), prec), self.ecx.field_imm(sp, self.ecx.ident_of("width"), width))); - let path = self.ecx.path_global(sp, self.rtpath("Argument")); + let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "Argument")); Some(self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("position"), pos), self.ecx.field_imm(sp, self.ecx.ident_of("format"), fmt)))) @@ -472,29 +471,28 @@ impl<'a, 'b> Context<'a, 'b> { } } - fn item_static_array(&self, + fn item_static_array(ecx: &mut ExtCtxt, name: ast::Ident, - piece_ty: Gc, - pieces: Vec>) - -> ast::Stmt - { - let pieces_len = self.ecx.expr_uint(self.fmtsp, pieces.len()); - let fmt = 
self.ecx.expr_vec(self.fmtsp, pieces); + piece_ty: P, + pieces: Vec>) + -> P { + let fmtsp = piece_ty.span; + let pieces_len = ecx.expr_uint(fmtsp, pieces.len()); + let fmt = ecx.expr_vec(fmtsp, pieces); let ty = ast::TyFixedLengthVec( piece_ty, pieces_len ); - let ty = self.ecx.ty(self.fmtsp, ty); + let ty = ecx.ty(fmtsp, ty); let st = ast::ItemStatic(ty, ast::MutImmutable, fmt); - let item = self.ecx.item(self.fmtsp, name, - self.static_attrs(), st); - let decl = respan(self.fmtsp, ast::DeclItem(item)); - respan(self.fmtsp, ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID)) + let item = ecx.item(fmtsp, name, Context::static_attrs(ecx, fmtsp), st); + let decl = respan(fmtsp, ast::DeclItem(item)); + P(respan(fmtsp, ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID))) } /// Actually builds the expression which the iformat! block will be expanded /// to - fn to_expr(&self, invocation: Invocation) -> Gc { + fn to_expr(mut self, invocation: Invocation) -> P { let mut lets = Vec::new(); let mut locals = Vec::new(); let mut names = Vec::from_fn(self.name_positions.len(), |_| None); @@ -502,10 +500,10 @@ impl<'a, 'b> Context<'a, 'b> { let mut heads = Vec::new(); // First, declare all of our methods that are statics - for &method in self.method_statics.iter() { + for method in self.method_statics.move_iter() { let decl = respan(self.fmtsp, ast::DeclItem(method)); - lets.push(box(GC) respan(self.fmtsp, - ast::StmtDecl(box(GC) decl, ast::DUMMY_NODE_ID))); + lets.push(P(respan(self.fmtsp, + ast::StmtDecl(P(decl), ast::DUMMY_NODE_ID)))); } // Next, build up the static array which will become our precompiled @@ -517,9 +515,10 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.ty_ident(self.fmtsp, self.ecx.ident_of("str")), Some(static_lifetime), ast::MutImmutable); - lets.push(box(GC) self.item_static_array(static_str_name, - piece_ty, - self.str_pieces.clone())); + lets.push(Context::item_static_array(self.ecx, + static_str_name, + piece_ty, + self.str_pieces)); // Then, build up the static array which will store our precompiled // nonstandard placeholders, if there are any. @@ -527,13 +526,14 @@ impl<'a, 'b> Context<'a, 'b> { if !self.all_pieces_simple { let piece_ty = self.ecx.ty_path(self.ecx.path_all( self.fmtsp, - true, self.rtpath("Argument"), + true, Context::rtpath(self.ecx, "Argument"), vec![static_lifetime], vec![] ), None); - lets.push(box(GC) self.item_static_array(static_args_name, - piece_ty, - self.pieces.clone())); + lets.push(Context::item_static_array(self.ecx, + static_args_name, + piece_ty, + self.pieces)); } // Right now there is a bug such that for the expression: @@ -543,31 +543,35 @@ impl<'a, 'b> Context<'a, 'b> { // format! string are shoved into locals. Furthermore, we shove the address // of each variable because we don't want to move out of the arguments // passed to this function. 
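// The hunks above replace `box(GC) ...` / `Gc<T>` with `P(...)` / `P<T>`
// from `syntax::ptr`. As a reading aid, here is a minimal sketch (in
// present-day Rust) of the kind of owned, immutable AST pointer involved;
// the names mirror `syntax::ptr::P`, but the details are illustrative, not
// the crate's actual definition.
use std::ops::Deref;

pub struct P<T> {
    ptr: Box<T>,
}

// A free function spelled like the type, so call sites read `P(expr)` much
// like the old `box(GC) expr`.
#[allow(non_snake_case)]
pub fn P<T>(value: T) -> P<T> {
    P { ptr: Box::new(value) }
}

impl<T> Deref for P<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.ptr
    }
}

impl<T> P<T> {
    // Consume the pointer and rebuild it from the transformed payload; this
    // is what lets the by-value folds move fields instead of cloning them.
    pub fn map<F: FnOnce(T) -> T>(self, f: F) -> P<T> {
        P(f(*self.ptr))
    }

    // Consume the pointer and produce an arbitrary result from the payload.
    pub fn and_then<U, F: FnOnce(T) -> U>(self, f: F) -> U {
        f(*self.ptr)
    }
}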
- for (i, &e) in self.args.iter().enumerate() { - if self.arg_types.get(i).is_none() { - continue // error already generated - } + for (i, e) in self.args.move_iter().enumerate() { + let arg_ty = match self.arg_types.get(i).as_ref() { + Some(ty) => ty, + None => continue // error already generated + }; let name = self.ecx.ident_of(format!("__arg{}", i).as_slice()); pats.push(self.ecx.pat_ident(e.span, name)); + locals.push(Context::format_arg(self.ecx, e.span, arg_ty, + self.ecx.expr_ident(e.span, name))); heads.push(self.ecx.expr_addr_of(e.span, e)); - locals.push(self.format_arg(e.span, Exact(i), - self.ecx.expr_ident(e.span, name))); } for name in self.name_ordering.iter() { - let e = match self.names.find(name) { - Some(&e) if self.name_types.contains_key(name) => e, - Some(..) | None => continue + let e = match self.names.pop(name) { + Some(e) => e, + None => continue + }; + let arg_ty = match self.name_types.find(name) { + Some(ty) => ty, + None => continue }; let lname = self.ecx.ident_of(format!("__arg{}", *name).as_slice()); pats.push(self.ecx.pat_ident(e.span, lname)); - heads.push(self.ecx.expr_addr_of(e.span, e)); *names.get_mut(*self.name_positions.get(name)) = - Some(self.format_arg(e.span, - Named((*name).clone()), - self.ecx.expr_ident(e.span, lname))); + Some(Context::format_arg(self.ecx, e.span, arg_ty, + self.ecx.expr_ident(e.span, lname))); + heads.push(self.ecx.expr_addr_of(e.span, e)); } // Now create a vector containing all the arguments @@ -611,12 +615,14 @@ impl<'a, 'b> Context<'a, 'b> { let res = self.ecx.expr_ident(self.fmtsp, resname); let result = match invocation { Call(e) => { - self.ecx.expr_call(e.span, e, - vec!(self.ecx.expr_addr_of(e.span, res))) + let span = e.span; + self.ecx.expr_call(span, e, + vec!(self.ecx.expr_addr_of(span, res))) } MethodCall(e, m) => { - self.ecx.expr_method_call(e.span, e, m, - vec!(self.ecx.expr_addr_of(e.span, res))) + let span = e.span; + self.ecx.expr_method_call(span, e, m, + vec!(self.ecx.expr_addr_of(span, res))) } }; let body = self.ecx.expr_block(self.ecx.block(self.fmtsp, lets, @@ -655,13 +661,9 @@ impl<'a, 'b> Context<'a, 'b> { self.ecx.expr_match(self.fmtsp, head, vec!(arm)) } - fn format_arg(&self, sp: Span, argno: Position, arg: Gc) - -> Gc { - let ty = match argno { - Exact(ref i) => self.arg_types.get(*i).get_ref(), - Named(ref s) => self.name_types.get(s) - }; - + fn format_arg(ecx: &ExtCtxt, sp: Span, + ty: &ArgumentType, arg: P) + -> P { let (krate, fmt_fn) = match *ty { Known(ref tyname) => { match tyname.as_slice() { @@ -681,36 +683,35 @@ impl<'a, 'b> Context<'a, 'b> { "x" => ("std", "secret_lower_hex"), "X" => ("std", "secret_upper_hex"), _ => { - self.ecx - .span_err(sp, - format!("unknown format trait `{}`", - *tyname).as_slice()); + ecx.span_err(sp, + format!("unknown format trait `{}`", + *tyname).as_slice()); ("std", "dummy") } } } String => { - return self.ecx.expr_call_global(sp, vec!( - self.ecx.ident_of("std"), - self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentstr")), vec!(arg)) + return ecx.expr_call_global(sp, vec![ + ecx.ident_of("std"), + ecx.ident_of("fmt"), + ecx.ident_of("argumentstr")], vec![arg]) } Unsigned => { - return self.ecx.expr_call_global(sp, vec!( - self.ecx.ident_of("std"), - self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentuint")), vec!(arg)) + return ecx.expr_call_global(sp, vec![ + ecx.ident_of("std"), + ecx.ident_of("fmt"), + ecx.ident_of("argumentuint")], vec![arg]) } }; - let format_fn = self.ecx.path_global(sp, vec!( - self.ecx.ident_of(krate), - 
self.ecx.ident_of("fmt"), - self.ecx.ident_of(fmt_fn))); - self.ecx.expr_call_global(sp, vec!( - self.ecx.ident_of("std"), - self.ecx.ident_of("fmt"), - self.ecx.ident_of("argument")), vec!(self.ecx.expr_path(format_fn), arg)) + let format_fn = ecx.path_global(sp, vec![ + ecx.ident_of(krate), + ecx.ident_of("fmt"), + ecx.ident_of(fmt_fn)]); + ecx.expr_call_global(sp, vec![ + ecx.ident_of("std"), + ecx.ident_of("fmt"), + ecx.ident_of("argument")], vec![ecx.expr_path(format_fn), arg]) } } @@ -744,12 +745,11 @@ pub fn expand_format_args_method<'cx>(ecx: &'cx mut ExtCtxt, sp: Span, /// expression. pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, invocation: Invocation, - efmt: Gc, - args: Vec>, + efmt: P, + args: Vec>, name_ordering: Vec, - names: HashMap>) - -> Gc -{ + names: HashMap>) + -> P { let arg_types = Vec::from_fn(args.len(), |_| None); let mut cx = Context { ecx: ecx, @@ -796,7 +796,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, } match parser.errors.shift() { Some(error) => { - cx.ecx.span_err(efmt.span, + cx.ecx.span_err(cx.fmtsp, format!("invalid format string: {}", error).as_slice()); return DummyResult::raw_expr(sp); diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 808e671f868d3..6f13a2e6a51fc 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -15,8 +15,7 @@ use ext::base; use ext::build::AstBuilder; use parse::token::*; use parse::token; - -use std::gc::Gc; +use ptr::P; /** * @@ -36,14 +35,13 @@ pub mod rt { use parse::token; use parse; use print::pprust; + use ptr::P; use ast::{TokenTree, Generics, Expr}; pub use parse::new_parser_from_tts; pub use codemap::{BytePos, Span, dummy_spanned}; - use std::gc::Gc; - pub trait ToTokens { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec ; } @@ -107,13 +105,13 @@ pub mod rt { } macro_rules! 
impl_to_source( - (Gc<$t:ty>, $pp:ident) => ( - impl ToSource for Gc<$t> { + (P<$t:ty>, $pp:ident) => ( + impl ToSource for P<$t> { fn to_source(&self) -> String { pprust::$pp(&**self) } } - impl ToSourceWithHygiene for Gc<$t> { + impl ToSourceWithHygiene for P<$t> { fn to_source_with_hygiene(&self) -> String { pprust::with_hygiene::$pp(&**self) } @@ -182,18 +180,18 @@ pub mod rt { impl_to_source!(ast::Block, block_to_string) impl_to_source!(ast::Arg, arg_to_string) impl_to_source!(Generics, generics_to_string) - impl_to_source!(Gc, item_to_string) - impl_to_source!(Gc, method_to_string) - impl_to_source!(Gc, stmt_to_string) - impl_to_source!(Gc, expr_to_string) - impl_to_source!(Gc, pat_to_string) + impl_to_source!(P, item_to_string) + impl_to_source!(P, method_to_string) + impl_to_source!(P, stmt_to_string) + impl_to_source!(P, expr_to_string) + impl_to_source!(P, pat_to_string) impl_to_source!(ast::Arm, arm_to_string) impl_to_source_slice!(ast::Ty, ", ") - impl_to_source_slice!(Gc, "\n\n") + impl_to_source_slice!(P, "\n\n") impl ToSource for ast::Attribute_ { fn to_source(&self) -> String { - pprust::attribute_to_string(&dummy_spanned(*self)) + pprust::attribute_to_string(&dummy_spanned(self.clone())) } } impl ToSourceWithHygiene for ast::Attribute_ { @@ -315,16 +313,16 @@ pub mod rt { ) impl_to_tokens!(ast::Ident) - impl_to_tokens!(Gc) - impl_to_tokens!(Gc) + impl_to_tokens!(P) + impl_to_tokens!(P) impl_to_tokens!(ast::Arm) - impl_to_tokens!(Gc) - impl_to_tokens_lifetime!(&'a [Gc]) + impl_to_tokens!(P) + impl_to_tokens_lifetime!(&'a [P]) impl_to_tokens!(ast::Ty) impl_to_tokens_lifetime!(&'a [ast::Ty]) impl_to_tokens!(Generics) - impl_to_tokens!(Gc) - impl_to_tokens!(Gc) + impl_to_tokens!(P) + impl_to_tokens!(P) impl_to_tokens!(ast::Block) impl_to_tokens!(ast::Arg) impl_to_tokens!(ast::Attribute_) @@ -344,9 +342,9 @@ pub mod rt { impl_to_tokens!(u64) pub trait ExtParseUtils { - fn parse_item(&self, s: String) -> Gc; - fn parse_expr(&self, s: String) -> Gc; - fn parse_stmt(&self, s: String) -> Gc; + fn parse_item(&self, s: String) -> P; + fn parse_expr(&self, s: String) -> P; + fn parse_stmt(&self, s: String) -> P; fn parse_tts(&self, s: String) -> Vec; } @@ -358,7 +356,7 @@ pub mod rt { impl<'a> ExtParseUtils for ExtCtxt<'a> { - fn parse_item(&self, s: String) -> Gc { + fn parse_item(&self, s: String) -> P { let res = parse::parse_item_from_source_str( "".to_string(), s, @@ -373,7 +371,7 @@ pub mod rt { } } - fn parse_stmt(&self, s: String) -> Gc { + fn parse_stmt(&self, s: String) -> P { parse::parse_stmt_from_source_str("".to_string(), s, self.cfg(), @@ -381,7 +379,7 @@ pub mod rt { self.parse_sess()) } - fn parse_expr(&self, s: String) -> Gc { + fn parse_expr(&self, s: String) -> P { parse::parse_expr_from_source_str("".to_string(), s, self.cfg(), @@ -491,7 +489,7 @@ fn id_ext(str: &str) -> ast::Ident { } // Lift an ident to the expr that evaluates to that ident. 
-fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> Gc { +fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { let e_str = cx.expr_str(sp, token::get_ident(ident)); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), @@ -500,7 +498,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> Gc { } // Lift a name to the expr that evaluates to that name -fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> Gc { +fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { let e_str = cx.expr_str(sp, token::get_ident(ident)); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), @@ -508,17 +506,17 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> Gc { vec!(e_str)) } -fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> Gc { +fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) } -fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> Gc { +fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)); cx.expr_path(cx.path_global(sp, idents)) } -fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> Gc { +fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> P { let name = match bop { PLUS => "PLUS", MINUS => "MINUS", @@ -534,7 +532,7 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> Gc { mk_token_path(cx, sp, name) } -fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> Gc { +fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { match *tok { BINOP(binop) => { @@ -640,7 +638,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> Gc { } -fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { +fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { match *tt { ast::TTTok(sp, ref tok) => { let e_sp = cx.expr_ident(sp, id_ext("_sp")); @@ -680,7 +678,7 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec> { } fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> Vec> { + -> Vec> { let mut ss = Vec::new(); for tt in tts.iter() { ss.push_all_move(mk_tt(cx, sp, tt)); @@ -689,7 +687,7 @@ fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> (Gc, Gc) { + -> (P, P) { // NB: It appears that the main parser loses its mind if we consider // $foo as a TTNonterminal during the main parse, so we have to re-parse // under quote_depth > 0. 
This is silly and should go away; the _guess_ is @@ -757,8 +755,8 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) fn expand_wrapper(cx: &ExtCtxt, sp: Span, - cx_expr: Gc, - expr: Gc) -> Gc { + cx_expr: P, + expr: P) -> P { let uses = [ &["syntax", "ext", "quote", "rt"], ].iter().map(|path| { @@ -776,8 +774,8 @@ fn expand_wrapper(cx: &ExtCtxt, fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, - arg_exprs: Vec>, - tts: &[ast::TokenTree]) -> Gc { + arg_exprs: Vec> , + tts: &[ast::TokenTree]) -> P { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let cfg_call = || cx.expr_method_call( diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 509d5bd442182..3006bcaf6f876 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -87,9 +87,9 @@ use parse::attr::ParserAttr; use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; use parse::token::{Token, EOF, Nonterminal}; use parse::token; +use ptr::P; use std::rc::Rc; -use std::gc::GC; use std::collections::HashMap; /* to avoid costly uniqueness checks, we require that `MatchSeq` always has a @@ -451,7 +451,7 @@ pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { "meta" => token::NtMeta(p.parse_meta_item()), "tt" => { p.quote_depth += 1u; //but in theory, non-quoted tts might be useful - let res = token::NtTT(box(GC) p.parse_token_tree()); + let res = token::NtTT(P(p.parse_token_tree())); p.quote_depth -= 1u; res } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d8f0eb32ad7bf..6c7bbb2384c12 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
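// The quote.rs hunks above retarget the `impl_to_source!` / `impl_to_tokens!`
// helper macros from `Gc<$t>` to `P<$t>`. A small sketch of that macro
// pattern follows, with a hypothetical `ToSource` trait, `Box` standing in
// for the AST pointer, and made-up pretty-printer functions; it illustrates
// the shape of the macro, not the real libsyntax definitions.
trait ToSource {
    fn to_source(&self) -> String;
}

macro_rules! impl_to_source {
    // Pointer case: delegate to the pretty-printer through the payload.
    (Box<$t:ty>, $pp:ident) => {
        impl ToSource for Box<$t> {
            fn to_source(&self) -> String {
                $pp(&**self)
            }
        }
    };
    // Plain case.
    ($t:ty, $pp:ident) => {
        impl ToSource for $t {
            fn to_source(&self) -> String {
                $pp(self)
            }
        }
    };
}

// Hypothetical stand-ins for the `pprust::*_to_string` printers.
fn int_to_string(e: &i32) -> String { e.to_string() }
fn pair_to_string(p: &(i32, i32)) -> String { format!("({}, {})", p.0, p.1) }

impl_to_source!(Box<i32>, int_to_string);
impl_to_source!((i32, i32), pair_to_string);

fn main() {
    assert_eq!(Box::new(7i32).to_source(), "7");
    assert_eq!((1i32, 2i32).to_source(), "(1, 2)");
}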
-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq}; -use ast::{TTDelim}; +use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim}; use ast; use codemap::{Span, Spanned, DUMMY_SP}; use ext::base::{ExtCtxt, MacResult, MacroDef}; @@ -24,11 +23,12 @@ use parse::token::{special_idents, gensym_ident}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; use parse::token; use print; +use ptr::P; + use util::small_vector::SmallVector; use std::cell::RefCell; use std::rc::Rc; -use std::gc::Gc; struct ParserAnyMacro<'a> { parser: RefCell>, @@ -58,17 +58,17 @@ impl<'a> ParserAnyMacro<'a> { } impl<'a> MacResult for ParserAnyMacro<'a> { - fn make_expr(&self) -> Option> { + fn make_expr(self: Box>) -> Option> { let ret = self.parser.borrow_mut().parse_expr(); self.ensure_complete_parse(true); Some(ret) } - fn make_pat(&self) -> Option> { + fn make_pat(self: Box>) -> Option> { let ret = self.parser.borrow_mut().parse_pat(); self.ensure_complete_parse(false); Some(ret) } - fn make_items(&self) -> Option>> { + fn make_items(self: Box>) -> Option>> { let mut ret = SmallVector::zero(); loop { let mut parser = self.parser.borrow_mut(); @@ -84,7 +84,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> { Some(ret) } - fn make_methods(&self) -> Option>> { + fn make_methods(self: Box>) -> Option>> { let mut ret = SmallVector::zero(); loop { let mut parser = self.parser.borrow_mut(); @@ -97,7 +97,7 @@ impl<'a> MacResult for ParserAnyMacro<'a> { Some(ret) } - fn make_stmt(&self) -> Option> { + fn make_stmt(self: Box>) -> Option> { let attrs = self.parser.borrow_mut().parse_outer_attributes(); let ret = self.parser.borrow_mut().parse_stmt(attrs); self.ensure_complete_parse(true); @@ -127,11 +127,11 @@ impl TTMacroExpander for MacroRulesMacroExpander { } struct MacroRulesDefiner { - def: RefCell> + def: Option } impl MacResult for MacroRulesDefiner { - fn make_def(&self) -> Option { - Some(self.def.borrow_mut().take().expect("MacroRulesDefiner expanded twice")) + fn make_def(&mut self) -> Option { + Some(self.def.take().expect("empty MacroRulesDefiner")) } } @@ -170,8 +170,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, Success(named_matches) => { let rhs = match *rhses[i] { // okay, what's your transcriber? - MatchedNonterminal(NtTT(tt)) => { - match *tt { + MatchedNonterminal(NtTT(ref tt)) => { + match **tt { // cut off delimiters; don't parse 'em TTDelim(ref tts) => { (*tts).slice(1u,(*tts).len()-1u) @@ -269,9 +269,9 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt, }; box MacroRulesDefiner { - def: RefCell::new(Some(MacroDef { + def: Some(MacroDef { name: token::get_ident(name).to_string(), ext: NormalTT(exp, Some(sp)) - })) + }) } as Box } diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 30b7317fa56f1..e63954c36806e 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -23,11 +23,35 @@ use ast; use ast_util; use codemap::{respan, Span, Spanned}; use parse::token; +use ptr::P; use owned_slice::OwnedSlice; use util::small_vector::SmallVector; use std::rc::Rc; -use std::gc::{Gc, GC}; + +// This could have a better place to live. +pub trait MoveMap { + fn move_map(self, f: |T| -> T) -> Self; +} + +impl MoveMap for Vec { + fn move_map(mut self, f: |T| -> T) -> Vec { + use std::{mem, ptr}; + for p in self.mut_iter() { + unsafe { + // FIXME(#5016) this shouldn't need to zero to be safe. 
+ mem::move_val_init(p, f(ptr::read_and_zero(p))); + } + } + self + } +} + +impl MoveMap for OwnedSlice { + fn move_map(self, f: |T| -> T) -> OwnedSlice { + OwnedSlice::from_vec(self.into_vec().move_map(f)) + } +} pub trait Folder { // Any additions to this trait should happen in form @@ -42,91 +66,91 @@ pub trait Folder { noop_fold_crate(c, self) } - fn fold_meta_items(&mut self, meta_items: &[Gc]) -> Vec> { + fn fold_meta_items(&mut self, meta_items: Vec>) -> Vec> { noop_fold_meta_items(meta_items, self) } - fn fold_meta_item(&mut self, meta_item: &MetaItem) -> MetaItem { + fn fold_meta_item(&mut self, meta_item: P) -> P { noop_fold_meta_item(meta_item, self) } - fn fold_view_path(&mut self, view_path: Gc) -> Gc { + fn fold_view_path(&mut self, view_path: P) -> P { noop_fold_view_path(view_path, self) } - fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem { + fn fold_view_item(&mut self, vi: ViewItem) -> ViewItem { noop_fold_view_item(vi, self) } - fn fold_foreign_item(&mut self, ni: Gc) -> Gc { - noop_fold_foreign_item(&*ni, self) + fn fold_foreign_item(&mut self, ni: P) -> P { + noop_fold_foreign_item(ni, self) } - fn fold_item(&mut self, i: Gc) -> SmallVector> { - noop_fold_item(&*i, self) + fn fold_item(&mut self, i: P) -> SmallVector> { + noop_fold_item(i, self) } - fn fold_item_simple(&mut self, i: &Item) -> Item { + fn fold_item_simple(&mut self, i: Item) -> Item { noop_fold_item_simple(i, self) } - fn fold_struct_field(&mut self, sf: &StructField) -> StructField { + fn fold_struct_field(&mut self, sf: StructField) -> StructField { noop_fold_struct_field(sf, self) } - fn fold_item_underscore(&mut self, i: &Item_) -> Item_ { + fn fold_item_underscore(&mut self, i: Item_) -> Item_ { noop_fold_item_underscore(i, self) } - fn fold_fn_decl(&mut self, d: &FnDecl) -> P { + fn fold_fn_decl(&mut self, d: P) -> P { noop_fold_fn_decl(d, self) } - fn fold_type_method(&mut self, m: &TypeMethod) -> TypeMethod { + fn fold_type_method(&mut self, m: TypeMethod) -> TypeMethod { noop_fold_type_method(m, self) } - fn fold_method(&mut self, m: Gc) -> SmallVector> { - noop_fold_method(&*m, self) + fn fold_method(&mut self, m: P) -> SmallVector> { + noop_fold_method(m, self) } fn fold_block(&mut self, b: P) -> P { noop_fold_block(b, self) } - fn fold_stmt(&mut self, s: &Stmt) -> SmallVector> { - noop_fold_stmt(s, self) + fn fold_stmt(&mut self, s: P) -> SmallVector> { + s.and_then(|s| noop_fold_stmt(s, self)) } - fn fold_arm(&mut self, a: &Arm) -> Arm { + fn fold_arm(&mut self, a: Arm) -> Arm { noop_fold_arm(a, self) } - fn fold_pat(&mut self, p: Gc) -> Gc { + fn fold_pat(&mut self, p: P) -> P { noop_fold_pat(p, self) } - fn fold_decl(&mut self, d: Gc) -> SmallVector> { + fn fold_decl(&mut self, d: P) -> SmallVector> { noop_fold_decl(d, self) } - fn fold_expr(&mut self, e: Gc) -> Gc { - noop_fold_expr(e, self) + fn fold_expr(&mut self, e: P) -> P { + e.map(|e| noop_fold_expr(e, self)) } fn fold_ty(&mut self, t: P) -> P { noop_fold_ty(t, self) } - fn fold_mod(&mut self, m: &Mod) -> Mod { + fn fold_mod(&mut self, m: Mod) -> Mod { noop_fold_mod(m, self) } - fn fold_foreign_mod(&mut self, nm: &ForeignMod) -> ForeignMod { + fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod { noop_fold_foreign_mod(nm, self) } - fn fold_variant(&mut self, v: &Variant) -> P { + fn fold_variant(&mut self, v: P) -> P { noop_fold_variant(v, self) } @@ -138,15 +162,15 @@ pub trait Folder { noop_fold_uint(i, self) } - fn fold_path(&mut self, p: &Path) -> Path { + fn fold_path(&mut self, p: Path) -> Path { 
noop_fold_path(p, self) } - fn fold_local(&mut self, l: Gc) -> Gc { + fn fold_local(&mut self, l: P) -> P { noop_fold_local(l, self) } - fn fold_mac(&mut self, _macro: &Mac) -> Mac { + fn fold_mac(&mut self, _macro: Mac) -> Mac { fail!("fold_mac disabled by default"); // NB: see note about macros above. // if you really want a folder that @@ -155,19 +179,19 @@ pub trait Folder { // fold::noop_fold_mac(_macro, self) } - fn fold_explicit_self(&mut self, es: &ExplicitSelf) -> ExplicitSelf { + fn fold_explicit_self(&mut self, es: ExplicitSelf) -> ExplicitSelf { noop_fold_explicit_self(es, self) } - fn fold_explicit_self_underscore(&mut self, es: &ExplicitSelf_) -> ExplicitSelf_ { + fn fold_explicit_self_underscore(&mut self, es: ExplicitSelf_) -> ExplicitSelf_ { noop_fold_explicit_self_underscore(es, self) } - fn fold_lifetime(&mut self, l: &Lifetime) -> Lifetime { + fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { noop_fold_lifetime(l, self) } - fn fold_lifetime_def(&mut self, l: &LifetimeDef) -> LifetimeDef { + fn fold_lifetime_def(&mut self, l: LifetimeDef) -> LifetimeDef { noop_fold_lifetime_def(l, self) } @@ -175,35 +199,35 @@ pub trait Folder { noop_fold_attribute(at, self) } - fn fold_arg(&mut self, a: &Arg) -> Arg { + fn fold_arg(&mut self, a: Arg) -> Arg { noop_fold_arg(a, self) } - fn fold_generics(&mut self, generics: &Generics) -> Generics { + fn fold_generics(&mut self, generics: Generics) -> Generics { noop_fold_generics(generics, self) } - fn fold_trait_ref(&mut self, p: &TraitRef) -> TraitRef { + fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef { noop_fold_trait_ref(p, self) } - fn fold_struct_def(&mut self, struct_def: Gc) -> Gc { + fn fold_struct_def(&mut self, struct_def: P) -> P { noop_fold_struct_def(struct_def, self) } - fn fold_lifetimes(&mut self, lts: &[Lifetime]) -> Vec { + fn fold_lifetimes(&mut self, lts: Vec) -> Vec { noop_fold_lifetimes(lts, self) } - fn fold_lifetime_defs(&mut self, lts: &[LifetimeDef]) -> Vec { + fn fold_lifetime_defs(&mut self, lts: Vec) -> Vec { noop_fold_lifetime_defs(lts, self) } - fn fold_ty_param(&mut self, tp: &TyParam) -> TyParam { + fn fold_ty_param(&mut self, tp: TyParam) -> TyParam { noop_fold_ty_param(tp, self) } - fn fold_ty_params(&mut self, tps: &[TyParam]) -> OwnedSlice { + fn fold_ty_params(&mut self, tps: OwnedSlice) -> OwnedSlice { noop_fold_ty_params(tps, self) } @@ -215,37 +239,37 @@ pub trait Folder { noop_fold_tts(tts, self) } - fn fold_token(&mut self, t: &token::Token) -> token::Token { + fn fold_token(&mut self, t: token::Token) -> token::Token { noop_fold_token(t, self) } - fn fold_interpolated(&mut self, nt : &token::Nonterminal) -> token::Nonterminal { + fn fold_interpolated(&mut self, nt: token::Nonterminal) -> token::Nonterminal { noop_fold_interpolated(nt, self) } - fn fold_opt_lifetime(&mut self, o_lt: &Option) -> Option { + fn fold_opt_lifetime(&mut self, o_lt: Option) -> Option { noop_fold_opt_lifetime(o_lt, self) } - fn fold_variant_arg(&mut self, va: &VariantArg) -> VariantArg { + fn fold_variant_arg(&mut self, va: VariantArg) -> VariantArg { noop_fold_variant_arg(va, self) } - fn fold_opt_bounds(&mut self, b: &Option>) + fn fold_opt_bounds(&mut self, b: Option>) -> Option> { noop_fold_opt_bounds(b, self) } - fn fold_bounds(&mut self, b: &OwnedSlice) + fn fold_bounds(&mut self, b: OwnedSlice) -> OwnedSlice { noop_fold_bounds(b, self) } - fn fold_ty_param_bound(&mut self, tpb: &TyParamBound) -> TyParamBound { + fn fold_ty_param_bound(&mut self, tpb: TyParamBound) -> TyParamBound { 
noop_fold_ty_param_bound(tpb, self) } - fn fold_mt(&mut self, mt: &MutTy) -> MutTy { + fn fold_mt(&mut self, mt: MutTy) -> MutTy { noop_fold_mt(mt, self) } @@ -253,23 +277,16 @@ pub trait Folder { noop_fold_field(field, self) } - fn fold_where_clause(&mut self, where_clause: &WhereClause) + fn fold_where_clause(&mut self, where_clause: WhereClause) -> WhereClause { noop_fold_where_clause(where_clause, self) } - fn fold_where_predicate(&mut self, where_predicate: &WherePredicate) + fn fold_where_predicate(&mut self, where_predicate: WherePredicate) -> WherePredicate { noop_fold_where_predicate(where_predicate, self) } -// Helper methods: - - fn map_exprs(&self, f: |Gc| -> Gc, - es: &[Gc]) -> Vec> { - es.iter().map(|x| f(*x)).collect() - } - fn new_id(&mut self, i: NodeId) -> NodeId { i } @@ -279,190 +296,161 @@ pub trait Folder { } } -pub fn noop_fold_meta_items(meta_items: &[Gc], fld: &mut T) - -> Vec> { - meta_items.iter().map(|x| box (GC) fld.fold_meta_item(&**x)).collect() +pub fn noop_fold_meta_items(meta_items: Vec>, fld: &mut T) + -> Vec> { + meta_items.move_map(|x| fld.fold_meta_item(x)) } -pub fn noop_fold_view_path(view_path: Gc, fld: &mut T) -> Gc { - let inner_view_path = match view_path.node { - ViewPathSimple(ref ident, ref path, node_id) => { - let id = fld.new_id(node_id); - ViewPathSimple(ident.clone(), - fld.fold_path(path), - id) - } - ViewPathGlob(ref path, node_id) => { - let id = fld.new_id(node_id); - ViewPathGlob(fld.fold_path(path), id) - } - ViewPathList(ref path, ref path_list_idents, node_id) => { - let id = fld.new_id(node_id); - ViewPathList(fld.fold_path(path), - path_list_idents.iter().map(|path_list_ident| { - Spanned { - node: match path_list_ident.node { - PathListIdent { id, name } => - PathListIdent { - id: fld.new_id(id), - name: name.clone() - }, - PathListMod { id } => - PathListMod { id: fld.new_id(id) } - }, - span: fld.new_span(path_list_ident.span) - } - }).collect(), - id) - } - }; - box(GC) Spanned { - node: inner_view_path, - span: fld.new_span(view_path.span), - } +pub fn noop_fold_view_path(view_path: P, fld: &mut T) -> P { + view_path.map(|Spanned {node, span}| Spanned { + node: match node { + ViewPathSimple(ident, path, node_id) => { + let id = fld.new_id(node_id); + ViewPathSimple(ident, fld.fold_path(path), id) + } + ViewPathGlob(path, node_id) => { + let id = fld.new_id(node_id); + ViewPathGlob(fld.fold_path(path), id) + } + ViewPathList(path, path_list_idents, node_id) => { + let id = fld.new_id(node_id); + ViewPathList(fld.fold_path(path), + path_list_idents.move_map(|path_list_ident| { + Spanned { + node: match path_list_ident.node { + PathListIdent { id, name } => + PathListIdent { + id: fld.new_id(id), + name: name + }, + PathListMod { id } => + PathListMod { id: fld.new_id(id) } + }, + span: fld.new_span(path_list_ident.span) + } + }), + id) + } + }, + span: fld.new_span(span) + }) } -pub fn noop_fold_arm(a: &Arm, fld: &mut T) -> Arm { +pub fn noop_fold_arm(Arm {attrs, pats, guard, body}: Arm, fld: &mut T) -> Arm { Arm { - attrs: a.attrs.iter().map(|x| fld.fold_attribute(*x)).collect(), - pats: a.pats.iter().map(|x| fld.fold_pat(*x)).collect(), - guard: a.guard.map(|x| fld.fold_expr(x)), - body: fld.fold_expr(a.body), + attrs: attrs.move_map(|x| fld.fold_attribute(x)), + pats: pats.move_map(|x| fld.fold_pat(x)), + guard: guard.map(|x| fld.fold_expr(x)), + body: fld.fold_expr(body), } } -pub fn noop_fold_decl(d: Gc, fld: &mut T) -> SmallVector> { - let node = match d.node { - DeclLocal(ref l) => 
SmallVector::one(DeclLocal(fld.fold_local(*l))), - DeclItem(it) => { - fld.fold_item(it).move_iter().map(|i| DeclItem(i)).collect() - } - }; - - node.move_iter().map(|node| { - box(GC) Spanned { - node: node, - span: fld.new_span(d.span), - } - }).collect() +pub fn noop_fold_decl(d: P, fld: &mut T) -> SmallVector> { + d.and_then(|Spanned {node, span}| match node { + DeclLocal(l) => SmallVector::one(P(Spanned { + node: DeclLocal(fld.fold_local(l)), + span: fld.new_span(span) + })), + DeclItem(it) => fld.fold_item(it).move_iter().map(|i| P(Spanned { + node: DeclItem(i), + span: fld.new_span(span) + })).collect() + }) } pub fn noop_fold_ty(t: P, fld: &mut T) -> P { - let id = fld.new_id(t.id); - let node = match t.node { - TyNil | TyBot | TyInfer => t.node.clone(), - TyBox(ty) => TyBox(fld.fold_ty(ty)), - TyUniq(ty) => TyUniq(fld.fold_ty(ty)), - TyVec(ty) => TyVec(fld.fold_ty(ty)), - TyPtr(ref mt) => TyPtr(fld.fold_mt(mt)), - TyRptr(ref region, ref mt) => { - TyRptr(fld.fold_opt_lifetime(region), fld.fold_mt(mt)) - } - TyClosure(ref f) => { - TyClosure(box(GC) ClosureTy { - fn_style: f.fn_style, - onceness: f.onceness, - bounds: fld.fold_bounds(&f.bounds), - decl: fld.fold_fn_decl(&*f.decl), - lifetimes: fld.fold_lifetime_defs(f.lifetimes.as_slice()), - }) - } - TyProc(ref f) => { - TyProc(box(GC) ClosureTy { - fn_style: f.fn_style, - onceness: f.onceness, - bounds: fld.fold_bounds(&f.bounds), - decl: fld.fold_fn_decl(&*f.decl), - lifetimes: fld.fold_lifetime_defs(f.lifetimes.as_slice()), - }) - } - TyBareFn(ref f) => { - TyBareFn(box(GC) BareFnTy { - lifetimes: fld.fold_lifetime_defs(f.lifetimes.as_slice()), - fn_style: f.fn_style, - abi: f.abi, - decl: fld.fold_fn_decl(&*f.decl) - }) - } - TyUnboxedFn(ref f) => { - TyUnboxedFn(box(GC) UnboxedFnTy { - decl: fld.fold_fn_decl(&*f.decl), - kind: f.kind, - }) - } - TyTup(ref tys) => TyTup(tys.iter().map(|&ty| fld.fold_ty(ty)).collect()), - TyParen(ref ty) => TyParen(fld.fold_ty(*ty)), - TyPath(ref path, ref bounds, id) => { - let id = fld.new_id(id); - TyPath(fld.fold_path(path), - fld.fold_opt_bounds(bounds), - id) - } - TyFixedLengthVec(ty, e) => { - TyFixedLengthVec(fld.fold_ty(ty), fld.fold_expr(e)) - } - TyTypeof(expr) => TyTypeof(fld.fold_expr(expr)), - }; - P(Ty { - id: id, - span: fld.new_span(t.span), - node: node, + t.map(|Ty {id, node, span}| Ty { + id: fld.new_id(id), + node: match node { + TyNil | TyBot | TyInfer => node, + TyBox(ty) => TyBox(fld.fold_ty(ty)), + TyUniq(ty) => TyUniq(fld.fold_ty(ty)), + TyVec(ty) => TyVec(fld.fold_ty(ty)), + TyPtr(mt) => TyPtr(fld.fold_mt(mt)), + TyRptr(region, mt) => { + TyRptr(fld.fold_opt_lifetime(region), fld.fold_mt(mt)) + } + TyClosure(f) => { + TyClosure(f.map(|ClosureTy {fn_style, onceness, bounds, decl, lifetimes}| { + ClosureTy { + fn_style: fn_style, + onceness: onceness, + bounds: fld.fold_bounds(bounds), + decl: fld.fold_fn_decl(decl), + lifetimes: fld.fold_lifetime_defs(lifetimes) + } + })) + } + TyProc(f) => { + TyProc(f.map(|ClosureTy {fn_style, onceness, bounds, decl, lifetimes}| { + ClosureTy { + fn_style: fn_style, + onceness: onceness, + bounds: fld.fold_bounds(bounds), + decl: fld.fold_fn_decl(decl), + lifetimes: fld.fold_lifetime_defs(lifetimes) + } + })) + } + TyBareFn(f) => { + TyBareFn(f.map(|BareFnTy {lifetimes, fn_style, abi, decl}| BareFnTy { + lifetimes: fld.fold_lifetime_defs(lifetimes), + fn_style: fn_style, + abi: abi, + decl: fld.fold_fn_decl(decl) + })) + } + TyUnboxedFn(f) => { + TyUnboxedFn(f.map(|UnboxedFnTy {decl, kind}| UnboxedFnTy { + decl: 
fld.fold_fn_decl(decl), + kind: kind, + })) + } + TyTup(tys) => TyTup(tys.move_map(|ty| fld.fold_ty(ty))), + TyParen(ty) => TyParen(fld.fold_ty(ty)), + TyPath(path, bounds, id) => { + let id = fld.new_id(id); + TyPath(fld.fold_path(path), + fld.fold_opt_bounds(bounds), + id) + } + TyFixedLengthVec(ty, e) => { + TyFixedLengthVec(fld.fold_ty(ty), fld.fold_expr(e)) + } + TyTypeof(expr) => TyTypeof(fld.fold_expr(expr)) + }, + span: fld.new_span(span) }) } -pub fn noop_fold_foreign_mod(nm: &ForeignMod, fld: &mut T) -> ForeignMod { - ast::ForeignMod { - abi: nm.abi, - view_items: nm.view_items - .iter() - .map(|x| fld.fold_view_item(x)) - .collect(), - items: nm.items - .iter() - .map(|x| fld.fold_foreign_item(*x)) - .collect(), +pub fn noop_fold_foreign_mod(ForeignMod {abi, view_items, items}: ForeignMod, + fld: &mut T) -> ForeignMod { + ForeignMod { + abi: abi, + view_items: view_items.move_map(|x| fld.fold_view_item(x)), + items: items.move_map(|x| fld.fold_foreign_item(x)), } } -pub fn noop_fold_variant(v: &Variant, fld: &mut T) -> P { - let id = fld.new_id(v.node.id); - let kind; - match v.node.kind { - TupleVariantKind(ref variant_args) => { - kind = TupleVariantKind(variant_args.iter().map(|x| - fld.fold_variant_arg(x)).collect()) - } - StructVariantKind(ref struct_def) => { - kind = StructVariantKind(box(GC) ast::StructDef { - fields: struct_def.fields.iter() - .map(|f| fld.fold_struct_field(f)).collect(), - ctor_id: struct_def.ctor_id.map(|c| fld.new_id(c)), - super_struct: match struct_def.super_struct { - Some(t) => Some(fld.fold_ty(t)), - None => None - }, - is_virtual: struct_def.is_virtual, - }) - } - } - - let attrs = v.node.attrs.iter().map(|x| fld.fold_attribute(*x)).collect(); - - let de = match v.node.disr_expr { - Some(e) => Some(fld.fold_expr(e)), - None => None - }; - let node = ast::Variant_ { - name: v.node.name, - attrs: attrs, - kind: kind, - id: id, - disr_expr: de, - vis: v.node.vis, - }; - P(Spanned { - node: node, - span: fld.new_span(v.span), +pub fn noop_fold_variant(v: P, fld: &mut T) -> P { + v.map(|Spanned {node: Variant_ {id, name, attrs, kind, disr_expr, vis}, span}| Spanned { + node: Variant_ { + id: fld.new_id(id), + name: name, + attrs: attrs.move_map(|x| fld.fold_attribute(x)), + kind: match kind { + TupleVariantKind(variant_args) => { + TupleVariantKind(variant_args.move_map(|x| + fld.fold_variant_arg(x))) + } + StructVariantKind(struct_def) => { + StructVariantKind(fld.fold_struct_def(struct_def)) + } + }, + disr_expr: disr_expr.map(|e| fld.fold_expr(e)), + vis: vis, + }, + span: fld.new_span(span), }) } @@ -474,109 +462,105 @@ pub fn noop_fold_uint(i: uint, _: &mut T) -> uint { i } -pub fn noop_fold_path(p: &Path, fld: &mut T) -> Path { - ast::Path { - span: fld.new_span(p.span), - global: p.global, - segments: p.segments.iter().map(|segment| ast::PathSegment { - identifier: fld.fold_ident(segment.identifier), - lifetimes: segment.lifetimes.iter().map(|l| fld.fold_lifetime(l)).collect(), - types: segment.types.iter().map(|&typ| fld.fold_ty(typ)).collect(), - }).collect() +pub fn noop_fold_path(Path {global, segments, span}: Path, fld: &mut T) -> Path { + Path { + global: global, + segments: segments.move_map(|PathSegment {identifier, lifetimes, types}| PathSegment { + identifier: fld.fold_ident(identifier), + lifetimes: fld.fold_lifetimes(lifetimes), + types: types.move_map(|typ| fld.fold_ty(typ)), + }), + span: fld.new_span(span) } } -pub fn noop_fold_local(l: Gc, fld: &mut T) -> Gc { - let id = fld.new_id(l.id); // Needs to be first, for ast_map. 
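// The `MoveMap` trait added near the top of this fold.rs patch is what makes
// the by-value rewrites above possible: `attrs.move_map(|x| ..)`,
// `segments.move_map(|..| ..)` and friends rewrite a `Vec` (or `OwnedSlice`)
// element by element, in place, without reallocating. A sketch in
// present-day Rust follows (`ptr::read_and_zero` and `mem::move_val_init`
// are long gone); it is illustrative only and not panic-safe if the closure
// unwinds.
use std::ptr;

pub trait MoveMap<T> {
    fn move_map<F: FnMut(T) -> T>(self, f: F) -> Self;
}

impl<T> MoveMap<T> for Vec<T> {
    fn move_map<F: FnMut(T) -> T>(mut self, mut f: F) -> Vec<T> {
        for p in self.iter_mut() {
            unsafe {
                // Move the element out of its slot, transform it, and write
                // the result back into the same slot.
                let old = ptr::read(p);
                ptr::write(p, f(old));
            }
        }
        self
    }
}

fn main() {
    let v = vec![1, 2, 3].move_map(|x| x * 10);
    assert_eq!(v, vec![10, 20, 30]);
}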
- box(GC) Local { - id: id, - ty: fld.fold_ty(l.ty), - pat: fld.fold_pat(l.pat), - init: l.init.map(|e| fld.fold_expr(e)), - span: fld.new_span(l.span), - source: l.source, - } +pub fn noop_fold_local(l: P, fld: &mut T) -> P { + l.map(|Local {id, pat, ty, init, source, span}| Local { + id: fld.new_id(id), + ty: fld.fold_ty(ty), + pat: fld.fold_pat(pat), + init: init.map(|e| fld.fold_expr(e)), + source: source, + span: fld.new_span(span) + }) } pub fn noop_fold_attribute(at: Attribute, fld: &mut T) -> Attribute { + let Spanned {node: Attribute_ {id, style, value, is_sugared_doc}, span} = at; Spanned { - span: fld.new_span(at.span), - node: ast::Attribute_ { - id: at.node.id, - style: at.node.style, - value: box (GC) fld.fold_meta_item(&*at.node.value), - is_sugared_doc: at.node.is_sugared_doc - } + node: Attribute_ { + id: id, + style: style, + value: fld.fold_meta_item(value), + is_sugared_doc: is_sugared_doc + }, + span: fld.new_span(span) } } -pub fn noop_fold_explicit_self_underscore(es: &ExplicitSelf_, fld: &mut T) +pub fn noop_fold_explicit_self_underscore(es: ExplicitSelf_, fld: &mut T) -> ExplicitSelf_ { - match *es { - SelfStatic | SelfValue(_) => *es, - SelfRegion(ref lifetime, m, id) => { - SelfRegion(fld.fold_opt_lifetime(lifetime), m, id) + match es { + SelfStatic | SelfValue(_) => es, + SelfRegion(lifetime, m, ident) => { + SelfRegion(fld.fold_opt_lifetime(lifetime), m, ident) + } + SelfExplicit(typ, ident) => { + SelfExplicit(fld.fold_ty(typ), ident) } - SelfExplicit(ref typ, id) => SelfExplicit(fld.fold_ty(*typ), id), } } -pub fn noop_fold_explicit_self(es: &ExplicitSelf, fld: &mut T) -> ExplicitSelf { +pub fn noop_fold_explicit_self(Spanned {span, node}: ExplicitSelf, fld: &mut T) + -> ExplicitSelf { Spanned { - span: fld.new_span(es.span), - node: fld.fold_explicit_self_underscore(&es.node) + node: fld.fold_explicit_self_underscore(node), + span: fld.new_span(span) } } -pub fn noop_fold_mac(macro: &Mac, fld: &mut T) -> Mac { +pub fn noop_fold_mac(Spanned {node, span}: Mac, fld: &mut T) -> Mac { Spanned { - node: match macro.node { - MacInvocTT(ref p, ref tts, ctxt) => { - MacInvocTT(fld.fold_path(p), - fld.fold_tts(tts.as_slice()), - ctxt) + node: match node { + MacInvocTT(p, tts, ctxt) => { + MacInvocTT(fld.fold_path(p), fld.fold_tts(tts.as_slice()), ctxt) } }, - span: fld.new_span(macro.span) + span: fld.new_span(span) } } -pub fn noop_fold_meta_item(mi: &MetaItem, fld: &mut T) -> MetaItem { - Spanned { - node: - match mi.node { - MetaWord(ref id) => MetaWord((*id).clone()), - MetaList(ref id, ref mis) => { - MetaList((*id).clone(), - mis.iter() - .map(|e| box (GC) fld.fold_meta_item(&**e)).collect()) - } - MetaNameValue(ref id, ref s) => { - MetaNameValue((*id).clone(), (*s).clone()) - } - }, - span: fld.new_span(mi.span) } +pub fn noop_fold_meta_item(mi: P, fld: &mut T) -> P { + mi.map(|Spanned {node, span}| Spanned { + node: match node { + MetaWord(id) => MetaWord(id), + MetaList(id, mis) => { + MetaList(id, mis.move_map(|e| fld.fold_meta_item(e))) + } + MetaNameValue(id, s) => MetaNameValue(id, s) + }, + span: fld.new_span(span) + }) } -pub fn noop_fold_arg(a: &Arg, fld: &mut T) -> Arg { - let id = fld.new_id(a.id); // Needs to be first, for ast_map. 
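// The rewritten folds above, e.g. `noop_fold_local` and `noop_fold_meta_item`,
// rebuild an owned node with `P::map` (or `P::and_then`), destructuring the
// payload in the closure so that every field is moved exactly once instead of
// being copied out of a `Gc`. A self-contained toy version of that shape,
// with a plain `Box` plus a helper standing in for `syntax::ptr::P`:
struct Local {
    id: u32,
    name: String,
    init: Option<String>,
}

// Stand-in for `P::map`: consume the box, transform the payload, re-box it.
fn map_boxed<T, F: FnOnce(T) -> T>(b: Box<T>, f: F) -> Box<T> {
    Box::new(f(*b))
}

fn fold_local(l: Box<Local>, fresh_id: u32) -> Box<Local> {
    map_boxed(l, |Local { id: _, name, init }| Local {
        id: fresh_id,                           // renumbered, like fld.new_id(id)
        name,                                   // moved, not cloned
        init: init.map(|e| format!("({})", e)), // folded by value
    })
}

fn main() {
    let l = Box::new(Local { id: 0, name: "x".into(), init: Some("1".into()) });
    let l = fold_local(l, 42);
    assert_eq!(l.id, 42);
    assert_eq!(l.name, "x");
    assert_eq!(l.init.as_deref(), Some("(1)"));
}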
+pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { Arg { - id: id, - ty: fld.fold_ty(a.ty), - pat: fld.fold_pat(a.pat), + id: fld.new_id(id), + pat: fld.fold_pat(pat), + ty: fld.fold_ty(ty) } } pub fn noop_fold_tt(tt: &TokenTree, fld: &mut T) -> TokenTree { match *tt { TTTok(span, ref tok) => - TTTok(span, fld.fold_token(tok)), + TTTok(span, fld.fold_token(tok.clone())), TTDelim(ref tts) => TTDelim(Rc::new(fld.fold_tts(tts.as_slice()))), TTSeq(span, ref pattern, ref sep, is_optional) => TTSeq(span, Rc::new(fld.fold_tts(pattern.as_slice())), - sep.as_ref().map(|tok| fld.fold_token(tok)), + sep.clone().map(|tok| fld.fold_token(tok)), is_optional), TTNonterminal(sp,ref ident) => TTNonterminal(sp,fld.fold_ident(*ident)) @@ -588,14 +572,14 @@ pub fn noop_fold_tts(tts: &[TokenTree], fld: &mut T) -> Vec(t: &token::Token, fld: &mut T) -> token::Token { - match *t { +pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token { + match t { token::IDENT(id, followed_by_colons) => { token::IDENT(fld.fold_ident(id), followed_by_colons) } token::LIFETIME(id) => token::LIFETIME(fld.fold_ident(id)), - token::INTERPOLATED(ref nt) => token::INTERPOLATED(fld.fold_interpolated(nt)), - _ => (*t).clone() + token::INTERPOLATED(nt) => token::INTERPOLATED(fld.fold_interpolated(nt)), + _ => t } } @@ -619,9 +603,9 @@ pub fn noop_fold_token(t: &token::Token, fld: &mut T) -> token::Token // BTW, design choice: I considered just changing the type of, e.g., NtItem to contain // multiple items, but decided against it when I looked at parse_item_or_view_item and // tried to figure out what I would do with multiple items there.... -pub fn noop_fold_interpolated(nt : &token::Nonterminal, fld: &mut T) +pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) -> token::Nonterminal { - match *nt { + match nt { token::NtItem(item) => token::NtItem(fld.fold_item(item) // this is probably okay, because the only folds likely @@ -630,7 +614,7 @@ pub fn noop_fold_interpolated(nt : &token::Nonterminal, fld: &mut T) .expect_one("expected fold to produce exactly one item")), token::NtBlock(block) => token::NtBlock(fld.fold_block(block)), token::NtStmt(stmt) => - token::NtStmt(fld.fold_stmt(&*stmt) + token::NtStmt(fld.fold_stmt(stmt) // this is probably okay, because the only folds likely // to peek inside interpolated nodes will be renamings/markings, // which map single items to single items @@ -638,403 +622,373 @@ pub fn noop_fold_interpolated(nt : &token::Nonterminal, fld: &mut T) token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)), token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), - token::NtIdent(ref id, is_mod_name) => - token::NtIdent(box fld.fold_ident(**id),is_mod_name), - token::NtMeta(meta_item) => token::NtMeta(box (GC) fld.fold_meta_item(&*meta_item)), - token::NtPath(ref path) => token::NtPath(box fld.fold_path(&**path)), - token::NtTT(tt) => token::NtTT(box (GC) fld.fold_tt(&*tt)), + token::NtIdent(box id, is_mod_name) => + token::NtIdent(box fld.fold_ident(id), is_mod_name), + token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), + token::NtPath(box path) => token::NtPath(box fld.fold_path(path)), + token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))), // it looks to me like we can leave out the matchers: token::NtMatchers(matchers) - _ => (*nt).clone() + _ => nt } } -pub fn noop_fold_fn_decl(decl: &FnDecl, fld: &mut T) -> P { - P(FnDecl { - inputs: decl.inputs.iter().map(|x| fld.fold_arg(x)).collect(), // 
bad copy - output: fld.fold_ty(decl.output), - cf: decl.cf, - variadic: decl.variadic +pub fn noop_fold_fn_decl(decl: P, fld: &mut T) -> P { + decl.map(|FnDecl {inputs, output, cf, variadic}| FnDecl { + inputs: inputs.move_map(|x| fld.fold_arg(x)), + output: fld.fold_ty(output), + cf: cf, + variadic: variadic }) } -pub fn noop_fold_ty_param_bound(tpb: &TyParamBound, fld: &mut T) +pub fn noop_fold_ty_param_bound(tpb: TyParamBound, fld: &mut T) -> TyParamBound { - match *tpb { - TraitTyParamBound(ref ty) => TraitTyParamBound(fld.fold_trait_ref(ty)), - RegionTyParamBound(ref lifetime) => RegionTyParamBound(fld.fold_lifetime(lifetime)), - UnboxedFnTyParamBound(ref unboxed_function_type) => { + match tpb { + TraitTyParamBound(ty) => TraitTyParamBound(fld.fold_trait_ref(ty)), + RegionTyParamBound(lifetime) => RegionTyParamBound(fld.fold_lifetime(lifetime)), + UnboxedFnTyParamBound(UnboxedFnTy {decl, kind}) => { UnboxedFnTyParamBound(UnboxedFnTy { - decl: fld.fold_fn_decl(&*unboxed_function_type.decl), - kind: unboxed_function_type.kind, + decl: fld.fold_fn_decl(decl), + kind: kind, }) } } } -pub fn noop_fold_ty_param(tp: &TyParam, fld: &mut T) -> TyParam { - let id = fld.new_id(tp.id); +pub fn noop_fold_ty_param(tp: TyParam, fld: &mut T) -> TyParam { + let TyParam {id, ident, bounds, unbound, default, span} = tp; TyParam { - ident: tp.ident, - id: id, - bounds: fld.fold_bounds(&tp.bounds), - unbound: tp.unbound.as_ref().map(|x| fld.fold_ty_param_bound(x)), - default: tp.default.map(|x| fld.fold_ty(x)), - span: tp.span + id: fld.new_id(id), + ident: ident, + bounds: fld.fold_bounds(bounds), + unbound: unbound.map(|x| fld.fold_ty_param_bound(x)), + default: default.map(|x| fld.fold_ty(x)), + span: span } } -pub fn noop_fold_ty_params(tps: &[TyParam], fld: &mut T) +pub fn noop_fold_ty_params(tps: OwnedSlice, fld: &mut T) -> OwnedSlice { - tps.iter().map(|tp| fld.fold_ty_param(tp)).collect() + tps.move_map(|tp| fld.fold_ty_param(tp)) } -pub fn noop_fold_lifetime(l: &Lifetime, fld: &mut T) -> Lifetime { - let id = fld.new_id(l.id); +pub fn noop_fold_lifetime(l: Lifetime, fld: &mut T) -> Lifetime { Lifetime { - id: id, - span: fld.new_span(l.span), - name: l.name + id: fld.new_id(l.id), + name: l.name, + span: fld.new_span(l.span) } } -pub fn noop_fold_lifetime_def(l: &LifetimeDef, fld: &mut T) - -> LifetimeDef -{ +pub fn noop_fold_lifetime_def(l: LifetimeDef, fld: &mut T) + -> LifetimeDef { LifetimeDef { - lifetime: fld.fold_lifetime(&l.lifetime), - bounds: fld.fold_lifetimes(l.bounds.as_slice()), + lifetime: fld.fold_lifetime(l.lifetime), + bounds: fld.fold_lifetimes(l.bounds), } } -pub fn noop_fold_lifetimes(lts: &[Lifetime], fld: &mut T) -> Vec { - lts.iter().map(|l| fld.fold_lifetime(l)).collect() +pub fn noop_fold_lifetimes(lts: Vec, fld: &mut T) -> Vec { + lts.move_map(|l| fld.fold_lifetime(l)) } -pub fn noop_fold_lifetime_defs(lts: &[LifetimeDef], fld: &mut T) -> Vec { - lts.iter().map(|l| fld.fold_lifetime_def(l)).collect() +pub fn noop_fold_lifetime_defs(lts: Vec, fld: &mut T) + -> Vec { + lts.move_map(|l| fld.fold_lifetime_def(l)) } -pub fn noop_fold_opt_lifetime(o_lt: &Option, fld: &mut T) - -> Option { - o_lt.as_ref().map(|lt| fld.fold_lifetime(lt)) +pub fn noop_fold_opt_lifetime(o_lt: Option, fld: &mut T) + -> Option { + o_lt.map(|lt| fld.fold_lifetime(lt)) } -pub fn noop_fold_generics(generics: &Generics, fld: &mut T) -> Generics { +pub fn noop_fold_generics(Generics {ty_params, lifetimes, where_clause}: Generics, + fld: &mut T) -> Generics { Generics { - ty_params: 
fld.fold_ty_params(generics.ty_params.as_slice()), - lifetimes: fld.fold_lifetime_defs(generics.lifetimes.as_slice()), - where_clause: fld.fold_where_clause(&generics.where_clause), + ty_params: fld.fold_ty_params(ty_params), + lifetimes: fld.fold_lifetime_defs(lifetimes), + where_clause: fld.fold_where_clause(where_clause), } } pub fn noop_fold_where_clause( - where_clause: &WhereClause, + WhereClause {id, predicates}: WhereClause, fld: &mut T) -> WhereClause { WhereClause { - id: fld.new_id(where_clause.id), - predicates: where_clause.predicates.iter().map(|predicate| { + id: fld.new_id(id), + predicates: predicates.move_map(|predicate| { fld.fold_where_predicate(predicate) - }).collect(), + }) } } pub fn noop_fold_where_predicate( - predicate: &WherePredicate, + WherePredicate {id, ident, bounds, span}: WherePredicate, fld: &mut T) -> WherePredicate { WherePredicate { - id: fld.new_id(predicate.id), - span: fld.new_span(predicate.span), - ident: fld.fold_ident(predicate.ident), - bounds: predicate.bounds.map(|x| { - fld.fold_ty_param_bound(x) - }), + id: fld.new_id(id), + ident: fld.fold_ident(ident), + bounds: bounds.move_map(|x| fld.fold_ty_param_bound(x)), + span: fld.new_span(span) } } -pub fn noop_fold_struct_def(struct_def: Gc, - fld: &mut T) -> Gc { - box(GC) ast::StructDef { - fields: struct_def.fields.iter().map(|f| fld.fold_struct_field(f)).collect(), - ctor_id: struct_def.ctor_id.map(|cid| fld.new_id(cid)), - super_struct: match struct_def.super_struct { - Some(t) => Some(fld.fold_ty(t)), - None => None - }, - is_virtual: struct_def.is_virtual, - } +pub fn noop_fold_struct_def(struct_def: P, fld: &mut T) -> P { + struct_def.map(|StructDef {fields, ctor_id, super_struct, is_virtual}| StructDef { + fields: fields.move_map(|f| fld.fold_struct_field(f)), + ctor_id: ctor_id.map(|cid| fld.new_id(cid)), + super_struct: super_struct.map(|t| fld.fold_ty(t)), + is_virtual: is_virtual + }) } -pub fn noop_fold_trait_ref(p: &TraitRef, fld: &mut T) -> TraitRef { - let id = fld.new_id(p.ref_id); - ast::TraitRef { - path: fld.fold_path(&p.path), - ref_id: id, +pub fn noop_fold_trait_ref(TraitRef {ref_id, path}: TraitRef, fld: &mut T) -> TraitRef { + TraitRef { + ref_id: fld.new_id(ref_id), + path: fld.fold_path(path), } } -pub fn noop_fold_struct_field(f: &StructField, fld: &mut T) -> StructField { - let id = fld.new_id(f.node.id); +pub fn noop_fold_struct_field(f: StructField, fld: &mut T) -> StructField { + let StructField {node: StructField_ {id, kind, ty, attrs}, span} = f; Spanned { - node: ast::StructField_ { - kind: f.node.kind, - id: id, - ty: fld.fold_ty(f.node.ty), - attrs: f.node.attrs.iter().map(|a| fld.fold_attribute(*a)).collect(), + node: StructField_ { + id: fld.new_id(id), + kind: kind, + ty: fld.fold_ty(ty), + attrs: attrs.move_map(|a| fld.fold_attribute(a)) }, - span: fld.new_span(f.span), + span: fld.new_span(span) } } -pub fn noop_fold_field(field: Field, folder: &mut T) -> Field { - ast::Field { - ident: respan(field.ident.span, folder.fold_ident(field.ident.node)), - expr: folder.fold_expr(field.expr), - span: folder.new_span(field.span), +pub fn noop_fold_field(Field {ident, expr, span}: Field, folder: &mut T) -> Field { + Field { + ident: respan(ident.span, folder.fold_ident(ident.node)), + expr: folder.fold_expr(expr), + span: folder.new_span(span) } } -pub fn noop_fold_mt(mt: &MutTy, folder: &mut T) -> MutTy { +pub fn noop_fold_mt(MutTy {ty, mutbl}: MutTy, folder: &mut T) -> MutTy { MutTy { - ty: folder.fold_ty(mt.ty), - mutbl: mt.mutbl, + ty: folder.fold_ty(ty), + 
mutbl: mutbl, } } -pub fn noop_fold_opt_bounds(b: &Option>, folder: &mut T) - -> Option> { - b.as_ref().map(|bounds| folder.fold_bounds(bounds)) +pub fn noop_fold_opt_bounds(b: Option>, folder: &mut T) + -> Option> { + b.map(|bounds| folder.fold_bounds(bounds)) } -fn noop_fold_bounds(bounds: &TyParamBounds, folder: &mut T) +fn noop_fold_bounds(bounds: TyParamBounds, folder: &mut T) -> TyParamBounds { - bounds.map(|bound| folder.fold_ty_param_bound(bound)) + bounds.move_map(|bound| folder.fold_ty_param_bound(bound)) } -pub fn noop_fold_variant_arg(va: &VariantArg, folder: &mut T) -> VariantArg { - let id = folder.new_id(va.id); - ast::VariantArg { - ty: folder.fold_ty(va.ty), - id: id, +fn noop_fold_variant_arg(VariantArg {id, ty}: VariantArg, folder: &mut T) + -> VariantArg { + VariantArg { + id: folder.new_id(id), + ty: folder.fold_ty(ty) } } -pub fn noop_fold_view_item(vi: &ViewItem, folder: &mut T) - -> ViewItem{ - let inner_view_item = match vi.node { - ViewItemExternCrate(ref ident, ref string, node_id) => { - ViewItemExternCrate(ident.clone(), - (*string).clone(), - folder.new_id(node_id)) - } - ViewItemUse(ref view_path) => { - ViewItemUse(folder.fold_view_path(*view_path)) - } - }; +pub fn noop_fold_view_item(ViewItem {node, attrs, vis, span}: ViewItem, + folder: &mut T) -> ViewItem { ViewItem { - node: inner_view_item, - attrs: vi.attrs.iter().map(|a| folder.fold_attribute(*a)).collect(), - vis: vi.vis, - span: folder.new_span(vi.span), + node: match node { + ViewItemExternCrate(ident, string, node_id) => { + ViewItemExternCrate(ident, string, + folder.new_id(node_id)) + } + ViewItemUse(view_path) => { + ViewItemUse(folder.fold_view_path(view_path)) + } + }, + attrs: attrs.move_map(|a| folder.fold_attribute(a)), + vis: vis, + span: folder.new_span(span) } } pub fn noop_fold_block(b: P, folder: &mut T) -> P { - let id = folder.new_id(b.id); // Needs to be first, for ast_map. 
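// With `Gc` gone, the `Folder` methods and `noop_*` helpers above take their
// nodes by value, often destructuring them right in the parameter list
// (`noop_fold_mt(MutTy {ty, mutbl}: MutTy, ..)`, `noop_fold_view_item(..)`),
// and owned `Option`s are folded with `Option::map` instead of
// `as_ref().map(..)` plus a clone (`noop_fold_opt_bounds`). A toy version of
// that shape, with made-up node types:
struct MutTy {
    ty: String,
    mutbl: bool,
}

struct Bounds(Vec<String>);

// Parameter-list destructuring: the caller hands over ownership and the
// function immediately splits the node into fields it can move.
fn fold_mt(MutTy { ty, mutbl }: MutTy) -> MutTy {
    MutTy { ty: fold_ty(ty), mutbl }
}

fn fold_ty(ty: String) -> String {
    ty.to_uppercase()
}

// Owned Option: map it by value, no clone needed.
fn fold_opt_bounds(b: Option<Bounds>) -> Option<Bounds> {
    b.map(|Bounds(list)| Bounds(list.into_iter().map(fold_ty).collect()))
}

fn main() {
    let mt = fold_mt(MutTy { ty: "i32".into(), mutbl: false });
    assert_eq!(mt.ty, "I32");
    assert!(!mt.mutbl);
    assert!(fold_opt_bounds(None).is_none());
}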
- let view_items = b.view_items.iter().map(|x| folder.fold_view_item(x)).collect(); - let stmts = b.stmts.iter().flat_map(|s| folder.fold_stmt(&**s).move_iter()).collect(); - P(Block { - id: id, - view_items: view_items, - stmts: stmts, - expr: b.expr.map(|x| folder.fold_expr(x)), - rules: b.rules, - span: folder.new_span(b.span), + b.map(|Block {id, view_items, stmts, expr, rules, span}| Block { + id: folder.new_id(id), + view_items: view_items.move_map(|x| folder.fold_view_item(x)), + stmts: stmts.move_iter().flat_map(|s| folder.fold_stmt(s).move_iter()).collect(), + expr: expr.map(|x| folder.fold_expr(x)), + rules: rules, + span: folder.new_span(span), }) } -pub fn noop_fold_item_underscore(i: &Item_, folder: &mut T) -> Item_ { - match *i { +pub fn noop_fold_item_underscore(i: Item_, folder: &mut T) -> Item_ { + match i { ItemStatic(t, m, e) => { ItemStatic(folder.fold_ty(t), m, folder.fold_expr(e)) } - ItemFn(decl, fn_style, abi, ref generics, body) => { + ItemFn(decl, fn_style, abi, generics, body) => { ItemFn( - folder.fold_fn_decl(&*decl), + folder.fold_fn_decl(decl), fn_style, abi, folder.fold_generics(generics), folder.fold_block(body) ) } - ItemMod(ref m) => ItemMod(folder.fold_mod(m)), - ItemForeignMod(ref nm) => ItemForeignMod(folder.fold_foreign_mod(nm)), - ItemTy(t, ref generics) => { + ItemMod(m) => ItemMod(folder.fold_mod(m)), + ItemForeignMod(nm) => ItemForeignMod(folder.fold_foreign_mod(nm)), + ItemTy(t, generics) => { ItemTy(folder.fold_ty(t), folder.fold_generics(generics)) } - ItemEnum(ref enum_definition, ref generics) => { + ItemEnum(enum_definition, generics) => { ItemEnum( ast::EnumDef { - variants: enum_definition.variants.iter().map(|&x| { - folder.fold_variant(&*x) - }).collect(), + variants: enum_definition.variants.move_map(|x| folder.fold_variant(x)), }, folder.fold_generics(generics)) } - ItemStruct(ref struct_def, ref generics) => { - let struct_def = folder.fold_struct_def(*struct_def); + ItemStruct(struct_def, generics) => { + let struct_def = folder.fold_struct_def(struct_def); ItemStruct(struct_def, folder.fold_generics(generics)) } - ItemImpl(ref generics, ref ifce, ty, ref impl_items) => { + ItemImpl(generics, ifce, ty, impl_items) => { ItemImpl(folder.fold_generics(generics), - ifce.as_ref().map(|p| folder.fold_trait_ref(p)), + ifce.map(|p| folder.fold_trait_ref(p)), folder.fold_ty(ty), - impl_items.iter() - .flat_map(|impl_item| { - match *impl_item { - MethodImplItem(x) => { - folder.fold_method(x) - .move_iter() - .map(|x| MethodImplItem(x)) - } - } - }).collect() - ) + impl_items.move_iter().flat_map(|impl_item| match impl_item { + MethodImplItem(x) => { + folder.fold_method(x).move_iter().map(|x| MethodImplItem(x)) + } + }).collect()) } - ItemTrait(ref generics, ref unbound, ref bounds, ref methods) => { + ItemTrait(generics, unbound, bounds, methods) => { let bounds = folder.fold_bounds(bounds); - let methods = methods.iter().flat_map(|method| { - let r = match *method { - RequiredMethod(ref m) => { - SmallVector::one(RequiredMethod( - folder.fold_type_method(m))).move_iter() - } - ProvidedMethod(method) => { - // the awkward collect/iter idiom here is because - // even though an iter and a map satisfy the same trait bound, - // they're not actually the same type, so the method arms - // don't unify. 
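// The comment directly above spells out why both the old and the new
// ItemTrait arm go through the "collect/iter idiom": the RequiredMethod and
// ProvidedMethod arms would otherwise return different concrete iterator
// types, so one arm collects into a SmallVector first and both arms hand
// back that vector's iterator. A toy reproduction of the same workaround,
// with `Vec` standing in for `SmallVector` (boxed iterators would be the
// other way to unify the arms):
fn expand_all(n: u32) -> Vec<u32> {
    // Each input expands to one or many outputs, like a required vs. a
    // provided trait method above.
    (1..=3u32).flat_map(|k| {
        let out: Vec<u32> = match n {
            // One-output case.
            0 => vec![k],
            // Many-output case: the map adapter here has a different type
            // from a one-element iterator, so collect first...
            _ => (0..n).map(|i| i * k).collect(),
        };
        // ...and return one common iterator type from both arms.
        out.into_iter()
    }).collect()
}

fn main() {
    assert_eq!(expand_all(0), vec![1, 2, 3]);
    assert_eq!(expand_all(2), vec![0, 1, 0, 2, 0, 3]);
}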
- let methods : SmallVector = - folder.fold_method(method).move_iter() - .map(|m| ProvidedMethod(m)).collect(); - methods.move_iter() - } - }; - r + let methods = methods.move_iter().flat_map(|method| match method { + RequiredMethod(m) => { + SmallVector::one(RequiredMethod(folder.fold_type_method(m))).move_iter() + } + ProvidedMethod(method) => { + // the awkward collect/iter idiom here is because + // even though an iter and a map satisfy the same trait bound, + // they're not actually the same type, so the method arms + // don't unify. + let methods: SmallVector = + folder.fold_method(method).move_iter() + .map(|m| ProvidedMethod(m)).collect(); + methods.move_iter() + } }).collect(); ItemTrait(folder.fold_generics(generics), - unbound.clone(), + unbound, bounds, methods) } - ItemMac(ref m) => ItemMac(folder.fold_mac(m)), + ItemMac(m) => ItemMac(folder.fold_mac(m)), } } -pub fn noop_fold_type_method(m: &TypeMethod, fld: &mut T) -> TypeMethod { - let id = fld.new_id(m.id); // Needs to be first, for ast_map. +pub fn noop_fold_type_method(m: TypeMethod, fld: &mut T) -> TypeMethod { + let TypeMethod {id, ident, attrs, fn_style, abi, decl, generics, explicit_self, vis, span} = m; TypeMethod { - id: id, - ident: fld.fold_ident(m.ident), - attrs: m.attrs.iter().map(|a| fld.fold_attribute(*a)).collect(), - fn_style: m.fn_style, - abi: m.abi, - decl: fld.fold_fn_decl(&*m.decl), - generics: fld.fold_generics(&m.generics), - explicit_self: fld.fold_explicit_self(&m.explicit_self), - span: fld.new_span(m.span), - vis: m.vis, + id: fld.new_id(id), + ident: fld.fold_ident(ident), + attrs: attrs.move_map(|a| fld.fold_attribute(a)), + fn_style: fn_style, + abi: abi, + decl: fld.fold_fn_decl(decl), + generics: fld.fold_generics(generics), + explicit_self: fld.fold_explicit_self(explicit_self), + vis: vis, + span: fld.new_span(span) } } -pub fn noop_fold_mod(m: &Mod, folder: &mut T) -> Mod { - ast::Mod { - inner: folder.new_span(m.inner), - view_items: m.view_items - .iter() - .map(|x| folder.fold_view_item(x)).collect(), - items: m.items.iter().flat_map(|x| folder.fold_item(*x).move_iter()).collect(), +pub fn noop_fold_mod(Mod {inner, view_items, items}: Mod, folder: &mut T) -> Mod { + Mod { + inner: folder.new_span(inner), + view_items: view_items.move_map(|x| folder.fold_view_item(x)), + items: items.move_iter().flat_map(|x| folder.fold_item(x).move_iter()).collect(), } } -pub fn noop_fold_crate(c: Crate, folder: &mut T) -> Crate { +pub fn noop_fold_crate(Crate {module, attrs, config, exported_macros, span}: Crate, + folder: &mut T) -> Crate { Crate { - module: folder.fold_mod(&c.module), - attrs: c.attrs.iter().map(|x| folder.fold_attribute(*x)).collect(), - config: c.config.iter().map(|x| box (GC) folder.fold_meta_item(&**x)).collect(), - span: folder.new_span(c.span), - exported_macros: c.exported_macros + module: folder.fold_mod(module), + attrs: attrs.move_map(|x| folder.fold_attribute(x)), + config: folder.fold_meta_items(config), + exported_macros: exported_macros, + span: folder.new_span(span) } } // fold one item into possibly many items -pub fn noop_fold_item(i: &Item, - folder: &mut T) -> SmallVector> { - SmallVector::one(box(GC) folder.fold_item_simple(i)) +pub fn noop_fold_item(i: P, folder: &mut T) -> SmallVector> { + SmallVector::one(i.map(|i| folder.fold_item_simple(i))) } - // fold one item into exactly one item -pub fn noop_fold_item_simple(i: &Item, folder: &mut T) -> Item { - let id = folder.new_id(i.id); // Needs to be first, for ast_map. 
- let node = folder.fold_item_underscore(&i.node); +pub fn noop_fold_item_simple(Item {id, ident, attrs, node, vis, span}: Item, + folder: &mut T) -> Item { + let id = folder.new_id(id); + let node = folder.fold_item_underscore(node); let ident = match node { // The node may have changed, recompute the "pretty" impl name. - ItemImpl(_, ref maybe_trait, ty, _) => { - ast_util::impl_pretty_name(maybe_trait, &*ty) + ItemImpl(_, ref maybe_trait, ref ty, _) => { + ast_util::impl_pretty_name(maybe_trait, &**ty) } - _ => i.ident + _ => ident }; Item { id: id, ident: folder.fold_ident(ident), - attrs: i.attrs.iter().map(|e| folder.fold_attribute(*e)).collect(), + attrs: attrs.move_map(|e| folder.fold_attribute(e)), node: node, - vis: i.vis, - span: folder.new_span(i.span) + vis: vis, + span: folder.new_span(span) } } -pub fn noop_fold_foreign_item(ni: &ForeignItem, - folder: &mut T) -> Gc { - let id = folder.new_id(ni.id); // Needs to be first, for ast_map. - box(GC) ForeignItem { - id: id, - ident: folder.fold_ident(ni.ident), - attrs: ni.attrs.iter().map(|x| folder.fold_attribute(*x)).collect(), - node: match ni.node { - ForeignItemFn(ref fdec, ref generics) => { - ForeignItemFn(P(FnDecl { - inputs: fdec.inputs.iter().map(|a| folder.fold_arg(a)).collect(), - output: folder.fold_ty(fdec.output), - cf: fdec.cf, - variadic: fdec.variadic +pub fn noop_fold_foreign_item(ni: P, folder: &mut T) -> P { + ni.map(|ForeignItem {id, ident, attrs, node, span, vis}| ForeignItem { + id: folder.new_id(id), + ident: folder.fold_ident(ident), + attrs: attrs.move_map(|x| folder.fold_attribute(x)), + node: match node { + ForeignItemFn(fdec, generics) => { + ForeignItemFn(fdec.map(|FnDecl {inputs, output, cf, variadic}| FnDecl { + inputs: inputs.move_map(|a| folder.fold_arg(a)), + output: folder.fold_ty(output), + cf: cf, + variadic: variadic }), folder.fold_generics(generics)) } ForeignItemStatic(t, m) => { ForeignItemStatic(folder.fold_ty(t), m) } }, - span: folder.new_span(ni.span), - vis: ni.vis, - } + vis: vis, + span: folder.new_span(span) + }) } // Default fold over a method. // Invariant: produces exactly one method. -pub fn noop_fold_method(m: &Method, folder: &mut T) -> SmallVector> { - let id = folder.new_id(m.id); // Needs to be first, for ast_map. 
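// A standalone sketch (not the real `syntax::ptr::P`) of the owned-pointer
// `map` idiom the rewritten `noop_fold_*` functions above rely on. The real
// `P` has more to it (`and_then`, `Deref`, its exact representation), but
// the point is the same: `p.map(|x| ..)` lets a folder consume a node by
// value and rebuild it in place, instead of cloning out of a `Gc`. `Node`
// and `bump_ids` are toy stand-ins for AST types and a `Folder`.

struct P<T> {
    ptr: Box<T>,
}

impl<T> P<T> {
    fn new(value: T) -> P<T> {
        P { ptr: Box::new(value) }
    }

    // Consume the pointer, transform the owned value, and re-box it.
    fn map<F: FnOnce(T) -> T>(self, f: F) -> P<T> {
        P { ptr: Box::new(f(*self.ptr)) }
    }
}

struct Node {
    id: u32,
    children: Vec<P<Node>>,
}

// The by-value folding style: destructure the owned node, fold each piece
// (children via a `move_map`-like consume-and-rebuild), then reassemble.
fn bump_ids(node: P<Node>) -> P<Node> {
    node.map(|Node { id, children }| Node {
        id: id + 1,
        children: children.into_iter().map(bump_ids).collect(),
    })
}

fn main() {
    let leaf = P::new(Node { id: 7, children: Vec::new() });
    let tree = P::new(Node { id: 0, children: vec![leaf] });
    let tree = bump_ids(tree);
    assert_eq!(tree.ptr.id, 1);
    assert_eq!(tree.ptr.children[0].ptr.id, 8);
}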
- SmallVector::one(box(GC) Method { - attrs: m.attrs.iter().map(|a| folder.fold_attribute(*a)).collect(), - id: id, - span: folder.new_span(m.span), - node: match m.node { +pub fn noop_fold_method(m: P, folder: &mut T) -> SmallVector> { + SmallVector::one(m.map(|Method {id, attrs, node, span}| Method { + id: folder.new_id(id), + attrs: attrs.move_map(|a| folder.fold_attribute(a)), + node: match node { MethDecl(ident, - ref generics, + generics, abi, - ref explicit_self, + explicit_self, fn_style, decl, body, @@ -1044,215 +998,226 @@ pub fn noop_fold_method(m: &Method, folder: &mut T) -> SmallVector MethMac(folder.fold_mac(mac)), - } - }) + MethMac(mac) => MethMac(folder.fold_mac(mac)), + }, + span: folder.new_span(span) + })) } -pub fn noop_fold_pat(p: Gc, folder: &mut T) -> Gc { - let id = folder.new_id(p.id); - let node = match p.node { - PatWild(k) => PatWild(k), - PatIdent(binding_mode, ref pth1, ref sub) => { - PatIdent(binding_mode, - Spanned{span: folder.new_span(pth1.span), - node: folder.fold_ident(pth1.node)}, - sub.map(|x| folder.fold_pat(x))) - } - PatLit(e) => PatLit(folder.fold_expr(e)), - PatEnum(ref pth, ref pats) => { - PatEnum(folder.fold_path(pth), - pats.as_ref().map(|pats| pats.iter().map(|x| folder.fold_pat(*x)).collect())) - } - PatStruct(ref pth, ref fields, etc) => { - let pth_ = folder.fold_path(pth); - let fs = fields.iter().map(|f| { - ast::FieldPat { - ident: f.ident, - pat: folder.fold_pat(f.pat) - } - }).collect(); - PatStruct(pth_, fs, etc) - } - PatTup(ref elts) => PatTup(elts.iter().map(|x| folder.fold_pat(*x)).collect()), - PatBox(inner) => PatBox(folder.fold_pat(inner)), - PatRegion(inner) => PatRegion(folder.fold_pat(inner)), - PatRange(e1, e2) => { - PatRange(folder.fold_expr(e1), folder.fold_expr(e2)) +pub fn noop_fold_pat(p: P, folder: &mut T) -> P { + p.map(|Pat {id, node, span}| Pat { + id: folder.new_id(id), + node: match node { + PatWild(k) => PatWild(k), + PatIdent(binding_mode, pth1, sub) => { + PatIdent(binding_mode, + Spanned{span: folder.new_span(pth1.span), + node: folder.fold_ident(pth1.node)}, + sub.map(|x| folder.fold_pat(x))) + } + PatLit(e) => PatLit(folder.fold_expr(e)), + PatEnum(pth, pats) => { + PatEnum(folder.fold_path(pth), + pats.map(|pats| pats.move_map(|x| folder.fold_pat(x)))) + } + PatStruct(pth, fields, etc) => { + let pth = folder.fold_path(pth); + let fs = fields.move_map(|f| { + ast::FieldPat { + ident: f.ident, + pat: folder.fold_pat(f.pat) + } + }); + PatStruct(pth, fs, etc) + } + PatTup(elts) => PatTup(elts.move_map(|x| folder.fold_pat(x))), + PatBox(inner) => PatBox(folder.fold_pat(inner)), + PatRegion(inner) => PatRegion(folder.fold_pat(inner)), + PatRange(e1, e2) => { + PatRange(folder.fold_expr(e1), folder.fold_expr(e2)) + }, + PatVec(before, slice, after) => { + PatVec(before.move_map(|x| folder.fold_pat(x)), + slice.map(|x| folder.fold_pat(x)), + after.move_map(|x| folder.fold_pat(x))) + } + PatMac(mac) => PatMac(folder.fold_mac(mac)) }, - PatVec(ref before, ref slice, ref after) => { - PatVec(before.iter().map(|x| folder.fold_pat(*x)).collect(), - slice.map(|x| folder.fold_pat(x)), - after.iter().map(|x| folder.fold_pat(*x)).collect()) - } - PatMac(ref mac) => PatMac(folder.fold_mac(mac)), - }; - - box(GC) Pat { - id: id, - span: folder.new_span(p.span), - node: node, - } + span: folder.new_span(span) + }) } -pub fn noop_fold_expr(e: Gc, folder: &mut T) -> Gc { - let id = folder.new_id(e.id); - let node = match e.node { - ExprBox(p, e) => { - ExprBox(folder.fold_expr(p), folder.fold_expr(e)) - } - ExprVec(ref 
exprs) => { - ExprVec(exprs.iter().map(|&x| folder.fold_expr(x)).collect()) - } - ExprRepeat(expr, count) => { - ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count)) - } - ExprTup(ref elts) => ExprTup(elts.iter().map(|x| folder.fold_expr(*x)).collect()), - ExprCall(f, ref args) => { - ExprCall(folder.fold_expr(f), - args.iter().map(|&x| folder.fold_expr(x)).collect()) - } - ExprMethodCall(i, ref tps, ref args) => { - ExprMethodCall( - respan(i.span, folder.fold_ident(i.node)), - tps.iter().map(|&x| folder.fold_ty(x)).collect(), - args.iter().map(|&x| folder.fold_expr(x)).collect()) - } - ExprBinary(binop, lhs, rhs) => { - ExprBinary(binop, - folder.fold_expr(lhs), - folder.fold_expr(rhs)) - } - ExprUnary(binop, ohs) => { - ExprUnary(binop, folder.fold_expr(ohs)) - } - ExprLit(_) => e.node.clone(), - ExprCast(expr, ty) => { - ExprCast(folder.fold_expr(expr), folder.fold_ty(ty)) - } - ExprAddrOf(m, ohs) => ExprAddrOf(m, folder.fold_expr(ohs)), - ExprIf(cond, tr, fl) => { - ExprIf(folder.fold_expr(cond), - folder.fold_block(tr), - fl.map(|x| folder.fold_expr(x))) - } - ExprWhile(cond, body, opt_ident) => { - ExprWhile(folder.fold_expr(cond), - folder.fold_block(body), - opt_ident.map(|i| folder.fold_ident(i))) - } - ExprForLoop(pat, iter, body, ref opt_ident) => { - ExprForLoop(folder.fold_pat(pat), - folder.fold_expr(iter), - folder.fold_block(body), +pub fn noop_fold_expr(Expr {id, node, span}: Expr, folder: &mut T) -> Expr { + Expr { + id: folder.new_id(id), + node: match node { + ExprBox(p, e) => { + ExprBox(folder.fold_expr(p), folder.fold_expr(e)) + } + ExprVec(exprs) => { + ExprVec(exprs.move_map(|x| folder.fold_expr(x))) + } + ExprRepeat(expr, count) => { + ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count)) + } + ExprTup(elts) => ExprTup(elts.move_map(|x| folder.fold_expr(x))), + ExprCall(f, args) => { + ExprCall(folder.fold_expr(f), + args.move_map(|x| folder.fold_expr(x))) + } + ExprMethodCall(i, tps, args) => { + ExprMethodCall( + respan(i.span, folder.fold_ident(i.node)), + tps.move_map(|x| folder.fold_ty(x)), + args.move_map(|x| folder.fold_expr(x))) + } + ExprBinary(binop, lhs, rhs) => { + ExprBinary(binop, + folder.fold_expr(lhs), + folder.fold_expr(rhs)) + } + ExprUnary(binop, ohs) => { + ExprUnary(binop, folder.fold_expr(ohs)) + } + ExprLit(l) => ExprLit(l), + ExprCast(expr, ty) => { + ExprCast(folder.fold_expr(expr), folder.fold_ty(ty)) + } + ExprAddrOf(m, ohs) => ExprAddrOf(m, folder.fold_expr(ohs)), + ExprIf(cond, tr, fl) => { + ExprIf(folder.fold_expr(cond), + folder.fold_block(tr), + fl.map(|x| folder.fold_expr(x))) + } + ExprWhile(cond, body, opt_ident) => { + ExprWhile(folder.fold_expr(cond), + folder.fold_block(body), + opt_ident.map(|i| folder.fold_ident(i))) + } + ExprForLoop(pat, iter, body, opt_ident) => { + ExprForLoop(folder.fold_pat(pat), + folder.fold_expr(iter), + folder.fold_block(body), + opt_ident.map(|i| folder.fold_ident(i))) + } + ExprLoop(body, opt_ident) => { + ExprLoop(folder.fold_block(body), opt_ident.map(|i| folder.fold_ident(i))) - } - ExprLoop(body, opt_ident) => { - ExprLoop(folder.fold_block(body), - opt_ident.map(|i| folder.fold_ident(i))) - } - ExprMatch(expr, ref arms) => { - ExprMatch(folder.fold_expr(expr), - arms.iter().map(|x| folder.fold_arm(x)).collect()) - } - ExprFnBlock(capture_clause, ref decl, ref body) => { - ExprFnBlock(capture_clause, - folder.fold_fn_decl(&**decl), - folder.fold_block(body.clone())) - } - ExprProc(ref decl, ref body) => { - ExprProc(folder.fold_fn_decl(&**decl), - 
folder.fold_block(body.clone())) - } - ExprUnboxedFn(capture_clause, kind, ref decl, ref body) => { - ExprUnboxedFn(capture_clause, - kind, - folder.fold_fn_decl(&**decl), - folder.fold_block(*body)) - } - ExprBlock(ref blk) => ExprBlock(folder.fold_block(*blk)), - ExprAssign(el, er) => { - ExprAssign(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprAssignOp(op, el, er) => { - ExprAssignOp(op, - folder.fold_expr(el), - folder.fold_expr(er)) - } - ExprField(el, id, ref tys) => { - ExprField(folder.fold_expr(el), - respan(id.span, folder.fold_ident(id.node)), - tys.iter().map(|&x| folder.fold_ty(x)).collect()) - } - ExprTupField(el, id, ref tys) => { - ExprTupField(folder.fold_expr(el), - respan(id.span, folder.fold_uint(id.node)), - tys.iter().map(|&x| folder.fold_ty(x)).collect()) - } - ExprIndex(el, er) => { - ExprIndex(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprPath(ref pth) => ExprPath(folder.fold_path(pth)), - ExprBreak(opt_ident) => ExprBreak(opt_ident.map(|x| folder.fold_ident(x))), - ExprAgain(opt_ident) => ExprAgain(opt_ident.map(|x| folder.fold_ident(x))), - ExprRet(ref e) => { - ExprRet(e.map(|x| folder.fold_expr(x))) - } - ExprInlineAsm(ref a) => { + } + ExprMatch(expr, arms) => { + ExprMatch(folder.fold_expr(expr), + arms.move_map(|x| folder.fold_arm(x))) + } + ExprFnBlock(capture_clause, decl, body) => { + ExprFnBlock(capture_clause, + folder.fold_fn_decl(decl), + folder.fold_block(body)) + } + ExprProc(decl, body) => { + ExprProc(folder.fold_fn_decl(decl), + folder.fold_block(body)) + } + ExprUnboxedFn(capture_clause, kind, decl, body) => { + ExprUnboxedFn(capture_clause, + kind, + folder.fold_fn_decl(decl), + folder.fold_block(body)) + } + ExprBlock(blk) => ExprBlock(folder.fold_block(blk)), + ExprAssign(el, er) => { + ExprAssign(folder.fold_expr(el), folder.fold_expr(er)) + } + ExprAssignOp(op, el, er) => { + ExprAssignOp(op, + folder.fold_expr(el), + folder.fold_expr(er)) + } + ExprField(el, ident, tys) => { + ExprField(folder.fold_expr(el), + respan(ident.span, folder.fold_ident(ident.node)), + tys.move_map(|x| folder.fold_ty(x))) + } + ExprTupField(el, ident, tys) => { + ExprTupField(folder.fold_expr(el), + respan(ident.span, folder.fold_uint(ident.node)), + tys.move_map(|x| folder.fold_ty(x))) + } + ExprIndex(el, er) => { + ExprIndex(folder.fold_expr(el), folder.fold_expr(er)) + } + ExprPath(pth) => ExprPath(folder.fold_path(pth)), + ExprBreak(opt_ident) => ExprBreak(opt_ident.map(|x| folder.fold_ident(x))), + ExprAgain(opt_ident) => ExprAgain(opt_ident.map(|x| folder.fold_ident(x))), + ExprRet(e) => ExprRet(e.map(|x| folder.fold_expr(x))), ExprInlineAsm(InlineAsm { - inputs: a.inputs.iter().map(|&(ref c, input)| { - ((*c).clone(), folder.fold_expr(input)) - }).collect(), - outputs: a.outputs.iter().map(|&(ref c, out, is_rw)| { - ((*c).clone(), folder.fold_expr(out), is_rw) - }).collect(), - .. 
(*a).clone() - }) - } - ExprMac(ref mac) => ExprMac(folder.fold_mac(mac)), - ExprStruct(ref path, ref fields, maybe_expr) => { - ExprStruct(folder.fold_path(path), - fields.iter().map(|x| folder.fold_field(*x)).collect(), - maybe_expr.map(|x| folder.fold_expr(x))) + inputs, + outputs, + asm, + asm_str_style, + clobbers, + volatile, + alignstack, + dialect + }) => ExprInlineAsm(InlineAsm { + inputs: inputs.move_map(|(c, input)| { + (c, folder.fold_expr(input)) + }), + outputs: outputs.move_map(|(c, out, is_rw)| { + (c, folder.fold_expr(out), is_rw) + }), + asm: asm, + asm_str_style: asm_str_style, + clobbers: clobbers, + volatile: volatile, + alignstack: alignstack, + dialect: dialect + }), + ExprMac(mac) => ExprMac(folder.fold_mac(mac)), + ExprStruct(path, fields, maybe_expr) => { + ExprStruct(folder.fold_path(path), + fields.move_map(|x| folder.fold_field(x)), + maybe_expr.map(|x| folder.fold_expr(x))) + }, + ExprParen(ex) => ExprParen(folder.fold_expr(ex)) }, - ExprParen(ex) => ExprParen(folder.fold_expr(ex)) - }; - - box(GC) Expr { - id: id, - node: node, - span: folder.new_span(e.span), + span: folder.new_span(span) } } -pub fn noop_fold_stmt(s: &Stmt, - folder: &mut T) -> SmallVector> { - let nodes = match s.node { +pub fn noop_fold_stmt(Spanned {node, span}: Stmt, folder: &mut T) + -> SmallVector> { + let span = folder.new_span(span); + match node { StmtDecl(d, id) => { let id = folder.new_id(id); - folder.fold_decl(d).move_iter() - .map(|d| StmtDecl(d, id)) - .collect() + folder.fold_decl(d).move_iter().map(|d| P(Spanned { + node: StmtDecl(d, id), + span: span + })).collect() } StmtExpr(e, id) => { let id = folder.new_id(id); - SmallVector::one(StmtExpr(folder.fold_expr(e), id)) + SmallVector::one(P(Spanned { + node: StmtExpr(folder.fold_expr(e), id), + span: span + })) } StmtSemi(e, id) => { let id = folder.new_id(id); - SmallVector::one(StmtSemi(folder.fold_expr(e), id)) + SmallVector::one(P(Spanned { + node: StmtSemi(folder.fold_expr(e), id), + span: span + })) } - StmtMac(ref mac, semi) => SmallVector::one(StmtMac(folder.fold_mac(mac), semi)) - }; - - nodes.move_iter().map(|node| box(GC) Spanned { - node: node, - span: folder.new_span(s.span), - }).collect() + StmtMac(mac, semi) => SmallVector::one(P(Spanned { + node: StmtMac(folder.fold_mac(mac), semi), + span: span + })) + } } #[cfg(test)] @@ -1278,7 +1243,7 @@ mod test { fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident { token::str_to_ident("zz") } - fn fold_mac(&mut self, macro: &ast::Mac) -> ast::Mac { + fn fold_mac(&mut self, macro: ast::Mac) -> ast::Mac { fold::noop_fold_mac(macro, self) } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 254428486f8b4..146b5a5b34865 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -27,6 +27,7 @@ #![feature(quote, struct_variant, unsafe_destructor, import_shadowing)] #![allow(deprecated)] +extern crate arena; extern crate fmt_macros; extern crate debug; #[phase(plugin, link)] extern crate log; @@ -63,6 +64,7 @@ pub mod diagnostic; pub mod fold; pub mod owned_slice; pub mod parse; +pub mod ptr; pub mod visit; pub mod print { diff --git a/src/libsyntax/owned_slice.rs b/src/libsyntax/owned_slice.rs index d368477cd33ab..7d8a9e08ba02b 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/libsyntax/owned_slice.rs @@ -11,7 +11,7 @@ use std::fmt; use std::default::Default; use std::hash; -use std::{mem, raw, ptr, slice}; +use std::{mem, raw, ptr, slice, vec}; use serialize::{Encodable, Decodable, Encoder, Decoder}; /// A non-growable owned slice. 
This would preferably become `~[T]` @@ -105,6 +105,10 @@ impl OwnedSlice { self.as_slice().iter() } + pub fn move_iter(self) -> vec::MoveItems { + self.into_vec().move_iter() + } + pub fn map(&self, f: |&T| -> U) -> OwnedSlice { self.iter().map(f).collect() } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index eca02d06ca9f8..74b93e75e64dd 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -15,8 +15,7 @@ use parse::common::*; //resolve bug? use parse::token; use parse::parser::Parser; use parse::token::INTERPOLATED; - -use std::gc::{Gc, GC}; +use ptr::P; /// A parser that can parse attributes. pub trait ParserAttr { @@ -24,9 +23,9 @@ pub trait ParserAttr { fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute; fn parse_inner_attrs_and_next(&mut self) -> (Vec, Vec); - fn parse_meta_item(&mut self) -> Gc; - fn parse_meta_seq(&mut self) -> Vec>; - fn parse_optional_meta(&mut self) -> Vec>; + fn parse_meta_item(&mut self) -> P; + fn parse_meta_seq(&mut self) -> Vec>; + fn parse_optional_meta(&mut self) -> Vec>; } impl<'a> ParserAttr for Parser<'a> { @@ -160,13 +159,20 @@ impl<'a> ParserAttr for Parser<'a> { /// matches meta_item = IDENT /// | IDENT = lit /// | IDENT meta_seq - fn parse_meta_item(&mut self) -> Gc { - match self.token { - token::INTERPOLATED(token::NtMeta(e)) => { + fn parse_meta_item(&mut self) -> P { + let nt_meta = match self.token { + token::INTERPOLATED(token::NtMeta(ref e)) => { + Some(e.clone()) + } + _ => None + }; + + match nt_meta { + Some(meta) => { self.bump(); - return e + return meta; } - _ => {} + None => {} } let lo = self.span.lo; @@ -187,29 +193,29 @@ impl<'a> ParserAttr for Parser<'a> { } } let hi = self.span.hi; - box(GC) spanned(lo, hi, ast::MetaNameValue(name, lit)) + P(spanned(lo, hi, ast::MetaNameValue(name, lit))) } token::LPAREN => { let inner_items = self.parse_meta_seq(); let hi = self.span.hi; - box(GC) spanned(lo, hi, ast::MetaList(name, inner_items)) + P(spanned(lo, hi, ast::MetaList(name, inner_items))) } _ => { let hi = self.last_span.hi; - box(GC) spanned(lo, hi, ast::MetaWord(name)) + P(spanned(lo, hi, ast::MetaWord(name))) } } } /// matches meta_seq = ( COMMASEP(meta_item) ) - fn parse_meta_seq(&mut self) -> Vec> { + fn parse_meta_seq(&mut self) -> Vec> { self.parse_seq(&token::LPAREN, &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_meta_item()).node } - fn parse_optional_meta(&mut self) -> Vec> { + fn parse_optional_meta(&mut self) -> Vec> { match self.token { token::LPAREN => self.parse_meta_seq(), _ => Vec::new() diff --git a/src/libsyntax/parse/classify.rs b/src/libsyntax/parse/classify.rs index 516f22cdf4d60..cdd221aca7cf0 100644 --- a/src/libsyntax/parse/classify.rs +++ b/src/libsyntax/parse/classify.rs @@ -13,7 +13,6 @@ // Predicates on exprs and stmts that the pretty-printer and parser use use ast; -use std::gc::Gc; /// Does this expression require a semicolon to be treated /// as a statement? The negation of this: 'can this expression @@ -22,7 +21,7 @@ use std::gc::Gc; /// if true {...} else {...} /// |x| 5 /// isn't parsed as (if true {...} else {...} | x) | 5 -pub fn expr_requires_semi_to_be_stmt(e: Gc) -> bool { +pub fn expr_requires_semi_to_be_stmt(e: &ast::Expr) -> bool { match e.node { ast::ExprIf(..) | ast::ExprMatch(..) 
@@ -34,25 +33,25 @@ pub fn expr_requires_semi_to_be_stmt(e: Gc) -> bool { } } -pub fn expr_is_simple_block(e: Gc) -> bool { +pub fn expr_is_simple_block(e: &ast::Expr) -> bool { match e.node { - ast::ExprBlock(block) => block.rules == ast::DefaultBlock, - _ => false + ast::ExprBlock(ref block) => block.rules == ast::DefaultBlock, + _ => false } } /// this statement requires a semicolon after it. /// note that in one case (stmt_semi), we've already /// seen the semicolon, and thus don't need another. -pub fn stmt_ends_with_semi(stmt: &ast::Stmt) -> bool { - return match stmt.node { - ast::StmtDecl(d, _) => { +pub fn stmt_ends_with_semi(stmt: &ast::Stmt_) -> bool { + match *stmt { + ast::StmtDecl(ref d, _) => { match d.node { ast::DeclLocal(_) => true, ast::DeclItem(_) => false } } - ast::StmtExpr(e, _) => { expr_requires_semi_to_be_stmt(e) } + ast::StmtExpr(ref e, _) => { expr_requires_semi_to_be_stmt(&**e) } ast::StmtSemi(..) => { false } ast::StmtMac(..) => { false } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 585b98925cc58..e5b6359000b6c 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -15,9 +15,9 @@ use codemap::{Span, CodeMap, FileMap}; use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto}; use parse::attr::ParserAttr; use parse::parser::Parser; +use ptr::P; use std::cell::RefCell; -use std::gc::Gc; use std::io::File; use std::rc::Rc; use std::str; @@ -106,7 +106,7 @@ pub fn parse_expr_from_source_str(name: String, source: String, cfg: ast::CrateConfig, sess: &ParseSess) - -> Gc { + -> P { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_expr(), p) } @@ -115,7 +115,7 @@ pub fn parse_item_from_source_str(name: String, source: String, cfg: ast::CrateConfig, sess: &ParseSess) - -> Option> { + -> Option> { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_item_with_outer_attributes(),p) } @@ -124,7 +124,7 @@ pub fn parse_meta_from_source_str(name: String, source: String, cfg: ast::CrateConfig, sess: &ParseSess) - -> Gc { + -> P { let mut p = new_parser_from_source_str(sess, cfg, name, source); maybe_aborted(p.parse_meta_item(),p) } @@ -134,7 +134,7 @@ pub fn parse_stmt_from_source_str(name: String, cfg: ast::CrateConfig, attrs: Vec , sess: &ParseSess) - -> Gc { + -> P { let mut p = new_parser_from_source_str( sess, cfg, @@ -702,7 +702,6 @@ pub fn integer_lit(s: &str, sd: &SpanHandler, sp: Span) -> ast::Lit_ { mod test { use super::*; use serialize::json; - use std::gc::GC; use codemap::{Span, BytePos, Spanned}; use owned_slice::OwnedSlice; use ast; @@ -711,6 +710,7 @@ mod test { use attr::AttrMetaMethods; use parse::parser::Parser; use parse::token::{str_to_ident}; + use ptr::P; use util::parser_testing::{string_to_tts, string_to_parser}; use util::parser_testing::{string_to_expr, string_to_item}; use util::parser_testing::string_to_stmt; @@ -722,7 +722,7 @@ mod test { #[test] fn path_exprs_1() { assert!(string_to_expr("a".to_string()) == - box(GC) ast::Expr{ + P(ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { span: sp(0, 1), @@ -736,12 +736,12 @@ mod test { ), }), span: sp(0, 1) - }) + })) } #[test] fn path_exprs_2 () { assert!(string_to_expr("::a::b".to_string()) == - box(GC) ast::Expr { + P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { span: sp(0, 6), @@ -760,7 +760,7 @@ mod test { ) }), span: sp(0, 6) - }) + })) } #[should_fail] @@ -953,9 +953,9 @@ mod test { #[test] fn ret_expr() { 
assert!(string_to_expr("return d".to_string()) == - box(GC) ast::Expr{ + P(ast::Expr{ id: ast::DUMMY_NODE_ID, - node:ast::ExprRet(Some(box(GC) ast::Expr{ + node:ast::ExprRet(Some(P(ast::Expr{ id: ast::DUMMY_NODE_ID, node:ast::ExprPath(ast::Path{ span: sp(7, 8), @@ -969,15 +969,15 @@ mod test { ), }), span:sp(7,8) - })), + }))), span:sp(0,8) - }) + })) } #[test] fn parse_stmt_1 () { assert!(string_to_stmt("b;".to_string()) == - box(GC) Spanned{ - node: ast::StmtExpr(box(GC) ast::Expr { + P(Spanned{ + node: ast::StmtExpr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprPath(ast::Path { span:sp(0,1), @@ -990,9 +990,9 @@ mod test { } ), }), - span: sp(0,1)}, + span: sp(0,1)}), ast::DUMMY_NODE_ID), - span: sp(0,1)}) + span: sp(0,1)})) } @@ -1004,14 +1004,14 @@ mod test { let sess = new_parse_sess(); let mut parser = string_to_parser(&sess, "b".to_string()); assert!(parser.parse_pat() - == box(GC) ast::Pat{ + == P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: ast::PatIdent(ast::BindByValue(ast::MutImmutable), Spanned{ span:sp(0, 1), node: str_to_ident("b") }, None), - span: sp(0,1)}); + span: sp(0,1)})); parser_done(parser); } @@ -1020,13 +1020,13 @@ mod test { // this test depends on the intern order of "fn" and "int" assert!(string_to_item("fn a (b : int) { b; }".to_string()) == Some( - box(GC) ast::Item{ident:str_to_ident("a"), + P(ast::Item{ident:str_to_ident("a"), attrs:Vec::new(), id: ast::DUMMY_NODE_ID, - node: ast::ItemFn(ast::P(ast::FnDecl { + node: ast::ItemFn(P(ast::FnDecl { inputs: vec!(ast::Arg{ - ty: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, - node: ast::TyPath(ast::Path{ + ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, + node: ast::TyPath(ast::Path{ span:sp(10,13), global:false, segments: vec!( @@ -1040,7 +1040,7 @@ mod test { }, None, ast::DUMMY_NODE_ID), span:sp(10,13) }), - pat: box(GC) ast::Pat { + pat: P(ast::Pat { id: ast::DUMMY_NODE_ID, node: ast::PatIdent( ast::BindByValue(ast::MutImmutable), @@ -1050,12 +1050,12 @@ mod test { None ), span: sp(6,7) - }, + }), id: ast::DUMMY_NODE_ID }), - output: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, - node: ast::TyNil, - span:sp(15,15)}), // not sure + output: P(ast::Ty{id: ast::DUMMY_NODE_ID, + node: ast::TyNil, + span:sp(15,15)}), // not sure cf: ast::Return, variadic: false }), @@ -1069,10 +1069,10 @@ mod test { predicates: Vec::new(), } }, - ast::P(ast::Block { + P(ast::Block { view_items: Vec::new(), - stmts: vec!(box(GC) Spanned{ - node: ast::StmtSemi(box(GC) ast::Expr{ + stmts: vec!(P(Spanned{ + node: ast::StmtSemi(P(ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath( ast::Path{ @@ -1090,28 +1090,28 @@ mod test { } ), }), - span: sp(17,18)}, + span: sp(17,18)}), ast::DUMMY_NODE_ID), - span: sp(17,19)}), + span: sp(17,19)})), expr: None, id: ast::DUMMY_NODE_ID, rules: ast::DefaultBlock, // no idea span: sp(15,21), })), vis: ast::Inherited, - span: sp(0,21)})); + span: sp(0,21)}))); } #[test] fn parse_exprs () { // just make sure that they parse.... 
string_to_expr("3 + 4".to_string()); - string_to_expr("a::z.froob(b,box(GC)(987+3))".to_string()); + string_to_expr("a::z.froob(b,&(987+3))".to_string()); } #[test] fn attrs_fix_bug () { string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag]) - -> Result, String> { + -> Result, String> { #[cfg(windows)] fn wb() -> c_int { (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index 9ed9e626c3d32..d47231bc3e2de 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -21,8 +21,7 @@ use ast::{Expr, ExprLit, LitNil}; use codemap::{Span, respan}; use parse::parser; use parse::token; - -use std::gc::{Gc, GC}; +use ptr::P; /// The specific types of unsupported syntax #[deriving(PartialEq, Eq, Hash)] @@ -44,7 +43,7 @@ pub trait ParserObsoleteMethods { fn obsolete(&mut self, sp: Span, kind: ObsoleteSyntax); /// Reports an obsolete syntax non-fatal error, and returns /// a placeholder expression - fn obsolete_expr(&mut self, sp: Span, kind: ObsoleteSyntax) -> Gc; + fn obsolete_expr(&mut self, sp: Span, kind: ObsoleteSyntax) -> P; fn report(&mut self, sp: Span, kind: ObsoleteSyntax, @@ -105,9 +104,9 @@ impl<'a> ParserObsoleteMethods for parser::Parser<'a> { /// Reports an obsolete syntax non-fatal error, and returns /// a placeholder expression - fn obsolete_expr(&mut self, sp: Span, kind: ObsoleteSyntax) -> Gc { + fn obsolete_expr(&mut self, sp: Span, kind: ObsoleteSyntax) -> P { self.obsolete(sp, kind); - self.mk_expr(sp.lo, sp.hi, ExprLit(box(GC) respan(sp, LitNil))) + self.mk_expr(sp.lo, sp.hi, ExprLit(P(respan(sp, LitNil)))) } fn report(&mut self, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 328bdf883356c..f41362cad4185 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -39,9 +39,8 @@ use ast::{LitBool, LitChar, LitByte, LitBinary}; use ast::{LitNil, LitStr, LitInt, Local, LocalLet}; use ast::{MutImmutable, MutMutable, Mac_, MacInvocTT, Matcher, MatchNonterminal}; use ast::{MatchSeq, MatchTok, Method, MutTy, BiMul, Mutability}; -use ast::{MethodImplItem}; -use ast::{NamedField, UnNeg, NoReturn, UnNot, P, Pat, PatEnum}; -use ast::{PatIdent, PatLit, PatRange, PatRegion, PatStruct}; +use ast::{MethodImplItem, NamedField, UnNeg, NoReturn, UnNot}; +use ast::{Pat, PatEnum, PatIdent, PatLit, PatRange, PatRegion, PatStruct}; use ast::{PatTup, PatBox, PatWild, PatWildMulti, PatWildSingle}; use ast::{BiRem, RequiredMethod}; use ast::{RetStyle, Return, BiShl, BiShr, Stmt, StmtDecl}; @@ -79,12 +78,13 @@ use parse::token::{is_ident, is_ident_or_path, is_plain_ident}; use parse::token::{keywords, special_idents, token_to_binop}; use parse::token; use parse::{new_sub_parser_from_file, ParseSess}; +use ptr::P; use owned_slice::OwnedSlice; use std::collections::HashSet; use std::mem::replace; +use std::mem; use std::rc::Rc; -use std::gc::{Gc, GC}; use std::iter; #[allow(non_camel_case_types)] @@ -127,8 +127,8 @@ enum ItemOrViewItem { /// Indicates a failure to parse any kind of item. The attributes are /// returned. IoviNone(Vec), - IoviItem(Gc), - IoviForeignItem(Gc), + IoviItem(P), + IoviForeignItem(P), IoviViewItem(ViewItem) } @@ -143,8 +143,8 @@ macro_rules! 
maybe_whole_expr ( ($p:expr) => ( { let found = match $p.token { - INTERPOLATED(token::NtExpr(e)) => { - Some(e) + INTERPOLATED(token::NtExpr(ref e)) => { + Some((*e).clone()) } INTERPOLATED(token::NtPath(_)) => { // FIXME: The following avoids an issue with lexical borrowck scopes, @@ -156,7 +156,13 @@ macro_rules! maybe_whole_expr ( let span = $p.span; Some($p.mk_expr(span.lo, span.hi, ExprPath(pt))) } - INTERPOLATED(token::NtBlock(b)) => { + INTERPOLATED(token::NtBlock(_)) => { + // FIXME: The following avoids an issue with lexical borrowck scopes, + // but the clone is unfortunate. + let b = match $p.token { + INTERPOLATED(token::NtBlock(ref b)) => (*b).clone(), + _ => unreachable!() + }; let span = $p.span; Some($p.mk_expr(span.lo, span.hi, ExprBlock(b))) } @@ -286,8 +292,8 @@ fn maybe_append(lhs: Vec , rhs: Option >) struct ParsedItemsAndViewItems { attrs_remaining: Vec, view_items: Vec, - items: Vec>, - foreign_items: Vec> + items: Vec> , + foreign_items: Vec> } /* ident is handled by common.rs */ @@ -484,8 +490,7 @@ impl<'a> Parser<'a> { /// Commit to parsing a complete expression `e` expected to be /// followed by some token from the set edible + inedible. Recover /// from anticipated input errors, discarding erroneous characters. - pub fn commit_expr(&mut self, e: Gc, edible: &[token::Token], - inedible: &[token::Token]) { + pub fn commit_expr(&mut self, e: &Expr, edible: &[token::Token], inedible: &[token::Token]) { debug!("commit_expr {:?}", e); match e.node { ExprPath(..) => { @@ -500,17 +505,14 @@ impl<'a> Parser<'a> { self.expect_one_of(edible, inedible) } - pub fn commit_expr_expecting(&mut self, e: Gc, edible: token::Token) { + pub fn commit_expr_expecting(&mut self, e: &Expr, edible: token::Token) { self.commit_expr(e, &[edible], &[]) } /// Commit to parsing a complete statement `s`, which expects to be /// followed by some token from the set edible + inedible. Check /// for recoverable input errors, discarding erroneous characters. - pub fn commit_stmt(&mut self, s: Gc, edible: &[token::Token], - inedible: &[token::Token]) { - debug!("commit_stmt {:?}", s); - let _s = s; // unused, but future checks might want to inspect `s`. + pub fn commit_stmt(&mut self, edible: &[token::Token], inedible: &[token::Token]) { if self.last_token .as_ref() .map_or(false, |t| is_ident_or_path(&**t)) { @@ -522,8 +524,8 @@ impl<'a> Parser<'a> { self.expect_one_of(edible, inedible) } - pub fn commit_stmt_expecting(&mut self, s: Gc, edible: token::Token) { - self.commit_stmt(s, &[edible], &[]) + pub fn commit_stmt_expecting(&mut self, edible: token::Token) { + self.commit_stmt(&[edible], &[]) } pub fn parse_ident(&mut self) -> ast::Ident { @@ -1043,12 +1045,12 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Fn); let (decl, lifetimes) = self.parse_ty_fn_decl(true); - return TyBareFn(box(GC) BareFnTy { + TyBareFn(P(BareFnTy { abi: abi, fn_style: fn_style, lifetimes: lifetimes, decl: decl - }); + })) } /// Parses a procedure type (`proc`). The initial `proc` keyword must @@ -1084,13 +1086,13 @@ impl<'a> Parser<'a> { cf: ret_style, variadic: variadic }); - TyProc(box(GC) ClosureTy { + TyProc(P(ClosureTy { fn_style: NormalFn, onceness: Once, bounds: bounds, decl: decl, lifetimes: lifetime_defs, - }) + })) } /// Parses an optional unboxed closure kind (`&:`, `&mut:`, or `:`). 
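// A toy illustration (not real libsyntax code) of why the interpolated-token
// paths above (`maybe_whole_expr!`, `parse_meta_item`) clone the node into an
// `Option` before calling `self.bump()`: matching on `self.token` borrows
// `self`, and a `&mut self` call is rejected while that borrow is live, so
// the payload is extracted first and the mutation happens afterwards.
// `Parser`, `Token` and `bump` here are simplified stand-ins.

enum Token {
    Interpolated(String), // stands in for token::NtExpr / token::NtMeta
    Plain,
}

struct Parser {
    token: Token,
}

impl Parser {
    fn bump(&mut self) {
        // Advance to the next token; here we just overwrite it.
        self.token = Token::Plain;
    }

    fn take_interpolated(&mut self) -> Option<String> {
        // Step 1: borrow `self.token` only long enough to clone the payload.
        let nt = match self.token {
            Token::Interpolated(ref s) => Some(s.clone()),
            _ => None,
        };
        // Step 2: the borrow has ended, so mutating `self` is allowed.
        if nt.is_some() {
            self.bump();
        }
        nt
    }
}

fn main() {
    let mut p = Parser { token: Token::Interpolated("a + b".to_string()) };
    assert_eq!(p.take_interpolated().as_deref(), Some("a + b"));
    assert!(p.take_interpolated().is_none());
}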
@@ -1176,19 +1178,19 @@ impl<'a> Parser<'a> { match optional_unboxed_closure_kind { Some(unboxed_closure_kind) => { - TyUnboxedFn(box(GC) UnboxedFnTy { + TyUnboxedFn(P(UnboxedFnTy { kind: unboxed_closure_kind, decl: decl, - }) + })) } None => { - TyClosure(box(GC) ClosureTy { + TyClosure(P(ClosureTy { fn_style: fn_style, onceness: onceness, bounds: bounds, decl: decl, lifetimes: lifetime_defs, - }) + })) } } } @@ -1291,8 +1293,9 @@ impl<'a> Parser<'a> { debug!("parse_trait_methods(): parsing provided method"); let (inner_attrs, body) = p.parse_inner_attrs_and_block(); - let attrs = attrs.append(inner_attrs.as_slice()); - ProvidedMethod(box(GC) ast::Method { + let mut attrs = attrs; + attrs.extend(inner_attrs.move_iter()); + ProvidedMethod(P(ast::Method { attrs: attrs, id: ast::DUMMY_NODE_ID, span: mk_sp(lo, hi), @@ -1304,7 +1307,7 @@ impl<'a> Parser<'a> { d, body, vis) - }) + })) } _ => { @@ -1400,7 +1403,7 @@ impl<'a> Parser<'a> { if ts.len() == 1 && !one_tuple { self.expect(&token::RPAREN); - TyParen(*ts.get(0)) + TyParen(ts.move_iter().nth(0).unwrap()) } else { let t = TyTup(ts); self.expect(&token::RPAREN); @@ -1588,7 +1591,7 @@ impl<'a> Parser<'a> { } } - pub fn maybe_parse_fixed_vstore(&mut self) -> Option> { + pub fn maybe_parse_fixed_vstore(&mut self) -> Option> { if self.token == token::COMMA && self.look_ahead(1, |t| *t == token::DOTDOT) { self.bump(); @@ -1640,12 +1643,12 @@ impl<'a> Parser<'a> { } /// matches '-' lit | lit - pub fn parse_literal_maybe_minus(&mut self) -> Gc { + pub fn parse_literal_maybe_minus(&mut self) -> P { let minus_lo = self.span.lo; let minus_present = self.eat(&token::BINOP(token::MINUS)); let lo = self.span.lo; - let literal = box(GC) self.parse_lit(); + let literal = P(self.parse_lit()); let hi = self.span.hi; let expr = self.mk_expr(lo, hi, ExprLit(literal)); @@ -1894,85 +1897,84 @@ impl<'a> Parser<'a> { let e = self.parse_expr(); ast::Field { ident: spanned(lo, hi, i), - expr: e, span: mk_sp(lo, e.span.hi), + expr: e, } } - pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: Expr_) -> Gc { - box(GC) Expr { + pub fn mk_expr(&mut self, lo: BytePos, hi: BytePos, node: Expr_) -> P { + P(Expr { id: ast::DUMMY_NODE_ID, node: node, span: mk_sp(lo, hi), - } + }) } - pub fn mk_unary(&mut self, unop: ast::UnOp, expr: Gc) -> ast::Expr_ { + pub fn mk_unary(&mut self, unop: ast::UnOp, expr: P) -> ast::Expr_ { ExprUnary(unop, expr) } - pub fn mk_binary(&mut self, binop: ast::BinOp, - lhs: Gc, rhs: Gc) -> ast::Expr_ { + pub fn mk_binary(&mut self, binop: ast::BinOp, lhs: P, rhs: P) -> ast::Expr_ { ExprBinary(binop, lhs, rhs) } - pub fn mk_call(&mut self, f: Gc, args: Vec>) -> ast::Expr_ { + pub fn mk_call(&mut self, f: P, args: Vec>) -> ast::Expr_ { ExprCall(f, args) } fn mk_method_call(&mut self, ident: ast::SpannedIdent, tps: Vec>, - args: Vec>) + args: Vec>) -> ast::Expr_ { ExprMethodCall(ident, tps, args) } - pub fn mk_index(&mut self, expr: Gc, idx: Gc) -> ast::Expr_ { + pub fn mk_index(&mut self, expr: P, idx: P) -> ast::Expr_ { ExprIndex(expr, idx) } - pub fn mk_field(&mut self, expr: Gc, ident: ast::SpannedIdent, + pub fn mk_field(&mut self, expr: P, ident: ast::SpannedIdent, tys: Vec>) -> ast::Expr_ { ExprField(expr, ident, tys) } - pub fn mk_tup_field(&mut self, expr: Gc, idx: codemap::Spanned, + pub fn mk_tup_field(&mut self, expr: P, idx: codemap::Spanned, tys: Vec>) -> ast::Expr_ { ExprTupField(expr, idx, tys) } pub fn mk_assign_op(&mut self, binop: ast::BinOp, - lhs: Gc, rhs: Gc) -> ast::Expr_ { + lhs: P, rhs: P) -> ast::Expr_ { 
ExprAssignOp(binop, lhs, rhs) } - pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, m: Mac_) -> Gc { - box(GC) Expr { + pub fn mk_mac_expr(&mut self, lo: BytePos, hi: BytePos, m: Mac_) -> P { + P(Expr { id: ast::DUMMY_NODE_ID, node: ExprMac(codemap::Spanned {node: m, span: mk_sp(lo, hi)}), span: mk_sp(lo, hi), - } + }) } - pub fn mk_lit_u32(&mut self, i: u32) -> Gc { + pub fn mk_lit_u32(&mut self, i: u32) -> P { let span = &self.span; - let lv_lit = box(GC) codemap::Spanned { + let lv_lit = P(codemap::Spanned { node: LitInt(i as u64, ast::UnsignedIntLit(TyU32)), span: *span - }; + }); - box(GC) Expr { + P(Expr { id: ast::DUMMY_NODE_ID, node: ExprLit(lv_lit), span: *span, - } + }) } /// At the bottom (top?) of the precedence hierarchy, /// parse things like parenthesized exprs, /// macros, return, etc. - pub fn parse_bottom_expr(&mut self) -> Gc { + pub fn parse_bottom_expr(&mut self) -> P { maybe_whole_expr!(self); let lo = self.span.lo; @@ -1989,28 +1991,27 @@ impl<'a> Parser<'a> { if self.token == token::RPAREN { hi = self.span.hi; self.bump(); - let lit = box(GC) spanned(lo, hi, LitNil); + let lit = P(spanned(lo, hi, LitNil)); return self.mk_expr(lo, hi, ExprLit(lit)); } let mut es = vec!(self.parse_expr()); - self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]); + self.commit_expr(&**es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]); while self.token == token::COMMA { self.bump(); if self.token != token::RPAREN { es.push(self.parse_expr()); - self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]); - } - else { + self.commit_expr(&**es.last().unwrap(), &[], + &[token::COMMA, token::RPAREN]); + } else { trailing_comma = true; } } hi = self.span.hi; - self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN); + self.commit_expr_expecting(&**es.last().unwrap(), token::RPAREN); return if es.len() == 1 && !trailing_comma { - self.mk_expr(lo, hi, ExprParen(*es.get(0))) - } - else { + self.mk_expr(lo, hi, ExprParen(es.move_iter().nth(0).unwrap())) + } else { self.mk_expr(lo, hi, ExprTup(es)) } }, @@ -2079,14 +2080,14 @@ impl<'a> Parser<'a> { let decl = self.parse_proc_decl(); let body = self.parse_expr(); let fakeblock = P(ast::Block { + id: ast::DUMMY_NODE_ID, view_items: Vec::new(), stmts: Vec::new(), - expr: Some(body), - id: ast::DUMMY_NODE_ID, rules: DefaultBlock, span: body.span, + expr: Some(body), }); - return self.mk_expr(lo, body.span.hi, ExprProc(decl, fakeblock)); + return self.mk_expr(lo, fakeblock.span.hi, ExprProc(decl, fakeblock)); } if self.eat_keyword(keywords::If) { return self.parse_if_expr(); @@ -2200,7 +2201,7 @@ impl<'a> Parser<'a> { } fields.push(self.parse_field()); - self.commit_expr(fields.last().unwrap().expr, + self.commit_expr(&*fields.last().unwrap().expr, &[token::COMMA], &[token::RBRACE]); } @@ -2227,7 +2228,7 @@ impl<'a> Parser<'a> { // other literal expression let lit = self.parse_lit(); hi = lit.span.hi; - ex = ExprLit(box(GC) lit); + ex = ExprLit(P(lit)); } } } @@ -2237,19 +2238,19 @@ impl<'a> Parser<'a> { /// Parse a block or unsafe block pub fn parse_block_expr(&mut self, lo: BytePos, blk_mode: BlockCheckMode) - -> Gc { + -> P { self.expect(&token::LBRACE); let blk = self.parse_block_tail(lo, blk_mode); return self.mk_expr(blk.span.lo, blk.span.hi, ExprBlock(blk)); } /// parse a.b or a(13) or a[4] or just a - pub fn parse_dot_or_call_expr(&mut self) -> Gc { + pub fn parse_dot_or_call_expr(&mut self) -> P { let b = self.parse_bottom_expr(); self.parse_dot_or_call_expr_with(b) } - pub fn 
parse_dot_or_call_expr_with(&mut self, e0: Gc) -> Gc { + pub fn parse_dot_or_call_expr_with(&mut self, e0: P) -> P { let mut e = e0; let lo = e.span.lo; let mut hi; @@ -2330,7 +2331,7 @@ impl<'a> Parser<'a> { } continue; } - if self.expr_is_complete(e) { break; } + if self.expr_is_complete(&*e) { break; } match self.token { // expr(...) token::LPAREN => { @@ -2351,7 +2352,7 @@ impl<'a> Parser<'a> { self.bump(); let ix = self.parse_expr(); hi = self.span.hi; - self.commit_expr_expecting(ix, token::RBRACKET); + self.commit_expr_expecting(&*ix, token::RBRACKET); let index = self.mk_index(e, ix); e = self.mk_expr(lo, hi, index) } @@ -2556,7 +2557,7 @@ impl<'a> Parser<'a> { } /// Parse a prefix-operator expr - pub fn parse_prefix_expr(&mut self) -> Gc { + pub fn parse_prefix_expr(&mut self) -> P { let lo = self.span.lo; let hi; @@ -2638,28 +2639,23 @@ impl<'a> Parser<'a> { } /// Parse an expression of binops - pub fn parse_binops(&mut self) -> Gc { + pub fn parse_binops(&mut self) -> P { let prefix_expr = self.parse_prefix_expr(); self.parse_more_binops(prefix_expr, 0) } /// Parse an expression of binops of at least min_prec precedence - pub fn parse_more_binops(&mut self, lhs: Gc, - min_prec: uint) -> Gc { - if self.expr_is_complete(lhs) { return lhs; } + pub fn parse_more_binops(&mut self, lhs: P, min_prec: uint) -> P { + if self.expr_is_complete(&*lhs) { return lhs; } // Prevent dynamic borrow errors later on by limiting the // scope of the borrows. - { - let token: &token::Token = &self.token; - let restriction: &restriction = &self.restriction; - match (token, restriction) { - (&token::BINOP(token::OR), &RESTRICT_NO_BAR_OP) => return lhs, - (&token::BINOP(token::OR), - &RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) => return lhs, - (&token::OROR, &RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) => return lhs, - _ => { } - } + match (&self.token, &self.restriction) { + (&token::BINOP(token::OR), &RESTRICT_NO_BAR_OP) => return lhs, + (&token::BINOP(token::OR), + &RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) => return lhs, + (&token::OROR, &RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) => return lhs, + _ => { } } let cur_opt = token_to_binop(&self.token); @@ -2670,8 +2666,10 @@ impl<'a> Parser<'a> { self.bump(); let expr = self.parse_prefix_expr(); let rhs = self.parse_more_binops(expr, cur_prec); + let lhs_span = lhs.span; + let rhs_span = rhs.span; let binary = self.mk_binary(cur_op, lhs, rhs); - let bin = self.mk_expr(lhs.span.lo, rhs.span.hi, binary); + let bin = self.mk_expr(lhs_span.lo, rhs_span.hi, binary); self.parse_more_binops(bin, min_prec) } else { lhs @@ -2694,7 +2692,7 @@ impl<'a> Parser<'a> { /// Parse an assignment expression.... /// actually, this seems to be the main entry point for /// parsing an arbitrary expression. 
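// A compact, standalone sketch of the shape of `parse_more_binops` above:
// precedence climbing over *owned* expression nodes. Because `lhs` and `rhs`
// are moved into the new binary node, anything still needed from them (here
// the span endpoints) must be copied out first -- which is exactly what the
// added `lhs_span` / `rhs_span` lines do. The token stream, spans and
// precedence table below are simplified assumptions, not the parser's real
// representation.

#[derive(Clone, Copy)]
struct Span { lo: u32, hi: u32 }

enum Expr {
    Num(i64, Span),
    Binary(char, Box<Expr>, Box<Expr>, Span),
}

fn span_of(e: &Expr) -> Span {
    match *e {
        Expr::Num(_, sp) | Expr::Binary(_, _, _, sp) => sp,
    }
}

fn prec(op: char) -> Option<u32> {
    match op {
        '+' | '-' => Some(1),
        '*' | '/' => Some(2),
        _ => None,
    }
}

// `tokens` is a pre-tokenized stream of (operator, already-parsed operand).
fn parse_more_binops(
    mut lhs: Expr,
    min_prec: u32,
    tokens: &mut std::iter::Peekable<std::vec::IntoIter<(char, Expr)>>,
) -> Expr {
    while let Some(&(op, _)) = tokens.peek() {
        let cur_prec = match prec(op) {
            Some(p) if p > min_prec => p,
            _ => break,
        };
        let (_, rhs_operand) = tokens.next().unwrap();
        let rhs = parse_more_binops(rhs_operand, cur_prec, tokens);
        // Copy the spans out *before* `lhs` / `rhs` are moved into the node.
        let lhs_span = span_of(&lhs);
        let rhs_span = span_of(&rhs);
        lhs = Expr::Binary(op, Box::new(lhs), Box::new(rhs),
                           Span { lo: lhs_span.lo, hi: rhs_span.hi });
    }
    lhs
}

fn main() {
    // "1 + 2 * 3", with made-up byte spans.
    let sp = |lo, hi| Span { lo, hi };
    let mut toks = vec![('+', Expr::Num(2, sp(4, 5))), ('*', Expr::Num(3, sp(8, 9)))]
        .into_iter()
        .peekable();
    match parse_more_binops(Expr::Num(1, sp(0, 1)), 0, &mut toks) {
        Expr::Binary(op, _, _, sp) => {
            assert_eq!(op, '+'); // `*` binds tighter, so `+` ends up on top
            assert_eq!((sp.lo, sp.hi), (0, 9));
        }
        Expr::Num(..) => unreachable!(),
    }
}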
- pub fn parse_assign_expr(&mut self) -> Gc { + pub fn parse_assign_expr(&mut self) -> P { let lo = self.span.lo; let lhs = self.parse_binops(); match self.token { @@ -2718,8 +2716,9 @@ impl<'a> Parser<'a> { token::SHL => BiShl, token::SHR => BiShr }; + let rhs_span = rhs.span; let assign_op = self.mk_assign_op(aop, lhs, rhs); - self.mk_expr(lo, rhs.span.hi, assign_op) + self.mk_expr(lo, rhs_span.hi, assign_op) } _ => { lhs @@ -2728,40 +2727,40 @@ impl<'a> Parser<'a> { } /// Parse an 'if' expression ('if' token already eaten) - pub fn parse_if_expr(&mut self) -> Gc { + pub fn parse_if_expr(&mut self) -> P { let lo = self.last_span.lo; let cond = self.parse_expr_res(RESTRICT_NO_STRUCT_LITERAL); let thn = self.parse_block(); - let mut els: Option> = None; + let mut els: Option> = None; let mut hi = thn.span.hi; if self.eat_keyword(keywords::Else) { let elexpr = self.parse_else_expr(); - els = Some(elexpr); hi = elexpr.span.hi; + els = Some(elexpr); } self.mk_expr(lo, hi, ExprIf(cond, thn, els)) } // `|args| expr` pub fn parse_lambda_expr(&mut self, capture_clause: CaptureClause) - -> Gc { + -> P { let lo = self.span.lo; let (decl, optional_unboxed_closure_kind) = self.parse_fn_block_decl(); let body = self.parse_expr(); let fakeblock = P(ast::Block { + id: ast::DUMMY_NODE_ID, view_items: Vec::new(), stmts: Vec::new(), + span: body.span, expr: Some(body), - id: ast::DUMMY_NODE_ID, rules: DefaultBlock, - span: body.span, }); match optional_unboxed_closure_kind { Some(unboxed_closure_kind) => { self.mk_expr(lo, - body.span.hi, + fakeblock.span.hi, ExprUnboxedFn(capture_clause, unboxed_closure_kind, decl, @@ -2769,13 +2768,13 @@ impl<'a> Parser<'a> { } None => { self.mk_expr(lo, - body.span.hi, + fakeblock.span.hi, ExprFnBlock(capture_clause, decl, fakeblock)) } } } - pub fn parse_else_expr(&mut self) -> Gc { + pub fn parse_else_expr(&mut self) -> P { if self.eat_keyword(keywords::If) { return self.parse_if_expr(); } else { @@ -2785,7 +2784,7 @@ impl<'a> Parser<'a> { } /// Parse a 'for' .. 
'in' expression ('for' token already eaten) - pub fn parse_for_expr(&mut self, opt_ident: Option) -> Gc { + pub fn parse_for_expr(&mut self, opt_ident: Option) -> P { // Parse: `for in ` let lo = self.last_span.lo; @@ -2798,7 +2797,7 @@ impl<'a> Parser<'a> { self.mk_expr(lo, hi, ExprForLoop(pat, expr, loop_block, opt_ident)) } - pub fn parse_while_expr(&mut self, opt_ident: Option) -> Gc { + pub fn parse_while_expr(&mut self, opt_ident: Option) -> P { let lo = self.last_span.lo; let cond = self.parse_expr_res(RESTRICT_NO_STRUCT_LITERAL); let body = self.parse_block(); @@ -2806,17 +2805,17 @@ impl<'a> Parser<'a> { return self.mk_expr(lo, hi, ExprWhile(cond, body, opt_ident)); } - pub fn parse_loop_expr(&mut self, opt_ident: Option) -> Gc { + pub fn parse_loop_expr(&mut self, opt_ident: Option) -> P { let lo = self.last_span.lo; let body = self.parse_block(); let hi = body.span.hi; self.mk_expr(lo, hi, ExprLoop(body, opt_ident)) } - fn parse_match_expr(&mut self) -> Gc { + fn parse_match_expr(&mut self) -> P { let lo = self.last_span.lo; let discriminant = self.parse_expr_res(RESTRICT_NO_STRUCT_LITERAL); - self.commit_expr_expecting(discriminant, token::LBRACE); + self.commit_expr_expecting(&*discriminant, token::LBRACE); let mut arms: Vec = Vec::new(); while self.token != token::RBRACE { arms.push(self.parse_arm()); @@ -2837,11 +2836,11 @@ impl<'a> Parser<'a> { let expr = self.parse_expr_res(RESTRICT_STMT_EXPR); let require_comma = - !classify::expr_is_simple_block(expr) + !classify::expr_is_simple_block(&*expr) && self.token != token::RBRACE; if require_comma { - self.commit_expr(expr, &[token::COMMA], &[token::RBRACE]); + self.commit_expr(&*expr, &[token::COMMA], &[token::RBRACE]); } else { self.eat(&token::COMMA); } @@ -2855,12 +2854,12 @@ impl<'a> Parser<'a> { } /// Parse an expression - pub fn parse_expr(&mut self) -> Gc { + pub fn parse_expr(&mut self) -> P { return self.parse_expr_res(UNRESTRICTED); } /// Parse an expression, subject to the given restriction - pub fn parse_expr_res(&mut self, r: restriction) -> Gc { + pub fn parse_expr_res(&mut self, r: restriction) -> P { let old = self.restriction; self.restriction = r; let e = self.parse_assign_expr(); @@ -2869,7 +2868,7 @@ impl<'a> Parser<'a> { } /// Parse the RHS of a local variable declaration (e.g. 
'= 14;') - fn parse_initializer(&mut self) -> Option> { + fn parse_initializer(&mut self) -> Option> { if self.token == token::EQ { self.bump(); Some(self.parse_expr()) @@ -2879,7 +2878,7 @@ impl<'a> Parser<'a> { } /// Parse patterns, separated by '|' s - fn parse_pats(&mut self) -> Vec> { + fn parse_pats(&mut self) -> Vec> { let mut pats = Vec::new(); loop { pats.push(self.parse_pat()); @@ -2890,7 +2889,7 @@ impl<'a> Parser<'a> { fn parse_pat_vec_elements( &mut self, - ) -> (Vec> , Option>, Vec> ) { + ) -> (Vec>, Option>, Vec>) { let mut before = Vec::new(); let mut slice = None; let mut after = Vec::new(); @@ -2910,11 +2909,11 @@ impl<'a> Parser<'a> { if self.token == token::COMMA || self.token == token::RBRACKET { - slice = Some(box(GC) ast::Pat { + slice = Some(P(ast::Pat { id: ast::DUMMY_NODE_ID, node: PatWild(PatWildMulti), span: self.span, - }); + })); before_slice = false; } else { let _ = self.parse_pat(); @@ -2989,11 +2988,11 @@ impl<'a> Parser<'a> { self.parse_pat() } else { let fieldpath = codemap::Spanned{span:self.last_span, node: fieldname}; - box(GC) ast::Pat { + P(ast::Pat { id: ast::DUMMY_NODE_ID, node: PatIdent(bind_type, fieldpath, None), span: self.last_span - } + }) }; fields.push(ast::FieldPat { ident: fieldname, pat: subpat }); } @@ -3001,7 +3000,7 @@ impl<'a> Parser<'a> { } /// Parse a pattern. - pub fn parse_pat(&mut self) -> Gc { + pub fn parse_pat(&mut self) -> P { maybe_whole!(self, NtPat); let lo = self.span.lo; @@ -3013,11 +3012,11 @@ impl<'a> Parser<'a> { self.bump(); pat = PatWild(PatWildSingle); hi = self.last_span.hi; - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } token::TILDE => { // parse ~pat @@ -3027,11 +3026,11 @@ impl<'a> Parser<'a> { let last_span = self.last_span; hi = last_span.hi; self.obsolete(last_span, ObsoleteOwnedPattern); - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } token::BINOP(token::AND) | token::ANDAND => { // parse &pat @@ -3040,11 +3039,11 @@ impl<'a> Parser<'a> { let sub = self.parse_pat(); pat = PatRegion(sub); hi = self.last_span.hi; - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } token::LPAREN => { // parse (pat,pat,pat,...) as tuple @@ -3052,9 +3051,9 @@ impl<'a> Parser<'a> { if self.token == token::RPAREN { hi = self.span.hi; self.bump(); - let lit = box(GC) codemap::Spanned { + let lit = P(codemap::Spanned { node: LitNil, - span: mk_sp(lo, hi)}; + span: mk_sp(lo, hi)}); let expr = self.mk_expr(lo, hi, ExprLit(lit)); pat = PatLit(expr); } else { @@ -3071,11 +3070,11 @@ impl<'a> Parser<'a> { pat = PatTup(fields); } hi = self.last_span.hi; - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } token::LBRACKET => { // parse [pat,pat,...] 
as vector pattern @@ -3086,11 +3085,11 @@ impl<'a> Parser<'a> { self.expect(&token::RBRACKET); pat = ast::PatVec(before, slice, after); hi = self.last_span.hi; - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } _ => {} } @@ -3135,11 +3134,11 @@ impl<'a> Parser<'a> { let sub = self.parse_pat(); pat = PatBox(sub); hi = self.last_span.hi; - return box(GC) ast::Pat { + return P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi) - } + }) } else { let can_be_enum_or_struct = self.look_ahead(1, |t| { match *t { @@ -3196,7 +3195,7 @@ impl<'a> Parser<'a> { pat = PatStruct(enum_path, fields, etc); } _ => { - let mut args: Vec> = Vec::new(); + let mut args: Vec> = Vec::new(); match self.token { token::LPAREN => { let is_dotdot = self.look_ahead(1, |t| { @@ -3251,11 +3250,11 @@ impl<'a> Parser<'a> { } } hi = self.last_span.hi; - box(GC) ast::Pat { + P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, span: mk_sp(lo, hi), - } + }) } /// Parse ident or ident @ pat @@ -3295,7 +3294,7 @@ impl<'a> Parser<'a> { } /// Parse a local variable declaration - fn parse_local(&mut self) -> Gc { + fn parse_local(&mut self) -> P { let lo = self.span.lo; let pat = self.parse_pat(); @@ -3308,21 +3307,21 @@ impl<'a> Parser<'a> { ty = self.parse_ty(true); } let init = self.parse_initializer(); - box(GC) ast::Local { + P(ast::Local { ty: ty, pat: pat, init: init, id: ast::DUMMY_NODE_ID, span: mk_sp(lo, self.last_span.hi), source: LocalLet, - } + }) } /// Parse a "let" stmt - fn parse_let(&mut self) -> Gc { + fn parse_let(&mut self) -> P { let lo = self.span.lo; let local = self.parse_local(); - box(GC) spanned(lo, self.last_span.hi, DeclLocal(local)) + P(spanned(lo, self.last_span.hi, DeclLocal(local))) } /// Parse a structure field @@ -3345,7 +3344,7 @@ impl<'a> Parser<'a> { /// Parse a statement. may include decl. 
/// Precondition: any attributes are parsed already - pub fn parse_stmt(&mut self, item_attrs: Vec) -> Gc { + pub fn parse_stmt(&mut self, item_attrs: Vec) -> P { maybe_whole!(self, NtStmt); fn check_expected_item(p: &mut Parser, found_attrs: bool) { @@ -3361,7 +3360,7 @@ impl<'a> Parser<'a> { check_expected_item(self, !item_attrs.is_empty()); self.expect_keyword(keywords::Let); let decl = self.parse_let(); - return box(GC) spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)); + P(spanned(lo, decl.span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) } else if is_ident(&self.token) && !token::is_any_keyword(&self.token) && self.look_ahead(1, |t| *t == token::NOT) { @@ -3409,17 +3408,17 @@ impl<'a> Parser<'a> { let hi = self.span.hi; if id.name == token::special_idents::invalid.name { - return box(GC) spanned(lo, hi, StmtMac( - spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT)), false)); + P(spanned(lo, hi, StmtMac( + spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT)), false))) } else { // if it has a special ident, it's definitely an item - return box(GC) spanned(lo, hi, StmtDecl( - box(GC) spanned(lo, hi, DeclItem( + P(spanned(lo, hi, StmtDecl( + P(spanned(lo, hi, DeclItem( self.mk_item( lo, hi, id /*id is good here*/, ItemMac(spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT))), - Inherited, Vec::new(/*no attrs*/)))), - ast::DUMMY_NODE_ID)); + Inherited, Vec::new(/*no attrs*/))))), + ast::DUMMY_NODE_ID))) } } else { @@ -3427,8 +3426,8 @@ impl<'a> Parser<'a> { match self.parse_item_or_view_item(item_attrs, false) { IoviItem(i) => { let hi = i.span.hi; - let decl = box(GC) spanned(lo, hi, DeclItem(i)); - return box(GC) spanned(lo, hi, StmtDecl(decl, ast::DUMMY_NODE_ID)); + let decl = P(spanned(lo, hi, DeclItem(i))); + P(spanned(lo, hi, StmtDecl(decl, ast::DUMMY_NODE_ID))) } IoviViewItem(vi) => { self.span_fatal(vi.span, @@ -3437,21 +3436,21 @@ impl<'a> Parser<'a> { IoviForeignItem(_) => { self.fatal("foreign items are not allowed here"); } - IoviNone(_) => { /* fallthrough */ } - } + IoviNone(_) => { + check_expected_item(self, found_attrs); - check_expected_item(self, found_attrs); - - // Remainder are line-expr stmts. - let e = self.parse_expr_res(RESTRICT_STMT_EXPR); - return box(GC) spanned(lo, e.span.hi, StmtExpr(e, ast::DUMMY_NODE_ID)); + // Remainder are line-expr stmts. + let e = self.parse_expr_res(RESTRICT_STMT_EXPR); + P(spanned(lo, e.span.hi, StmtExpr(e, ast::DUMMY_NODE_ID))) + } + } } } /// Is this expression a successfully-parsed statement? - fn expr_is_complete(&mut self, e: Gc) -> bool { - return self.restriction == RESTRICT_STMT_EXPR && - !classify::expr_requires_semi_to_be_stmt(e); + fn expr_is_complete(&mut self, e: &Expr) -> bool { + self.restriction == RESTRICT_STMT_EXPR && + !classify::expr_requires_semi_to_be_stmt(e) } /// Parse a block. No inner attrs are allowed. 
@@ -3500,10 +3499,10 @@ impl<'a> Parser<'a> { } = self.parse_items_and_view_items(first_item_attrs, false, false); - for item in items.iter() { - let decl = box(GC) spanned(item.span.lo, item.span.hi, DeclItem(*item)); - stmts.push(box(GC) spanned(item.span.lo, item.span.hi, - StmtDecl(decl, ast::DUMMY_NODE_ID))); + for item in items.move_iter() { + let span = item.span; + let decl = P(spanned(span.lo, span.hi, DeclItem(item))); + stmts.push(P(spanned(span.lo, span.hi, StmtDecl(decl, ast::DUMMY_NODE_ID)))); } let mut attributes_box = attrs_remaining; @@ -3527,66 +3526,75 @@ impl<'a> Parser<'a> { _ => { let stmt = self.parse_stmt(attributes_box); attributes_box = Vec::new(); - match stmt.node { + stmt.and_then(|Spanned {node, span}| match node { StmtExpr(e, stmt_id) => { // expression without semicolon - if classify::stmt_ends_with_semi(&*stmt) { + if classify::expr_requires_semi_to_be_stmt(&*e) { // Just check for errors and recover; do not eat semicolon yet. - self.commit_stmt(stmt, &[], &[token::SEMI, token::RBRACE]); + self.commit_stmt(&[], &[token::SEMI, token::RBRACE]); } match self.token { token::SEMI => { self.bump(); let span_with_semi = Span { - lo: stmt.span.lo, + lo: span.lo, hi: self.last_span.hi, - expn_info: stmt.span.expn_info, + expn_info: span.expn_info, }; - stmts.push(box(GC) codemap::Spanned { + stmts.push(P(Spanned { node: StmtSemi(e, stmt_id), span: span_with_semi, - }); + })); } token::RBRACE => { expr = Some(e); } _ => { - stmts.push(stmt); + stmts.push(P(Spanned { + node: StmtExpr(e, stmt_id), + span: span + })); } } } - StmtMac(ref m, _) => { + StmtMac(m, semi) => { // statement macro; might be an expr match self.token { token::SEMI => { + stmts.push(P(Spanned { + node: StmtMac(m, true), + span: span, + })); self.bump(); - stmts.push(box(GC) codemap::Spanned { - node: StmtMac((*m).clone(), true), - span: stmt.span, - }); } token::RBRACE => { // if a block ends in `m!(arg)` without // a `;`, it must be an expr expr = Some( - self.mk_mac_expr(stmt.span.lo, - stmt.span.hi, - m.node.clone())); + self.mk_mac_expr(span.lo, + span.hi, + m.node)); } _ => { - stmts.push(stmt); + stmts.push(P(Spanned { + node: StmtMac(m, semi), + span: span + })); } } } _ => { // all other kinds of statements: - stmts.push(stmt.clone()); - - if classify::stmt_ends_with_semi(&*stmt) { - self.commit_stmt_expecting(stmt, token::SEMI); + if classify::stmt_ends_with_semi(&node) { + self.commit_stmt_expecting(token::SEMI); } + + stmts.push(P(Spanned { + node: node, + span: span + })); } - } + }) } } } @@ -4187,15 +4195,15 @@ impl<'a> Parser<'a> { fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, node: Item_, vis: Visibility, - attrs: Vec) -> Gc { - box(GC) Item { + attrs: Vec) -> P { + P(Item { ident: ident, attrs: attrs, id: ast::DUMMY_NODE_ID, node: node, vis: vis, span: mk_sp(lo, hi) - } + }) } /// Parse an item-position function declaration. @@ -4210,7 +4218,7 @@ impl<'a> Parser<'a> { /// Parse a method in a trait impl, starting with `attrs` attributes. 
pub fn parse_method(&mut self, already_parsed_attrs: Option>) - -> Gc { + -> P { let next_attrs = self.parse_outer_attributes(); let attrs = match already_parsed_attrs { Some(mut a) => { a.push_all_move(next_attrs); a } @@ -4264,6 +4272,7 @@ impl<'a> Parser<'a> { }); self.parse_where_clause(&mut generics); let (inner_attrs, body) = self.parse_inner_attrs_and_block(); + let body_span = body.span; let new_attrs = attrs.append(inner_attrs.as_slice()); (ast::MethDecl(ident, generics, @@ -4273,15 +4282,15 @@ impl<'a> Parser<'a> { decl, body, visa), - body.span.hi, new_attrs) + body_span.hi, new_attrs) } }; - box(GC) ast::Method { + P(ast::Method { attrs: new_attrs, id: ast::DUMMY_NODE_ID, span: mk_sp(lo, hi), node: method_, - } + }) } /// Parse trait Foo { ... } @@ -4444,12 +4453,12 @@ impl<'a> Parser<'a> { let _ = ast::DUMMY_NODE_ID; // FIXME: Workaround for crazy bug. let new_id = ast::DUMMY_NODE_ID; (class_name, - ItemStruct(box(GC) ast::StructDef { + ItemStruct(P(ast::StructDef { fields: fields, ctor_id: if is_tuple_like { Some(new_id) } else { None }, super_struct: super_struct, is_virtual: is_virtual, - }, generics), + }), generics), None) } @@ -4524,7 +4533,7 @@ impl<'a> Parser<'a> { items: starting_items, .. } = self.parse_items_and_view_items(first_item_attrs, true, true); - let mut items: Vec> = starting_items; + let mut items: Vec> = starting_items; let attrs_remaining_len = attrs_remaining.len(); // don't think this other loop is even necessary.... @@ -4574,7 +4583,7 @@ impl<'a> Parser<'a> { let ty = self.parse_ty(true); self.expect(&token::EQ); let e = self.parse_expr(); - self.commit_expr_expecting(e, token::SEMI); + self.commit_expr_expecting(&*e, token::SEMI); (id, ItemStatic(ty, m, e), None) } @@ -4726,7 +4735,7 @@ impl<'a> Parser<'a> { /// Parse a function declaration from a foreign module fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, - attrs: Vec) -> Gc { + attrs: Vec) -> P { let lo = self.span.lo; self.expect_keyword(keywords::Fn); @@ -4735,17 +4744,19 @@ impl<'a> Parser<'a> { self.parse_where_clause(&mut generics); let hi = self.span.hi; self.expect(&token::SEMI); - box(GC) ast::ForeignItem { ident: ident, - attrs: attrs, - node: ForeignItemFn(decl, generics), - id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, hi), - vis: vis } + P(ast::ForeignItem { + ident: ident, + attrs: attrs, + node: ForeignItemFn(decl, generics), + id: ast::DUMMY_NODE_ID, + span: mk_sp(lo, hi), + vis: vis + }) } /// Parse a static item from a foreign module fn parse_item_foreign_static(&mut self, vis: ast::Visibility, - attrs: Vec ) -> Gc { + attrs: Vec) -> P { let lo = self.span.lo; self.expect_keyword(keywords::Static); @@ -4756,14 +4767,14 @@ impl<'a> Parser<'a> { let ty = self.parse_ty(true); let hi = self.span.hi; self.expect(&token::SEMI); - box(GC) ast::ForeignItem { + P(ForeignItem { ident: ident, attrs: attrs, node: ForeignItemStatic(ty, mutbl), id: ast::DUMMY_NODE_ID, span: mk_sp(lo, hi), - vis: vis, - } + vis: vis + }) } /// Parse safe/unsafe and fn @@ -4903,19 +4914,19 @@ impl<'a> Parser<'a> { /// Parse a structure-like enum variant definition /// this should probably be renamed or refactored... 
- fn parse_struct_def(&mut self) -> Gc { + fn parse_struct_def(&mut self) -> P { let mut fields: Vec = Vec::new(); while self.token != token::RBRACE { fields.push(self.parse_struct_decl_field()); } self.bump(); - return box(GC) ast::StructDef { + P(StructDef { fields: fields, ctor_id: None, super_struct: None, is_virtual: false, - }; + }) } /// Parse the part of an "enum" decl following the '{' @@ -5034,16 +5045,21 @@ impl<'a> Parser<'a> { attrs: Vec , macros_allowed: bool) -> ItemOrViewItem { - match self.token { - INTERPOLATED(token::NtItem(item)) => { + let nt_item = match self.token { + INTERPOLATED(token::NtItem(ref item)) => { + Some((**item).clone()) + } + _ => None + }; + match nt_item { + Some(mut item) => { self.bump(); - let new_attrs = attrs.append(item.attrs.as_slice()); - return IoviItem(box(GC) Item { - attrs: new_attrs, - ..(*item).clone() - }); + let mut attrs = attrs; + mem::swap(&mut item.attrs, &mut attrs); + item.attrs.extend(attrs.move_iter()); + return IoviItem(P(item)); } - _ => {} + None => {} } let lo = self.span.lo; @@ -5328,12 +5344,12 @@ impl<'a> Parser<'a> { return IoviNone(attrs); } - pub fn parse_item_with_outer_attributes(&mut self) -> Option> { + pub fn parse_item_with_outer_attributes(&mut self) -> Option> { let attrs = self.parse_outer_attributes(); self.parse_item(attrs) } - pub fn parse_item(&mut self, attrs: Vec ) -> Option> { + pub fn parse_item(&mut self, attrs: Vec) -> Option> { match self.parse_item_or_view_item(attrs, true) { IoviNone(_) => None, IoviViewItem(_) => @@ -5355,7 +5371,7 @@ impl<'a> Parser<'a> { /// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE /// | MOD? non_global_path MOD_SEP STAR /// | MOD? non_global_path - fn parse_view_path(&mut self) -> Gc { + fn parse_view_path(&mut self) -> P { let lo = self.span.lo; if self.token == token::LBRACE { @@ -5369,8 +5385,8 @@ impl<'a> Parser<'a> { global: false, segments: Vec::new() }; - return box(GC) spanned(lo, self.span.hi, - ViewPathList(path, idents, ast::DUMMY_NODE_ID)); + return P(spanned(lo, self.span.hi, + ViewPathList(path, idents, ast::DUMMY_NODE_ID))); } let first_ident = self.parse_ident(); @@ -5399,9 +5415,9 @@ impl<'a> Parser<'a> { } }).collect() }; - return box(GC) spanned(lo, self.span.hi, - ViewPathSimple(first_ident, path, - ast::DUMMY_NODE_ID)); + return P(spanned(lo, self.span.hi, + ViewPathSimple(first_ident, path, + ast::DUMMY_NODE_ID))); } token::MOD_SEP => { @@ -5434,8 +5450,8 @@ impl<'a> Parser<'a> { } }).collect() }; - return box(GC) spanned(lo, self.span.hi, - ViewPathList(path, idents, ast::DUMMY_NODE_ID)); + return P(spanned(lo, self.span.hi, + ViewPathList(path, idents, ast::DUMMY_NODE_ID))); } // foo::bar::* @@ -5452,8 +5468,8 @@ impl<'a> Parser<'a> { } }).collect() }; - return box(GC) spanned(lo, self.span.hi, - ViewPathGlob(path, ast::DUMMY_NODE_ID)); + return P(spanned(lo, self.span.hi, + ViewPathGlob(path, ast::DUMMY_NODE_ID))); } _ => break @@ -5477,9 +5493,8 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::As) { rename_to = self.parse_ident() } - return box(GC) spanned(lo, - self.last_span.hi, - ViewPathSimple(rename_to, path, ast::DUMMY_NODE_ID)); + P(spanned(lo, self.last_span.hi, + ViewPathSimple(rename_to, path, ast::DUMMY_NODE_ID))) } /// Parses a sequence of items. Stops when it finds program diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index cce14be1ba526..f113e0e6cff75 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -9,15 +9,15 @@ // except according to those terms. 
use ast; -use ast::{P, Ident, Name, Mrk}; +use ast::{Ident, Name, Mrk}; use ext::mtwt; use parse::token; +use ptr::P; use util::interner::{RcStr, StrInterner}; use util::interner; use serialize::{Decodable, Decoder, Encodable, Encoder}; use std::fmt; -use std::gc::Gc; use std::mem; use std::path::BytesContainer; use std::rc::Rc; @@ -115,19 +115,19 @@ pub enum Token { #[deriving(Clone, Encodable, Decodable, PartialEq, Eq, Hash)] /// For interpolation during macro expansion. pub enum Nonterminal { - NtItem(Gc), + NtItem( P), NtBlock(P), - NtStmt(Gc), - NtPat( Gc), - NtExpr(Gc), - NtTy( P), + NtStmt( P), + NtPat( P), + NtExpr( P), + NtTy( P), /// See IDENT, above, for meaning of bool in NtIdent: NtIdent(Box, bool), /// Stuff inside brackets for attributes - NtMeta(Gc), + NtMeta( P), NtPath(Box), - NtTT( Gc), // needs Gc'd to break a circularity - NtMatchers(Vec ) + NtTT( P), // needs P'ed to break a circularity + NtMatchers(Vec) } impl fmt::Show for Nonterminal { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index a4dff45ad359f..d0df95d711ee4 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -10,7 +10,7 @@ use abi; use ast::{FnMutUnboxedClosureKind, FnOnceUnboxedClosureKind}; -use ast::{FnUnboxedClosureKind, MethodImplItem, P}; +use ast::{FnUnboxedClosureKind, MethodImplItem}; use ast::{RegionTyParamBound, TraitTyParamBound, UnboxedClosureKind}; use ast::{UnboxedFnTyParamBound, RequiredMethod, ProvidedMethod}; use ast; @@ -26,8 +26,8 @@ use parse; use print::pp::{break_offset, word, space, zerobreak, hardbreak}; use print::pp::{Breaks, Consistent, Inconsistent, eof}; use print::pp; +use ptr::P; -use std::gc::Gc; use std::io::{IoResult, MemWriter}; use std::io; use std::mem; @@ -246,7 +246,7 @@ pub fn ident_to_string(id: &ast::Ident) -> String { } pub fn fun_to_string(decl: &ast::FnDecl, fn_style: ast::FnStyle, name: ast::Ident, - opt_explicit_self: Option, + opt_explicit_self: Option<&ast::ExplicitSelf_>, generics: &ast::Generics) -> String { $to_string(|s| { try!(s.print_fn(decl, Some(fn_style), abi::Rust, @@ -278,7 +278,7 @@ pub fn lit_to_string(l: &ast::Lit) -> String { $to_string(|s| s.print_literal(l)) } -pub fn explicit_self_to_string(explicit_self: ast::ExplicitSelf_) -> String { +pub fn explicit_self_to_string(explicit_self: &ast::ExplicitSelf_) -> String { $to_string(|s| s.print_explicit_self(explicit_self, ast::MutImmutable).map(|_| {})) } @@ -502,7 +502,7 @@ impl<'a> State<'a> { } pub fn commasep_exprs(&mut self, b: Breaks, - exprs: &[Gc]) -> IoResult<()> { + exprs: &[P]) -> IoResult<()> { self.commasep_cmnt(b, exprs, |s, e| s.print_expr(&**e), |e| e.span) } @@ -574,7 +574,7 @@ impl<'a> State<'a> { ast::TyTup(ref elts) => { try!(self.popen()); try!(self.commasep(Inconsistent, elts.as_slice(), - |s, ty| s.print_type_ref(ty))); + |s, ty| s.print_type(&**ty))); if elts.len() == 1 { try!(word(&mut self.s, ",")); } @@ -585,7 +585,7 @@ impl<'a> State<'a> { try!(self.print_type(&**typ)); try!(self.pclose()); } - ast::TyBareFn(f) => { + ast::TyBareFn(ref f) => { let generics = ast::Generics { lifetimes: f.lifetimes.clone(), ty_params: OwnedSlice::empty(), @@ -605,7 +605,7 @@ impl<'a> State<'a> { None, None)); } - ast::TyClosure(f) => { + ast::TyClosure(ref f) => { let generics = ast::Generics { lifetimes: f.lifetimes.clone(), ty_params: OwnedSlice::empty(), @@ -645,7 +645,7 @@ impl<'a> State<'a> { None, None)); } - ast::TyUnboxedFn(f) => { + ast::TyUnboxedFn(ref f) => { try!(self.print_ty_fn(None, None, ast::NormalFn, @@ -679,10 
+679,6 @@ impl<'a> State<'a> { self.end() } - pub fn print_type_ref(&mut self, ty: &P) -> IoResult<()> { - self.print_type(&**ty) - } - pub fn print_foreign_item(&mut self, item: &ast::ForeignItem) -> IoResult<()> { try!(self.hardbreak_if_not_bol()); @@ -794,10 +790,8 @@ impl<'a> State<'a> { if struct_def.is_virtual { try!(self.word_space("virtual")); } - try!(self.head(visibility_qualified(item.vis, - "struct").as_slice())); - try!(self.print_struct(&**struct_def, generics, item.ident, - item.span)); + try!(self.head(visibility_qualified(item.vis,"struct").as_slice())); + try!(self.print_struct(&**struct_def, generics, item.ident, item.span)); } ast::ItemImpl(ref generics, @@ -828,8 +822,8 @@ impl<'a> State<'a> { try!(self.print_inner_attributes(item.attrs.as_slice())); for impl_item in impl_items.iter() { match *impl_item { - ast::MethodImplItem(meth) => { - try!(self.print_method(&*meth)); + ast::MethodImplItem(ref meth) => { + try!(self.print_method(&**meth)); } } } @@ -1068,7 +1062,7 @@ impl<'a> State<'a> { Some(m.ident), &OwnedSlice::empty(), Some(&m.generics), - Some(m.explicit_self.node), + Some(&m.explicit_self.node), None)); word(&mut self.s, ";") } @@ -1097,18 +1091,18 @@ impl<'a> State<'a> { abi, ref explicit_self, fn_style, - decl, - body, + ref decl, + ref body, vis) => { - try!(self.print_fn(&*decl, + try!(self.print_fn(&**decl, Some(fn_style), abi, ident, generics, - Some(explicit_self.node), + Some(&explicit_self.node), vis)); try!(word(&mut self.s, " ")); - self.print_block_with_attrs(&*body, meth.attrs.as_slice()) + self.print_block_with_attrs(&**body, meth.attrs.as_slice()) }, ast::MethMac(codemap::Spanned { node: ast::MacInvocTT(ref pth, ref tts, _), ..}) => { @@ -1199,7 +1193,7 @@ impl<'a> State<'a> { } } } - if parse::classify::stmt_ends_with_semi(st) { + if parse::classify::stmt_ends_with_semi(&st.node) { try!(word(&mut self.s, ";")); } self.maybe_print_trailing_comment(st.span, None) @@ -1257,19 +1251,19 @@ impl<'a> State<'a> { self.ann.post(self, NodeBlock(blk)) } - fn print_else(&mut self, els: Option>) -> IoResult<()> { + fn print_else(&mut self, els: Option<&ast::Expr>) -> IoResult<()> { match els { Some(_else) => { match _else.node { // "another else-if" - ast::ExprIf(ref i, ref t, e) => { + ast::ExprIf(ref i, ref then, ref e) => { try!(self.cbox(indent_unit - 1u)); try!(self.ibox(0u)); try!(word(&mut self.s, " else if ")); try!(self.print_expr(&**i)); try!(space(&mut self.s)); - try!(self.print_block(&**t)); - self.print_else(e) + try!(self.print_block(&**then)); + self.print_else(e.as_ref().map(|e| &**e)) } // "final else" ast::ExprBlock(ref b) => { @@ -1289,7 +1283,7 @@ impl<'a> State<'a> { } pub fn print_if(&mut self, test: &ast::Expr, blk: &ast::Block, - elseopt: Option>, chk: bool) -> IoResult<()> { + elseopt: Option<&ast::Expr>, chk: bool) -> IoResult<()> { try!(self.head("if")); if chk { try!(self.word_nbsp("check")); } try!(self.print_expr(test)); @@ -1312,7 +1306,7 @@ impl<'a> State<'a> { } - fn print_call_post(&mut self, args: &[Gc]) -> IoResult<()> { + fn print_call_post(&mut self, args: &[P]) -> IoResult<()> { try!(self.popen()); try!(self.commasep_exprs(Inconsistent, args)); self.pclose() @@ -1361,7 +1355,7 @@ impl<'a> State<'a> { try!(self.end()); } - ast::ExprStruct(ref path, ref fields, wth) => { + ast::ExprStruct(ref path, ref fields, ref wth) => { try!(self.print_path(path, true)); try!(word(&mut self.s, "{")); try!(self.commasep_cmnt( @@ -1375,7 +1369,7 @@ impl<'a> State<'a> { s.end() }, |f| f.span)); - match wth { + match *wth { Some(ref 
expr) => { try!(self.ibox(indent_unit)); if !fields.is_empty() { @@ -1410,7 +1404,7 @@ impl<'a> State<'a> { if tys.len() > 0u { try!(word(&mut self.s, "::<")); try!(self.commasep(Inconsistent, tys.as_slice(), - |s, ty| s.print_type_ref(ty))); + |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } try!(self.print_call_post(base_args)); @@ -1437,8 +1431,8 @@ impl<'a> State<'a> { try!(self.word_space("as")); try!(self.print_type(&**ty)); } - ast::ExprIf(ref test, ref blk, elseopt) => { - try!(self.print_if(&**test, &**blk, elseopt, false)); + ast::ExprIf(ref test, ref blk, ref elseopt) => { + try!(self.print_if(&**test, &**blk, elseopt.as_ref().map(|e| &**e), false)); } ast::ExprWhile(ref test, ref blk, opt_ident) => { for ident in opt_ident.iter() { @@ -1500,13 +1494,13 @@ impl<'a> State<'a> { try!(self.print_block_unclosed(&**body)); } else { // we extract the block, so as not to create another set of boxes - match body.expr.unwrap().node { - ast::ExprBlock(blk) => { - try!(self.print_block_unclosed(&*blk)); + match body.expr.as_ref().unwrap().node { + ast::ExprBlock(ref blk) => { + try!(self.print_block_unclosed(&**blk)); } _ => { // this is a bare expression - try!(self.print_expr(&*body.expr.unwrap())); + try!(self.print_expr(&**body.expr.as_ref().unwrap())); try!(self.end()); // need to close a box } } @@ -1532,13 +1526,13 @@ impl<'a> State<'a> { try!(self.print_block_unclosed(&**body)); } else { // we extract the block, so as not to create another set of boxes - match body.expr.unwrap().node { + match body.expr.as_ref().unwrap().node { ast::ExprBlock(ref blk) => { try!(self.print_block_unclosed(&**blk)); } _ => { // this is a bare expression - try!(self.print_expr(&*body.expr.unwrap())); + try!(self.print_expr(body.expr.as_ref().map(|e| &**e).unwrap())); try!(self.end()); // need to close a box } } @@ -1560,13 +1554,13 @@ impl<'a> State<'a> { assert!(body.stmts.is_empty()); assert!(body.expr.is_some()); // we extract the block, so as not to create another set of boxes - match body.expr.unwrap().node { + match body.expr.as_ref().unwrap().node { ast::ExprBlock(ref blk) => { try!(self.print_block_unclosed(&**blk)); } _ => { // this is a bare expression - try!(self.print_expr(&*body.expr.unwrap())); + try!(self.print_expr(body.expr.as_ref().map(|e| &**e).unwrap())); try!(self.end()); // need to close a box } } @@ -1603,7 +1597,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "::<")); try!(self.commasep( Inconsistent, tys.as_slice(), - |s, ty| s.print_type_ref(ty))); + |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } } @@ -1615,7 +1609,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "::<")); try!(self.commasep( Inconsistent, tys.as_slice(), - |s, ty| s.print_type_ref(ty))); + |s, ty| s.print_type(&**ty))); try!(word(&mut self.s, ">")); } } @@ -1809,7 +1803,7 @@ impl<'a> State<'a> { try!(self.commasep( Inconsistent, segment.types.as_slice(), - |s, ty| s.print_type_ref(ty))); + |s, ty| s.print_type(&**ty))); } try!(word(&mut self.s, ">")) @@ -1841,7 +1835,7 @@ impl<'a> State<'a> { match pat.node { ast::PatWild(ast::PatWildSingle) => try!(word(&mut self.s, "_")), ast::PatWild(ast::PatWildMulti) => try!(word(&mut self.s, "..")), - ast::PatIdent(binding_mode, ref path1, sub) => { + ast::PatIdent(binding_mode, ref path1, ref sub) => { match binding_mode { ast::BindByRef(mutbl) => { try!(self.word_nbsp("ref")); @@ -1853,7 +1847,7 @@ impl<'a> State<'a> { } } try!(self.print_ident(path1.node)); - match sub { + match *sub { Some(ref p) => { try!(word(&mut self.s, "@")); 
try!(self.print_pat(&**p)); @@ -1921,7 +1915,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "..")); try!(self.print_expr(&**end)); } - ast::PatVec(ref before, slice, ref after) => { + ast::PatVec(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, before.as_slice(), @@ -1994,10 +1988,10 @@ impl<'a> State<'a> { // Returns whether it printed anything fn print_explicit_self(&mut self, - explicit_self: ast::ExplicitSelf_, + explicit_self: &ast::ExplicitSelf_, mutbl: ast::Mutability) -> IoResult { try!(self.print_mutability(mutbl)); - match explicit_self { + match *explicit_self { ast::SelfStatic => { return Ok(false); } ast::SelfValue(_) => { try!(word(&mut self.s, "self")); @@ -2023,7 +2017,7 @@ impl<'a> State<'a> { abi: abi::Abi, name: ast::Ident, generics: &ast::Generics, - opt_explicit_self: Option, + opt_explicit_self: Option<&ast::ExplicitSelf_>, vis: ast::Visibility) -> IoResult<()> { try!(self.head("")); try!(self.print_fn_header_info(opt_explicit_self, fn_style, abi, vis)); @@ -2035,7 +2029,7 @@ impl<'a> State<'a> { } pub fn print_fn_args(&mut self, decl: &ast::FnDecl, - opt_explicit_self: Option) + opt_explicit_self: Option<&ast::ExplicitSelf_>) -> IoResult<()> { // It is unfortunate to duplicate the commasep logic, but we want the // self type and the args all in the same box. @@ -2043,7 +2037,7 @@ impl<'a> State<'a> { let mut first = true; for &explicit_self in opt_explicit_self.iter() { let m = match explicit_self { - ast::SelfStatic => ast::MutImmutable, + &ast::SelfStatic => ast::MutImmutable, _ => match decl.inputs.get(0).pat.node { ast::PatIdent(ast::BindByValue(m), _, _) => m, _ => ast::MutImmutable @@ -2068,7 +2062,7 @@ impl<'a> State<'a> { } pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl, - opt_explicit_self: Option) + opt_explicit_self: Option<&ast::ExplicitSelf_>) -> IoResult<()> { try!(self.popen()); try!(self.print_fn_args(decl, opt_explicit_self)); @@ -2413,7 +2407,7 @@ impl<'a> State<'a> { id: Option, bounds: &OwnedSlice, generics: Option<&ast::Generics>, - opt_explicit_self: Option, + opt_explicit_self: Option<&ast::ExplicitSelf_>, opt_unboxed_closure_kind: Option) -> IoResult<()> { @@ -2754,7 +2748,7 @@ impl<'a> State<'a> { } pub fn print_fn_header_info(&mut self, - _opt_explicit_self: Option, + _opt_explicit_self: Option<&ast::ExplicitSelf_>, opt_fn_style: Option, abi: abi::Abi, vis: ast::Visibility) -> IoResult<()> { @@ -2792,6 +2786,7 @@ mod test { use ast_util; use codemap; use parse::token; + use ptr::P; #[test] fn test_fun_to_string() { @@ -2799,9 +2794,9 @@ mod test { let decl = ast::FnDecl { inputs: Vec::new(), - output: ast::P(ast::Ty {id: 0, - node: ast::TyNil, - span: codemap::DUMMY_SP}), + output: P(ast::Ty {id: 0, + node: ast::TyNil, + span: codemap::DUMMY_SP}), cf: ast::Return, variadic: false }; diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs new file mode 100644 index 0000000000000..bd560abf3bda7 --- /dev/null +++ b/src/libsyntax/ptr.rs @@ -0,0 +1,116 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! The AST pointer +//! +//! Provides `P`, a frozen owned smart pointer, as a replacement for `@T` in the AST. +//! +//! # Motivations and benefits +//! +//! 
* **Identity**: sharing AST nodes is problematic for the various analysis passes
+//!   (e.g. one may be able to bypass the borrow checker with a shared `ExprAddrOf`
+//!   node taking a mutable borrow). The only reason `@T` in the AST hasn't caused
+//!   issues is because of inefficient folding passes which would always deduplicate
+//!   any such shared nodes. Even if the AST were to switch to an arena, this would
+//!   still hold, i.e. it couldn't use `&'a T`, but rather a wrapper like `P<'a, T>`.
+//!
+//! * **Immutability**: `P<T>` disallows mutating its inner `T`, unlike `Box<T>`
+//!   (unless it contains an `Unsafe` interior, but that may be denied later).
+//!   This mainly prevents mistakes, but can also enforce a kind of "purity".
+//!
+//! * **Efficiency**: folding can reuse allocation space for `P<T>` and `Vec<T>`,
+//!   the latter even when the input and output types differ (as it would be the
+//!   case with arenas or a GADT AST using type parameters to toggle features).
+//!
+//! * **Maintainability**: `P<T>` provides a fixed interface - `Deref`,
+//!   `and_then` and `map` - which can remain fully functional even if the
+//!   implementation changes (using a special thread-local heap, for example).
+//!   Moreover, a switch to, e.g. `P<'a, T>` would be easy and mostly automated.
+
+use std::fmt;
+use std::fmt::Show;
+use std::hash::Hash;
+use serialize::{Encodable, Decodable, Encoder, Decoder};
+
+/// An owned smart pointer.
+pub struct P<T> {
+    ptr: Box<T>
+}
+
+#[allow(non_snake_case)]
+/// Construct a `P<T>` from a `T` value.
+pub fn P<T: 'static>(value: T) -> P<T> {
+    P {
+        ptr: box value
+    }
+}
+
+impl<T: 'static> P<T> {
+    /// Move out of the pointer.
+    /// Intended for chaining transformations not covered by `map`.
+    pub fn and_then<U>(self, f: |T| -> U) -> U {
+        f(*self.ptr)
+    }
+
+    /// Transform the inner value, consuming `self` and producing a new `P<T>`.
+    pub fn map(mut self, f: |T| -> T) -> P<T> {
+        use std::{mem, ptr};
+        unsafe {
+            let p = &mut *self.ptr;
+            // FIXME(#5016) this shouldn't need to zero to be safe.
+            mem::move_val_init(p, f(ptr::read_and_zero(p)));
+        }
+        self
+    }
+}
+
+impl<T> Deref<T> for P<T> {
+    fn deref<'a>(&'a self) -> &'a T {
+        &*self.ptr
+    }
+}
+
+impl<T: 'static + Clone> Clone for P<T> {
+    fn clone(&self) -> P<T> {
+        P((**self).clone())
+    }
+}
+
+impl<T: PartialEq> PartialEq for P<T> {
+    fn eq(&self, other: &P<T>) -> bool {
+        **self == **other
+    }
+}
+
+impl<T: Eq> Eq for P<T> {}
+
+impl<T: Show> Show for P<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        (**self).fmt(f)
+    }
+}
+
+impl<S, T: Hash<S>> Hash<S> for P<T> {
+    fn hash(&self, state: &mut S) {
+        (**self).hash(state);
+    }
+}
+
+impl<E, D: Decoder<E>, T: 'static + Decodable<D, E>> Decodable<D, E> for P<T> {
+    fn decode(d: &mut D) -> Result<P<T>, E> {
+        Decodable::decode(d).map(P)
+    }
+}
+
+impl<E, S: Encoder<E>, T: Encodable<S, E>> Encodable<S, E> for P<T> {
+    fn encode(&self, s: &mut S) -> Result<(), E> {
+        (**self).encode(s)
+    }
+}
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 7b96cf3c60dff..d0faa3c682064 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -14,8 +14,7 @@ use parse::{ParseSess,string_to_filemap,filemap_to_tts};
 use parse::{new_parser_from_source_str};
 use parse::parser::Parser;
 use parse::token;
-
-use std::gc::Gc;
+use ptr::P;
 
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_tts(source_str: String) -> Vec<ast::TokenTree> {
@@ -48,21 +47,21 @@ pub fn string_to_crate (source_str : String) -> ast::Crate {
 }
 
 /// Parse a string, return an expr
-pub fn string_to_expr (source_str : String) -> Gc<ast::Expr> {
+pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
     with_error_checking_parse(source_str, |p| {
         p.parse_expr()
     })
 }
 
 /// Parse a string, return an item
-pub fn string_to_item (source_str : String) -> Option<Gc<ast::Item>> {
+pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
     with_error_checking_parse(source_str, |p| {
         p.parse_item(Vec::new())
     })
 }
 
 /// Parse a string, return a stmt
-pub fn string_to_stmt(source_str : String) -> Gc<ast::Stmt> {
+pub fn string_to_stmt(source_str : String) -> P<ast::Stmt> {
     with_error_checking_parse(source_str, |p| {
         p.parse_stmt(Vec::new())
     })
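A minimal, modernized sketch of the `P<T>` interface the new `ptr.rs` above introduces (`Deref`, `map`, `and_then`), written in current Rust so it compiles standalone. Names and bounds here are illustrative only; the in-tree version uses 2014-era syntax (`box`, `|T| -> U` closure types) and rewrites the value in place inside `map` to reuse the allocation, which this sketch does not attempt:

```rust
use std::ops::Deref;

struct P<T> {
    ptr: Box<T>,
}

#[allow(non_snake_case)]
fn P<T>(value: T) -> P<T> {
    P { ptr: Box::new(value) }
}

impl<T> P<T> {
    /// Move out of the pointer; for transformations not covered by `map`.
    fn and_then<U, F: FnOnce(T) -> U>(self, f: F) -> U {
        f(*self.ptr)
    }

    /// Rebuild the inner value. The real implementation rewrites in place
    /// to reuse the allocation; this sketch simply reboxes.
    fn map<F: FnOnce(T) -> T>(self, f: F) -> P<T> {
        P(f(*self.ptr))
    }
}

impl<T> Deref for P<T> {
    type Target = T;
    fn deref(&self) -> &T {
        &self.ptr
    }
}

fn main() {
    let e = P(41i32);
    let e = e.map(|n| n + 1);            // fold-style rewrite
    assert_eq!(*e, 42);                  // read through Deref, as `&*expr` does above
    let doubled = e.and_then(|n| n * 2); // consume the node entirely
    assert_eq!(doubled, 84);
}
```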
@@ -70,7 +69,7 @@ pub fn string_to_stmt(source_str : String) -> Gc<ast::Stmt> {
 
 /// Parse a string, return a pat. Uses "irrefutable"... which doesn't
 /// (currently) affect parsing.
-pub fn string_to_pat(source_str: String) -> Gc<ast::Pat> {
+pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     string_to_parser(&new_parse_sess(), source_str).parse_pat()
 }
diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs
index 517c5e5bf47da..47aef987a63d0 100644
--- a/src/libsyntax/util/small_vector.rs
+++ b/src/libsyntax/util/small_vector.rs
@@ -12,6 +12,8 @@
 use std::mem;
 use std::slice;
 use std::vec;
+use fold::MoveMap;
+
 /// A vector type optimized for cases where the size is almost always 0 or 1
 pub struct SmallVector<T> {
     repr: SmallVectorRepr<T>,
 }
@@ -20,7 +22,7 @@ pub struct SmallVector<T> {
 enum SmallVectorRepr<T> {
     Zero,
     One(T),
-    Many(Vec<T> ),
+    Many(Vec<T>),
 }
 
 impl<T> Collection for SmallVector<T> {
@@ -160,6 +162,17 @@ impl<T> Iterator<T> for MoveItems<T> {
     }
 }
 
+impl<T> MoveMap<T> for SmallVector<T> {
+    fn move_map(self, f: |T| -> T) -> SmallVector<T> {
+        let repr = match self.repr {
+            Zero => Zero,
+            One(v) => One(f(v)),
+            Many(vs) => Many(vs.move_map(f))
+        };
+        SmallVector { repr: repr }
+    }
+}
+
 #[cfg(test)]
 mod test {
     use super::*;
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 2a989e6d63a23..30a38e28729f0 100644
--- a/src/libsyntax/visit.rs
+++ b/src/libsyntax/visit.rs
@@ -27,10 +27,9 @@
 use abi::Abi;
 use ast::*;
 use ast;
 use codemap::Span;
+use ptr::P;
 use owned_slice::OwnedSlice;
 
-use std::gc::Gc;
-
 pub enum FnKind<'a> {
     /// fn foo() or extern "Abi" fn foo()
     FkItemFn(Ident, &'a Generics, FnStyle, Abi),
@@ -121,16 +120,8 @@ pub fn walk_inlined_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v InlinedI
     match *item {
         IIItem(ref i) => visitor.visit_item(&**i),
         IIForeign(ref i) => visitor.visit_foreign_item(&**i),
-        IITraitItem(_, ref iti) => {
-            match *iti {
-                ProvidedInlinedTraitItem(ref m) => {
-                    walk_method_helper(visitor, &**m)
-                }
-                RequiredInlinedTraitItem(ref m) => {
-                    walk_method_helper(visitor, &**m)
-                }
-            }
-        }
+        IITraitItem(_, ref ti) => visitor.visit_trait_item(ti),
+        IIImplItem(_, MethodImplItem(ref m)) => walk_method_helper(visitor, &**m)
     }
 }
 
@@ -644,14 +635,14 @@ pub fn walk_decl<'v, V: Visitor<'v>>(visitor: &mut V, declaration: &'v Decl) {
 }
 
 pub fn walk_expr_opt<'v, V: Visitor<'v>>(visitor: &mut V,
-                                         optional_expression: &'v Option<Gc<Expr>>) {
+                                         optional_expression: &'v Option<P<Expr>>) {
     match *optional_expression {
         None => {}
         Some(ref expression) => visitor.visit_expr(&**expression),
     }
 }
 
-pub fn walk_exprs<'v, V: Visitor<'v>>(visitor: &mut V, expressions: &'v [Gc<Expr>]) {
+pub fn walk_exprs<'v, V: Visitor<'v>>(visitor: &mut V, expressions: &'v [P<Expr>]) {
     for expression in expressions.iter() {
         visitor.visit_expr(&**expression)
     }
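The `MoveMap` impl above is what lets the folder rewrite a `SmallVector` element by element, by value, instead of going through references. A hedged, self-contained sketch of the same idea in current Rust (names simplified; the real `move_map` on `Vec` reuses the existing buffer rather than collecting into a new one):

```rust
// Map a container by value so a fold can rebuild it without cloning.
trait MoveMap<T>: Sized {
    fn move_map<F: FnMut(T) -> T>(self, f: F) -> Self;
}

#[allow(dead_code)]
enum SmallVector<T> {
    Zero,
    One(T),
    Many(Vec<T>),
}

impl<T> MoveMap<T> for SmallVector<T> {
    fn move_map<F: FnMut(T) -> T>(self, mut f: F) -> Self {
        match self {
            SmallVector::Zero => SmallVector::Zero,
            SmallVector::One(v) => SmallVector::One(f(v)),
            // into_iter keeps this by value; libsyntax's version rewrote
            // the Vec's elements in place to reuse its allocation.
            SmallVector::Many(vs) => SmallVector::Many(vs.into_iter().map(f).collect()),
        }
    }
}

fn main() {
    let v = SmallVector::Many(vec![1, 2, 3]).move_map(|x| x * 10);
    if let SmallVector::Many(xs) = v {
        assert_eq!(xs, vec![10, 20, 30]);
    }
}
```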
diff --git a/src/test/auxiliary/macro_crate_test.rs b/src/test/auxiliary/macro_crate_test.rs
index fbbee2e625a8c..dd1f9c3404f02 100644
--- a/src/test/auxiliary/macro_crate_test.rs
+++ b/src/test/auxiliary/macro_crate_test.rs
@@ -20,10 +20,9 @@
 use syntax::codemap::Span;
 use syntax::ext::base::*;
 use syntax::parse::token;
 use syntax::parse;
+use syntax::ptr::P;
 use rustc::plugin::Registry;
 
-use std::gc::{Gc, GC};
-
 #[macro_export]
 macro_rules! exported_macro (() => (2i))
@@ -57,12 +56,12 @@ fn expand_identity(cx: &mut ExtCtxt, _span: Span, tts: &[TokenTree])
     MacExpr::new(quote_expr!(&mut *cx, $expr))
 }
 
-fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: Gc<MetaItem>, it: Gc<Item>)
-                   -> Gc<Item> {
-    box(GC) Item {
+fn expand_into_foo(cx: &mut ExtCtxt, sp: Span, attr: &MetaItem, it: P<Item>)
+                   -> P<Item> {
+    P(Item {
         attrs: it.attrs.clone(),
         ..(*quote_item!(cx, enum Foo { Bar, Baz }).unwrap()).clone()
-    }
+    })
 }
 
 fn expand_forged_ident(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult> {
diff --git a/src/test/run-pass-fulldeps/quote-tokens.rs b/src/test/run-pass-fulldeps/quote-tokens.rs
index 60b8f09bb3ded..b7c4c14638226 100644
--- a/src/test/run-pass-fulldeps/quote-tokens.rs
+++ b/src/test/run-pass-fulldeps/quote-tokens.rs
@@ -16,24 +16,24 @@ extern crate syntax;
 
 use syntax::ext::base::ExtCtxt;
-use std::gc::Gc;
+use syntax::ptr::P;
 
 fn syntax_extension(cx: &ExtCtxt) {
     let e_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, 1 + 2);
     let p_toks : Vec<syntax::ast::TokenTree> = quote_tokens!(cx, (x, 1 .. 4, *));
 
-    let a: Gc<syntax::ast::Expr> = quote_expr!(cx, 1 + 2);
-    let _b: Option<Gc<syntax::ast::Item>> = quote_item!(cx, static foo : int = $e_toks; );
-    let _c: Gc<syntax::ast::Pat> = quote_pat!(cx, (x, 1 .. 4, *) );
-    let _d: Gc<syntax::ast::Stmt> = quote_stmt!(cx, let x = $a; );
+    let a: P<syntax::ast::Expr> = quote_expr!(cx, 1 + 2);
+    let _b: Option<P<syntax::ast::Item>> = quote_item!(cx, static foo : int = $e_toks; );
+    let _c: P<syntax::ast::Pat> = quote_pat!(cx, (x, 1 .. 4, *) );
+    let _d: P<syntax::ast::Stmt> = quote_stmt!(cx, let x = $a; );
     let _d: syntax::ast::Arm = quote_arm!(cx, (ref x, ref y) = (x, y) );
 
-    let _e: Gc<syntax::ast::Expr> = quote_expr!(cx, match foo { $p_toks => 10 } );
+    let _e: P<syntax::ast::Expr> = quote_expr!(cx, match foo { $p_toks => 10 } );
 
-    let _f: Gc<syntax::ast::Expr> = quote_expr!(cx, ());
-    let _g: Gc<syntax::ast::Expr> = quote_expr!(cx, true);
-    let _h: Gc<syntax::ast::Expr> = quote_expr!(cx, 'a');
+    let _f: P<syntax::ast::Expr> = quote_expr!(cx, ());
+    let _g: P<syntax::ast::Expr> = quote_expr!(cx, true);
+    let _h: P<syntax::ast::Expr> = quote_expr!(cx, 'a');
 
-    let i: Option<Gc<syntax::ast::Item>> = quote_item!(cx, #[deriving(Eq)] struct Foo; );
+    let i: Option<P<syntax::ast::Item>> = quote_item!(cx, #[deriving(Eq)] struct Foo; );
     assert!(i.is_some());
 }
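Throughout the `pprust` hunks earlier in this patch, signatures that used to take `Option<Gc<Expr>>` by value now borrow with `Option<&Expr>`, and callers lend the node via `elseopt.as_ref().map(|e| &**e)`. A runnable sketch of that borrowing pattern in current Rust (the `P` and `Expr` types here are simplified stand-ins, not the real AST):

```rust
use std::ops::Deref;

// Stand-in owned AST pointer.
struct P<T> { ptr: Box<T> }
fn p<T>(value: T) -> P<T> { P { ptr: Box::new(value) } }
impl<T> Deref for P<T> {
    type Target = T;
    fn deref(&self) -> &T { &self.ptr }
}

// Stand-in AST node.
struct Expr(&'static str);

// The printer only needs to read the node, so it takes Option<&Expr>.
fn print_else(els: Option<&Expr>) {
    if let Some(e) = els {
        println!("else {}", e.0);
    }
}

fn main() {
    let elseopt: Option<P<Expr>> = Some(p(Expr("block")));
    // as_ref().map(|e| &**e) turns Option<P<Expr>> into Option<&Expr>
    // without giving up ownership of the node.
    print_else(elseopt.as_ref().map(|e| &**e));
}
```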