diff --git a/src/etc/x86.supp b/src/etc/x86.supp index 722f7c4aee699..16a3144c01717 100644 --- a/src/etc/x86.supp +++ b/src/etc/x86.supp @@ -415,6 +415,13 @@ ... } +{ + enum-instruction-scheduling-8 + Memcheck:Cond + fun:*should_set_output_format_to_markdown_if_requested* + ... +} + { llvm-user-new-leak Memcheck:Leak diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs index 2e8e4a6d51e0e..bd35bf50cefc0 100644 --- a/src/librustc/driver/session.rs +++ b/src/librustc/driver/session.rs @@ -165,49 +165,49 @@ pub struct Session_ { pub type Session = @Session_; pub impl Session { - fn span_fatal(sp: span, msg: ~str) -> ! { + fn span_fatal(&self, sp: span, msg: ~str) -> ! { self.span_diagnostic.span_fatal(sp, msg) } - fn fatal(msg: ~str) -> ! { + fn fatal(&self, msg: ~str) -> ! { self.span_diagnostic.handler().fatal(msg) } - fn span_err(sp: span, msg: ~str) { + fn span_err(&self, sp: span, msg: ~str) { self.span_diagnostic.span_err(sp, msg) } - fn err(msg: ~str) { + fn err(&self, msg: ~str) { self.span_diagnostic.handler().err(msg) } - fn has_errors() -> bool { + fn has_errors(&self) -> bool { self.span_diagnostic.handler().has_errors() } - fn abort_if_errors() { + fn abort_if_errors(&self) { self.span_diagnostic.handler().abort_if_errors() } - fn span_warn(sp: span, msg: ~str) { + fn span_warn(&self, sp: span, msg: ~str) { self.span_diagnostic.span_warn(sp, msg) } - fn warn(msg: ~str) { + fn warn(&self, msg: ~str) { self.span_diagnostic.handler().warn(msg) } - fn span_note(sp: span, msg: ~str) { + fn span_note(&self, sp: span, msg: ~str) { self.span_diagnostic.span_note(sp, msg) } - fn note(msg: ~str) { + fn note(&self, msg: ~str) { self.span_diagnostic.handler().note(msg) } - fn span_bug(sp: span, msg: ~str) -> ! { + fn span_bug(&self, sp: span, msg: ~str) -> ! { self.span_diagnostic.span_bug(sp, msg) } - fn bug(msg: ~str) -> ! { + fn bug(&self, msg: ~str) -> ! { self.span_diagnostic.handler().bug(msg) } - fn span_unimpl(sp: span, msg: ~str) -> ! { + fn span_unimpl(&self, sp: span, msg: ~str) -> ! { self.span_diagnostic.span_unimpl(sp, msg) } - fn unimpl(msg: ~str) -> ! { + fn unimpl(&self, msg: ~str) -> ! { self.span_diagnostic.handler().unimpl(msg) } - fn span_lint_level(level: lint::level, sp: span, +msg: ~str) { + fn span_lint_level(&self, level: lint::level, sp: span, +msg: ~str) { match level { lint::allow => { }, lint::warn => self.span_warn(sp, msg), @@ -216,7 +216,7 @@ pub impl Session { } } } - fn span_lint(lint_mode: lint::lint, + fn span_lint(&self, lint_mode: lint::lint, expr_id: ast::node_id, item_id: ast::node_id, span: span, @@ -225,45 +225,55 @@ pub impl Session { self.lint_settings, lint_mode, expr_id, item_id); self.span_lint_level(level, span, msg); } - fn next_node_id() -> ast::node_id { + fn next_node_id(&self) -> ast::node_id { return syntax::parse::next_node_id(self.parse_sess); } - fn diagnostic() -> diagnostic::span_handler { + fn diagnostic(&self) -> diagnostic::span_handler { self.span_diagnostic } - fn debugging_opt(opt: uint) -> bool { + fn debugging_opt(&self, opt: uint) -> bool { (self.opts.debugging_opts & opt) != 0u } // This exists to help with refactoring to eliminate impossible // cases later on - fn impossible_case(sp: span, msg: &str) -> ! { + fn impossible_case(&self, sp: span, msg: &str) -> ! 
{ self.span_bug(sp, fmt!("Impossible case reached: %s", msg)); } - fn verbose() -> bool { self.debugging_opt(verbose) } - fn time_passes() -> bool { self.debugging_opt(time_passes) } - fn count_llvm_insns() -> bool { self.debugging_opt(count_llvm_insns) } - fn count_type_sizes() -> bool { self.debugging_opt(count_type_sizes) } - fn time_llvm_passes() -> bool { self.debugging_opt(time_llvm_passes) } - fn trans_stats() -> bool { self.debugging_opt(trans_stats) } - fn meta_stats() -> bool { self.debugging_opt(meta_stats) } - fn no_asm_comments() -> bool { self.debugging_opt(no_asm_comments) } - fn no_verify() -> bool { self.debugging_opt(no_verify) } - fn trace() -> bool { self.debugging_opt(trace) } - fn coherence() -> bool { self.debugging_opt(coherence) } - fn borrowck_stats() -> bool { self.debugging_opt(borrowck_stats) } - fn borrowck_note_pure() -> bool { self.debugging_opt(borrowck_note_pure) } - fn borrowck_note_loan() -> bool { self.debugging_opt(borrowck_note_loan) } - fn no_monomorphic_collapse() -> bool { + fn verbose(&self) -> bool { self.debugging_opt(verbose) } + fn time_passes(&self) -> bool { self.debugging_opt(time_passes) } + fn count_llvm_insns(&self) -> bool { + self.debugging_opt(count_llvm_insns) + } + fn count_type_sizes(&self) -> bool { + self.debugging_opt(count_type_sizes) + } + fn time_llvm_passes(&self) -> bool { + self.debugging_opt(time_llvm_passes) + } + fn trans_stats(&self) -> bool { self.debugging_opt(trans_stats) } + fn meta_stats(&self) -> bool { self.debugging_opt(meta_stats) } + fn no_asm_comments(&self) -> bool { self.debugging_opt(no_asm_comments) } + fn no_verify(&self) -> bool { self.debugging_opt(no_verify) } + fn trace(&self) -> bool { self.debugging_opt(trace) } + fn coherence(&self) -> bool { self.debugging_opt(coherence) } + fn borrowck_stats(&self) -> bool { self.debugging_opt(borrowck_stats) } + fn borrowck_note_pure(&self) -> bool { + self.debugging_opt(borrowck_note_pure) + } + fn borrowck_note_loan(&self) -> bool { + self.debugging_opt(borrowck_note_loan) + } + fn no_monomorphic_collapse(&self) -> bool { self.debugging_opt(no_monomorphic_collapse) } - fn str_of(id: ast::ident) -> @~str { + fn str_of(&self, id: ast::ident) -> @~str { self.parse_sess.interner.get(id) } - fn ident_of(+st: ~str) -> ast::ident { + fn ident_of(&self, +st: ~str) -> ast::ident { self.parse_sess.interner.intern(@st) } - fn intr() -> @syntax::parse::token::ident_interner { + fn intr(&self) -> @syntax::parse::token::ident_interner { self.parse_sess.interner } } diff --git a/src/librustc/metadata/filesearch.rs b/src/librustc/metadata/filesearch.rs index 2175d0f074c60..82ea0b6d6f186 100644 --- a/src/librustc/metadata/filesearch.rs +++ b/src/librustc/metadata/filesearch.rs @@ -29,10 +29,10 @@ pub fn pick_file(file: Path, path: &Path) -> Option { } pub trait FileSearch { - fn sysroot() -> Path; - fn lib_search_paths() -> ~[Path]; - fn get_target_lib_path() -> Path; - fn get_target_lib_file_path(file: &Path) -> Path; + fn sysroot(&self) -> Path; + fn lib_search_paths(&self) -> ~[Path]; + fn get_target_lib_path(&self) -> Path; + fn get_target_lib_file_path(&self, file: &Path) -> Path; } pub fn mk_filesearch(maybe_sysroot: Option, @@ -44,8 +44,8 @@ pub fn mk_filesearch(maybe_sysroot: Option, target_triple: ~str } impl FileSearch for FileSearchImpl { - fn sysroot() -> Path { /*bad*/copy self.sysroot } - fn lib_search_paths() -> ~[Path] { + fn sysroot(&self) -> Path { /*bad*/copy self.sysroot } + fn lib_search_paths(&self) -> ~[Path] { let mut paths = /*bad*/copy 
self.addl_lib_search_paths; paths.push( @@ -61,10 +61,10 @@ pub fn mk_filesearch(maybe_sysroot: Option, } paths } - fn get_target_lib_path() -> Path { + fn get_target_lib_path(&self) -> Path { make_target_lib_path(&self.sysroot, self.target_triple) } - fn get_target_lib_file_path(file: &Path) -> Path { + fn get_target_lib_file_path(&self, file: &Path) -> Path { self.get_target_lib_path().push_rel(file) } } diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs index 4152a32fc3330..0b1abd683b122 100644 --- a/src/librustc/middle/astencode.rs +++ b/src/librustc/middle/astencode.rs @@ -75,11 +75,11 @@ struct ExtendedDecodeContext { } trait tr { - fn tr(xcx: @ExtendedDecodeContext) -> Self; + fn tr(&self, xcx: @ExtendedDecodeContext) -> Self; } trait tr_intern { - fn tr_intern(xcx: @ExtendedDecodeContext) -> ast::def_id; + fn tr_intern(&self, xcx: @ExtendedDecodeContext) -> ast::def_id; } // ______________________________________________________________________ @@ -227,41 +227,41 @@ impl ExtendedDecodeContext { } impl tr_intern for ast::def_id { - fn tr_intern(xcx: @ExtendedDecodeContext) -> ast::def_id { - xcx.tr_intern_def_id(self) + fn tr_intern(&self, xcx: @ExtendedDecodeContext) -> ast::def_id { + xcx.tr_intern_def_id(*self) } } impl tr for ast::def_id { - fn tr(xcx: @ExtendedDecodeContext) -> ast::def_id { - xcx.tr_def_id(self) + fn tr(&self, xcx: @ExtendedDecodeContext) -> ast::def_id { + xcx.tr_def_id(*self) } } impl tr for span { - fn tr(xcx: @ExtendedDecodeContext) -> span { - xcx.tr_span(self) + fn tr(&self, xcx: @ExtendedDecodeContext) -> span { + xcx.tr_span(*self) } } trait def_id_encoder_helpers { - fn emit_def_id(did: ast::def_id); + fn emit_def_id(&self, did: ast::def_id); } impl def_id_encoder_helpers for S { - fn emit_def_id(did: ast::def_id) { - did.encode(&self) + fn emit_def_id(&self, did: ast::def_id) { + did.encode(self) } } trait def_id_decoder_helpers { - fn read_def_id(xcx: @ExtendedDecodeContext) -> ast::def_id; + fn read_def_id(&self, xcx: @ExtendedDecodeContext) -> ast::def_id; } impl def_id_decoder_helpers for D { - fn read_def_id(xcx: @ExtendedDecodeContext) -> ast::def_id { - let did: ast::def_id = Decodable::decode(&self); + fn read_def_id(&self, xcx: @ExtendedDecodeContext) -> ast::def_id { + let did: ast::def_id = Decodable::decode(self); did.tr(xcx) } } @@ -405,8 +405,8 @@ fn decode_def(xcx: @ExtendedDecodeContext, doc: ebml::Doc) -> ast::def { } impl tr for ast::def { - fn tr(xcx: @ExtendedDecodeContext) -> ast::def { - match self { + fn tr(&self, xcx: @ExtendedDecodeContext) -> ast::def { + match *self { ast::def_fn(did, p) => { ast::def_fn(did.tr(xcx), p) } ast::def_static_method(did, did2_opt, p) => { ast::def_static_method(did.tr(xcx), @@ -450,7 +450,7 @@ impl tr for ast::def { // Encoding and decoding of adjustment information impl tr for ty::AutoAdjustment { - fn tr(xcx: @ExtendedDecodeContext) -> ty::AutoAdjustment { + fn tr(&self, xcx: @ExtendedDecodeContext) -> ty::AutoAdjustment { ty::AutoAdjustment { autoderefs: self.autoderefs, autoref: self.autoref.map(|ar| ar.tr(xcx)), @@ -459,7 +459,7 @@ impl tr for ty::AutoAdjustment { } impl tr for ty::AutoRef { - fn tr(xcx: @ExtendedDecodeContext) -> ty::AutoRef { + fn tr(&self, xcx: @ExtendedDecodeContext) -> ty::AutoRef { ty::AutoRef { kind: self.kind, region: self.region.tr(xcx), @@ -469,21 +469,21 @@ impl tr for ty::AutoRef { } impl tr for ty::Region { - fn tr(xcx: @ExtendedDecodeContext) -> ty::Region { - match self { + fn tr(&self, xcx: @ExtendedDecodeContext) -> ty::Region 
{ + match *self { ty::re_bound(br) => ty::re_bound(br.tr(xcx)), ty::re_free(id, br) => ty::re_free(xcx.tr_id(id), br.tr(xcx)), ty::re_scope(id) => ty::re_scope(xcx.tr_id(id)), - ty::re_static | ty::re_infer(*) => self, + ty::re_static | ty::re_infer(*) => *self, } } } impl tr for ty::bound_region { - fn tr(xcx: @ExtendedDecodeContext) -> ty::bound_region { - match self { + fn tr(&self, xcx: @ExtendedDecodeContext) -> ty::bound_region { + match *self { ty::br_anon(_) | ty::br_named(_) | ty::br_self | - ty::br_fresh(_) => self, + ty::br_fresh(_) => *self, ty::br_cap_avoid(id, br) => ty::br_cap_avoid(xcx.tr_id(id), @br.tr(xcx)) } @@ -498,18 +498,20 @@ fn encode_freevar_entry(ebml_w: writer::Encoder, fv: @freevar_entry) { } trait ebml_decoder_helper { - fn read_freevar_entry(xcx: @ExtendedDecodeContext) -> freevar_entry; + fn read_freevar_entry(&self, xcx: @ExtendedDecodeContext) + -> freevar_entry; } impl ebml_decoder_helper for reader::Decoder { - fn read_freevar_entry(xcx: @ExtendedDecodeContext) -> freevar_entry { - let fv: freevar_entry = Decodable::decode(&self); + fn read_freevar_entry(&self, xcx: @ExtendedDecodeContext) + -> freevar_entry { + let fv: freevar_entry = Decodable::decode(self); fv.tr(xcx) } } impl tr for freevar_entry { - fn tr(xcx: @ExtendedDecodeContext) -> freevar_entry { + fn tr(&self, xcx: @ExtendedDecodeContext) -> freevar_entry { freevar_entry { def: self.def.tr(xcx), span: self.span.tr(xcx), @@ -521,18 +523,20 @@ impl tr for freevar_entry { // Encoding and decoding of CaptureVar information trait capture_var_helper { - fn read_capture_var(xcx: @ExtendedDecodeContext) -> moves::CaptureVar; + fn read_capture_var(&self, xcx: @ExtendedDecodeContext) + -> moves::CaptureVar; } impl capture_var_helper for reader::Decoder { - fn read_capture_var(xcx: @ExtendedDecodeContext) -> moves::CaptureVar { - let cvar: moves::CaptureVar = Decodable::decode(&self); + fn read_capture_var(&self, xcx: @ExtendedDecodeContext) + -> moves::CaptureVar { + let cvar: moves::CaptureVar = Decodable::decode(self); cvar.tr(xcx) } } impl tr for moves::CaptureVar { - fn tr(xcx: @ExtendedDecodeContext) -> moves::CaptureVar { + fn tr(&self, xcx: @ExtendedDecodeContext) -> moves::CaptureVar { moves::CaptureVar { def: self.def.tr(xcx), span: self.span.tr(xcx), @@ -545,7 +549,8 @@ impl tr for moves::CaptureVar { // Encoding and decoding of method_map_entry trait read_method_map_entry_helper { - fn read_method_map_entry(xcx: @ExtendedDecodeContext) -> method_map_entry; + fn read_method_map_entry(&self, xcx: @ExtendedDecodeContext) + -> method_map_entry; } fn encode_method_map_entry(ecx: @e::EncodeContext, @@ -565,7 +570,7 @@ fn encode_method_map_entry(ecx: @e::EncodeContext, } impl read_method_map_entry_helper for reader::Decoder { - fn read_method_map_entry(xcx: @ExtendedDecodeContext) + fn read_method_map_entry(&self, xcx: @ExtendedDecodeContext) -> method_map_entry { do self.read_rec { method_map_entry { @@ -573,12 +578,12 @@ impl read_method_map_entry_helper for reader::Decoder { self.read_arg(xcx) }), explicit_self: self.read_field(~"explicit_self", 2u, || { - let self_type: ast::self_ty_ = Decodable::decode(&self); + let self_type: ast::self_ty_ = Decodable::decode(self); self_type }), origin: self.read_field(~"origin", 1u, || { let method_origin: method_origin = - Decodable::decode(&self); + Decodable::decode(self); method_origin.tr(xcx) }), } @@ -587,8 +592,8 @@ impl read_method_map_entry_helper for reader::Decoder { } impl tr for method_origin { - fn tr(xcx: @ExtendedDecodeContext) -> 
method_origin { - match self { + fn tr(&self, xcx: @ExtendedDecodeContext) -> method_origin { + match *self { typeck::method_static(did) => { typeck::method_static(did.tr(xcx)) } @@ -672,17 +677,19 @@ fn encode_vtable_origin(ecx: @e::EncodeContext, } trait vtable_decoder_helpers { - fn read_vtable_res(xcx: @ExtendedDecodeContext) -> typeck::vtable_res; - fn read_vtable_origin(xcx: @ExtendedDecodeContext) + fn read_vtable_res(&self, xcx: @ExtendedDecodeContext) + -> typeck::vtable_res; + fn read_vtable_origin(&self, xcx: @ExtendedDecodeContext) -> typeck::vtable_origin; } impl vtable_decoder_helpers for reader::Decoder { - fn read_vtable_res(xcx: @ExtendedDecodeContext) -> typeck::vtable_res { + fn read_vtable_res(&self, xcx: @ExtendedDecodeContext) + -> typeck::vtable_res { @self.read_to_vec(|| self.read_vtable_origin(xcx) ) } - fn read_vtable_origin(xcx: @ExtendedDecodeContext) + fn read_vtable_origin(&self, xcx: @ExtendedDecodeContext) -> typeck::vtable_origin { do self.read_enum(~"vtable_origin") { do self.read_enum_variant |i| { @@ -736,6 +743,7 @@ trait get_ty_str_ctxt { } impl get_ty_str_ctxt for @e::EncodeContext { + // IMPLICIT SELF WARNING: fix this! fn ty_str_ctxt() -> @tyencode::ctxt { @tyencode::ctxt {diag: self.tcx.sess.diagnostic(), ds: e::def_to_str, @@ -746,46 +754,48 @@ impl get_ty_str_ctxt for @e::EncodeContext { } trait ebml_writer_helpers { - fn emit_arg(ecx: @e::EncodeContext, arg: ty::arg); - fn emit_ty(ecx: @e::EncodeContext, ty: ty::t); - fn emit_vstore(ecx: @e::EncodeContext, vstore: ty::vstore); - fn emit_tys(ecx: @e::EncodeContext, tys: ~[ty::t]); - fn emit_bounds(ecx: @e::EncodeContext, bs: ty::param_bounds); - fn emit_tpbt(ecx: @e::EncodeContext, tpbt: ty::ty_param_bounds_and_ty); + fn emit_arg(&self, ecx: @e::EncodeContext, arg: ty::arg); + fn emit_ty(&self, ecx: @e::EncodeContext, ty: ty::t); + fn emit_vstore(&self, ecx: @e::EncodeContext, vstore: ty::vstore); + fn emit_tys(&self, ecx: @e::EncodeContext, tys: ~[ty::t]); + fn emit_bounds(&self, ecx: @e::EncodeContext, bs: ty::param_bounds); + fn emit_tpbt(&self, ecx: @e::EncodeContext, + tpbt: ty::ty_param_bounds_and_ty); } impl ebml_writer_helpers for writer::Encoder { - fn emit_ty(ecx: @e::EncodeContext, ty: ty::t) { + fn emit_ty(&self, ecx: @e::EncodeContext, ty: ty::t) { do self.emit_opaque { - e::write_type(ecx, self, ty) + e::write_type(ecx, *self, ty) } } - fn emit_vstore(ecx: @e::EncodeContext, vstore: ty::vstore) { + fn emit_vstore(&self, ecx: @e::EncodeContext, vstore: ty::vstore) { do self.emit_opaque { - e::write_vstore(ecx, self, vstore) + e::write_vstore(ecx, *self, vstore) } } - fn emit_arg(ecx: @e::EncodeContext, arg: ty::arg) { + fn emit_arg(&self, ecx: @e::EncodeContext, arg: ty::arg) { do self.emit_opaque { tyencode::enc_arg(self.writer, ecx.ty_str_ctxt(), arg); } } - fn emit_tys(ecx: @e::EncodeContext, tys: ~[ty::t]) { + fn emit_tys(&self, ecx: @e::EncodeContext, tys: ~[ty::t]) { do self.emit_from_vec(tys) |ty| { self.emit_ty(ecx, *ty) } } - fn emit_bounds(ecx: @e::EncodeContext, bs: ty::param_bounds) { + fn emit_bounds(&self, ecx: @e::EncodeContext, bs: ty::param_bounds) { do self.emit_opaque { tyencode::enc_bounds(self.writer, ecx.ty_str_ctxt(), bs) } } - fn emit_tpbt(ecx: @e::EncodeContext, tpbt: ty::ty_param_bounds_and_ty) { + fn emit_tpbt(&self, ecx: @e::EncodeContext, + tpbt: ty::ty_param_bounds_and_ty) { do self.emit_rec { do self.emit_field(~"bounds", 0) { do self.emit_from_vec(*tpbt.bounds) |bs| { @@ -793,7 +803,7 @@ impl ebml_writer_helpers for writer::Encoder { } } do 
self.emit_field(~"region_param", 1u) { - tpbt.region_param.encode(&self); + tpbt.region_param.encode(self); } do self.emit_field(~"ty", 2u) { self.emit_ty(ecx, tpbt.ty); @@ -803,16 +813,16 @@ impl ebml_writer_helpers for writer::Encoder { } trait write_tag_and_id { - fn tag(tag_id: c::astencode_tag, f: fn()); - fn id(id: ast::node_id); + fn tag(&self, tag_id: c::astencode_tag, f: fn()); + fn id(&self, id: ast::node_id); } impl write_tag_and_id for writer::Encoder { - fn tag(tag_id: c::astencode_tag, f: fn()) { + fn tag(&self, tag_id: c::astencode_tag, f: fn()) { do self.wr_tag(tag_id as uint) { f() } } - fn id(id: ast::node_id) { + fn id(&self, id: ast::node_id) { self.wr_tagged_u64(c::tag_table_id as uint, id as u64) } } @@ -981,31 +991,31 @@ fn encode_side_tables_for_id(ecx: @e::EncodeContext, } trait doc_decoder_helpers { - fn as_int() -> int; - fn opt_child(tag: c::astencode_tag) -> Option; + fn as_int(&self) -> int; + fn opt_child(&self, tag: c::astencode_tag) -> Option; } impl doc_decoder_helpers for ebml::Doc { - fn as_int() -> int { reader::doc_as_u64(self) as int } - fn opt_child(tag: c::astencode_tag) -> Option { - reader::maybe_get_doc(self, tag as uint) + fn as_int(&self) -> int { reader::doc_as_u64(*self) as int } + fn opt_child(&self, tag: c::astencode_tag) -> Option { + reader::maybe_get_doc(*self, tag as uint) } } trait ebml_decoder_decoder_helpers { - fn read_arg(xcx: @ExtendedDecodeContext) -> ty::arg; - fn read_ty(xcx: @ExtendedDecodeContext) -> ty::t; - fn read_tys(xcx: @ExtendedDecodeContext) -> ~[ty::t]; - fn read_bounds(xcx: @ExtendedDecodeContext) -> @~[ty::param_bound]; - fn read_ty_param_bounds_and_ty(xcx: @ExtendedDecodeContext) + fn read_arg(&self, xcx: @ExtendedDecodeContext) -> ty::arg; + fn read_ty(&self, xcx: @ExtendedDecodeContext) -> ty::t; + fn read_tys(&self, xcx: @ExtendedDecodeContext) -> ~[ty::t]; + fn read_bounds(&self, xcx: @ExtendedDecodeContext) -> @~[ty::param_bound]; + fn read_ty_param_bounds_and_ty(&self, xcx: @ExtendedDecodeContext) -> ty::ty_param_bounds_and_ty; - fn convert_def_id(xcx: @ExtendedDecodeContext, + fn convert_def_id(&self, xcx: @ExtendedDecodeContext, source: DefIdSource, did: ast::def_id) -> ast::def_id; } impl ebml_decoder_decoder_helpers for reader::Decoder { - fn read_arg(xcx: @ExtendedDecodeContext) -> ty::arg { + fn read_arg(&self, xcx: @ExtendedDecodeContext) -> ty::arg { do self.read_opaque |doc| { tydecode::parse_arg_data( doc.data, xcx.dcx.cdata.cnum, doc.start, xcx.dcx.tcx, @@ -1013,7 +1023,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder { } } - fn read_ty(xcx: @ExtendedDecodeContext) -> ty::t { + fn read_ty(&self, xcx: @ExtendedDecodeContext) -> ty::t { // Note: regions types embed local node ids. In principle, we // should translate these node ids into the new decode // context. 
However, we do not bother, because region types @@ -1040,11 +1050,12 @@ impl ebml_decoder_decoder_helpers for reader::Decoder { } } - fn read_tys(xcx: @ExtendedDecodeContext) -> ~[ty::t] { + fn read_tys(&self, xcx: @ExtendedDecodeContext) -> ~[ty::t] { self.read_to_vec(|| self.read_ty(xcx) ) } - fn read_bounds(xcx: @ExtendedDecodeContext) -> @~[ty::param_bound] { + fn read_bounds(&self, xcx: @ExtendedDecodeContext) + -> @~[ty::param_bound] { do self.read_opaque |doc| { tydecode::parse_bounds_data( doc.data, doc.start, xcx.dcx.cdata.cnum, xcx.dcx.tcx, @@ -1052,7 +1063,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder { } } - fn read_ty_param_bounds_and_ty(xcx: @ExtendedDecodeContext) + fn read_ty_param_bounds_and_ty(&self, xcx: @ExtendedDecodeContext) -> ty::ty_param_bounds_and_ty { do self.read_rec { @@ -1061,7 +1072,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder { @self.read_to_vec(|| self.read_bounds(xcx) ) }), region_param: self.read_field(~"region_param", 1u, || { - Decodable::decode(&self) + Decodable::decode(self) }), ty: self.read_field(~"ty", 2u, || { self.read_ty(xcx) @@ -1070,7 +1081,7 @@ impl ebml_decoder_decoder_helpers for reader::Decoder { } } - fn convert_def_id(xcx: @ExtendedDecodeContext, + fn convert_def_id(&self, xcx: @ExtendedDecodeContext, source: tydecode::DefIdSource, did: ast::def_id) -> ast::def_id { /*! @@ -1192,10 +1203,10 @@ fn decode_item_ast(par_doc: ebml::Doc) -> @ast::item { #[cfg(test)] trait fake_ext_ctxt { - fn cfg() -> ast::crate_cfg; - fn parse_sess() -> @mut parse::ParseSess; - fn call_site() -> span; - fn ident_of(+st: ~str) -> ast::ident; + fn cfg(&self) -> ast::crate_cfg; + fn parse_sess(&self) -> @mut parse::ParseSess; + fn call_site(&self) -> span; + fn ident_of(&self, +st: ~str) -> ast::ident; } #[cfg(test)] @@ -1203,16 +1214,16 @@ type fake_session = @mut parse::ParseSess; #[cfg(test)] impl fake_ext_ctxt for fake_session { - fn cfg() -> ast::crate_cfg { ~[] } - fn parse_sess() -> @mut parse::ParseSess { self } - fn call_site() -> span { + fn cfg(&self) -> ast::crate_cfg { ~[] } + fn parse_sess(&self) -> @mut parse::ParseSess { *self } + fn call_site(&self) -> span { codemap::span { lo: codemap::BytePos(0), hi: codemap::BytePos(0), expn_info: None } } - fn ident_of(+st: ~str) -> ast::ident { + fn ident_of(&self, +st: ~str) -> ast::ident { self.interner.intern(@st) } } diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs index 8519439ee934c..afefec00c5040 100644 --- a/src/librustc/middle/borrowck/check_loans.rs +++ b/src/librustc/middle/borrowck/check_loans.rs @@ -90,16 +90,16 @@ enum assignment_type { } impl assignment_type { - fn checked_by_liveness() -> bool { + fn checked_by_liveness(&self) -> bool { // the liveness pass guarantees that immutable local variables // are only assigned once; but it doesn't consider &mut - match self { + match *self { at_straight_up => true, at_swap => true } } - fn ing_form(desc: ~str) -> ~str { - match self { + fn ing_form(&self, desc: ~str) -> ~str { + match *self { at_straight_up => ~"assigning to " + desc, at_swap => ~"swapping to and from " + desc } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 983ee2ca141df..b997c94a71b4d 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -82,7 +82,7 @@ pub struct LanguageItems { } pub impl LanguageItems { - static pub fn new() -> LanguageItems { + static pub fn new(&self) -> LanguageItems { LanguageItems { items: [ None, 
..34 ] } @@ -96,7 +96,7 @@ pub impl LanguageItems { } } - static pub fn item_name(index: uint) -> &static/str { + static pub fn item_name(&self, index: uint) -> &static/str { match index { 0 => "const", 1 => "copy", @@ -315,7 +315,7 @@ struct LanguageItemCollector { } impl LanguageItemCollector { - fn match_and_collect_meta_item(item_def_id: def_id, + fn match_and_collect_meta_item(&self, item_def_id: def_id, meta_item: meta_item) { match meta_item.node { meta_name_value(key, literal) => { @@ -330,7 +330,7 @@ impl LanguageItemCollector { } } - fn collect_item(item_index: uint, item_def_id: def_id) { + fn collect_item(&self, item_index: uint, item_def_id: def_id) { // Check for duplicates. match self.items.items[item_index] { Some(original_def_id) if original_def_id != item_def_id => { @@ -346,7 +346,8 @@ impl LanguageItemCollector { self.items.items[item_index] = Some(item_def_id); } - fn match_and_collect_item(item_def_id: def_id, key: @~str, value: @~str) { + fn match_and_collect_item(&self, + item_def_id: def_id, key: @~str, value: @~str) { if *key != ~"lang" { return; // Didn't match. } @@ -361,7 +362,7 @@ impl LanguageItemCollector { } } - fn collect_local_language_items() { + fn collect_local_language_items(&self) { let this = unsafe { ptr::addr_of(&self) }; visit_crate(*self.crate, (), mk_simple_visitor(@SimpleVisitor { visit_item: |item| { @@ -378,7 +379,7 @@ impl LanguageItemCollector { })); } - fn collect_external_language_items() { + fn collect_external_language_items(&self) { let crate_store = self.session.cstore; do iter_crate_data(crate_store) |crate_number, _crate_metadata| { for each_lang_item(crate_store, crate_number) @@ -389,7 +390,7 @@ impl LanguageItemCollector { } } - fn check_completeness() { + fn check_completeness(&self) { for self.item_refs.each |&key, &item_ref| { match self.items.items[item_ref] { None => { @@ -402,7 +403,7 @@ impl LanguageItemCollector { } } - fn collect() { + fn collect(&self) { self.collect_local_language_items(); self.collect_external_language_items(); self.check_completeness(); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index a7cfde0e70fec..75bf7cf26091c 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -255,7 +255,7 @@ impl to_str::ToStr for Variable { // assignment. And so forth. 
impl LiveNode { - pure fn is_valid() -> bool { *self != uint::max_value } + pure fn is_valid(&self) -> bool { **self != uint::max_value } } fn invalid_node() -> LiveNode { LiveNode(uint::max_value) } @@ -699,7 +699,7 @@ fn Liveness(ir: @mut IrMaps, specials: Specials) -> Liveness { } impl Liveness { - fn live_node(node_id: node_id, span: span) -> LiveNode { + fn live_node(&self, node_id: node_id, span: span) -> LiveNode { match self.ir.live_node_map.find(&node_id) { Some(ln) => ln, None => { @@ -714,7 +714,7 @@ impl Liveness { } } - fn variable_from_path(expr: @expr) -> Option { + fn variable_from_path(&self, expr: @expr) -> Option { match expr.node { expr_path(_) => { let def = self.tcx.def_map.get(&expr.id); @@ -726,11 +726,11 @@ impl Liveness { } } - fn variable(node_id: node_id, span: span) -> Variable { + fn variable(&self, node_id: node_id, span: span) -> Variable { self.ir.variable(node_id, span) } - fn variable_from_def_map(node_id: node_id, + fn variable_from_def_map(&self, node_id: node_id, span: span) -> Option { match self.tcx.def_map.find(&node_id) { Some(def) => { @@ -745,7 +745,7 @@ impl Liveness { } } - fn pat_bindings(pat: @pat, f: fn(LiveNode, Variable, span)) { + fn pat_bindings(&self, pat: @pat, f: fn(LiveNode, Variable, span)) { let def_map = self.tcx.def_map; do pat_util::pat_bindings(def_map, pat) |_bm, p_id, sp, _n| { let ln = self.live_node(p_id, sp); @@ -754,7 +754,8 @@ impl Liveness { } } - fn arm_pats_bindings(pats: &[@pat], f: fn(LiveNode, Variable, span)) { + fn arm_pats_bindings(&self, + pats: &[@pat], f: fn(LiveNode, Variable, span)) { // only consider the first pattern; any later patterns must have // the same bindings, and we also consider the first pattern to be // the "authoratative" set of ids @@ -763,11 +764,11 @@ impl Liveness { } } - fn define_bindings_in_pat(pat: @pat, succ: LiveNode) -> LiveNode { + fn define_bindings_in_pat(&self, pat: @pat, succ: LiveNode) -> LiveNode { self.define_bindings_in_arm_pats([pat], succ) } - fn define_bindings_in_arm_pats(pats: &[@pat], + fn define_bindings_in_arm_pats(&self, pats: &[@pat], succ: LiveNode) -> LiveNode { let mut succ = succ; do self.arm_pats_bindings(pats) |ln, var, _sp| { @@ -778,11 +779,11 @@ impl Liveness { succ } - fn idx(ln: LiveNode, var: Variable) -> uint { + fn idx(&self, ln: LiveNode, var: Variable) -> uint { *ln * self.ir.num_vars + *var } - fn live_on_entry(ln: LiveNode, var: Variable) + fn live_on_entry(&self, ln: LiveNode, var: Variable) -> Option { assert ln.is_valid(); @@ -793,18 +794,18 @@ impl Liveness { /* Is this variable live on entry to any of its successor nodes? 
*/ - fn live_on_exit(ln: LiveNode, var: Variable) + fn live_on_exit(&self, ln: LiveNode, var: Variable) -> Option { self.live_on_entry(copy self.successors[*ln], var) } - fn used_on_entry(ln: LiveNode, var: Variable) -> bool { + fn used_on_entry(&self, ln: LiveNode, var: Variable) -> bool { assert ln.is_valid(); self.users[self.idx(ln, var)].used } - fn assigned_on_entry(ln: LiveNode, var: Variable) + fn assigned_on_entry(&self, ln: LiveNode, var: Variable) -> Option { assert ln.is_valid(); @@ -812,13 +813,13 @@ impl Liveness { if writer.is_valid() {Some(self.ir.lnk(writer))} else {None} } - fn assigned_on_exit(ln: LiveNode, var: Variable) + fn assigned_on_exit(&self, ln: LiveNode, var: Variable) -> Option { self.assigned_on_entry(copy self.successors[*ln], var) } - fn indices(ln: LiveNode, op: fn(uint)) { + fn indices(&self, ln: LiveNode, op: fn(uint)) { let node_base_idx = self.idx(ln, Variable(0)); for uint::range(0, self.ir.num_vars) |var_idx| { op(node_base_idx + var_idx) @@ -834,7 +835,7 @@ impl Liveness { } } - fn write_vars(wr: io::Writer, + fn write_vars(&self, wr: io::Writer, ln: LiveNode, test: fn(uint) -> LiveNode) { let node_base_idx = self.idx(ln, Variable(0)); @@ -847,7 +848,7 @@ impl Liveness { } } - fn find_loop_scope(opt_label: Option, id: node_id, sp: span) + fn find_loop_scope(&self, opt_label: Option, id: node_id, sp: span) -> node_id { match opt_label { Some(_) => // Refers to a labeled loop. Use the results of resolve @@ -869,7 +870,7 @@ impl Liveness { } } - fn ln_str(ln: LiveNode) -> ~str { + fn ln_str(&self, ln: LiveNode) -> ~str { do io::with_str_writer |wr| { wr.write_str(~"[ln("); wr.write_uint(*ln); @@ -886,7 +887,7 @@ impl Liveness { } } - fn init_empty(ln: LiveNode, succ_ln: LiveNode) { + fn init_empty(&self, ln: LiveNode, succ_ln: LiveNode) { self.successors[*ln] = succ_ln; // It is not necessary to initialize the @@ -899,7 +900,7 @@ impl Liveness { // } } - fn init_from_succ(ln: LiveNode, succ_ln: LiveNode) { + fn init_from_succ(&self, ln: LiveNode, succ_ln: LiveNode) { // more efficient version of init_empty() / merge_from_succ() self.successors[*ln] = succ_ln; self.indices2(ln, succ_ln, |idx, succ_idx| { @@ -909,7 +910,7 @@ impl Liveness { self.ln_str(ln), self.ln_str(succ_ln)); } - fn merge_from_succ(ln: LiveNode, succ_ln: LiveNode, + fn merge_from_succ(&self, ln: LiveNode, succ_ln: LiveNode, first_merge: bool) -> bool { if ln == succ_ln { return false; } @@ -943,7 +944,7 @@ impl Liveness { // Indicates that a local variable was *defined*; we know that no // uses of the variable can precede the definition (resolve checks // this) so we just clear out all the data. - fn define(writer: LiveNode, var: Variable) { + fn define(&self, writer: LiveNode, var: Variable) { let idx = self.idx(writer, var); self.users[idx].reader = invalid_node(); self.users[idx].writer = invalid_node(); @@ -953,7 +954,7 @@ impl Liveness { } // Either read, write, or both depending on the acc bitset - fn acc(ln: LiveNode, var: Variable, acc: uint) { + fn acc(&self, ln: LiveNode, var: Variable, acc: uint) { let idx = self.idx(ln, var); let user = &mut self.users[idx]; @@ -978,7 +979,7 @@ impl Liveness { // _______________________________________________________________________ - fn compute(decl: fn_decl, body: blk) -> LiveNode { + fn compute(&self, decl: fn_decl, body: blk) -> LiveNode { // if there is a `break` or `again` at the top level, then it's // effectively a return---this only occurs in `for` loops, // where the body is really a closure. 
@@ -1003,7 +1004,8 @@ impl Liveness { entry_ln } - fn propagate_through_fn_block(decl: fn_decl, blk: blk) -> LiveNode { + fn propagate_through_fn_block(&self, decl: fn_decl, blk: blk) + -> LiveNode { // inputs passed by & mode should be considered live on exit: for decl.inputs.each |arg| { match ty::resolved_mode(self.tcx, arg.mode) { @@ -1036,14 +1038,15 @@ impl Liveness { self.propagate_through_block(blk, self.s.fallthrough_ln) } - fn propagate_through_block(blk: blk, succ: LiveNode) -> LiveNode { + fn propagate_through_block(&self, blk: blk, succ: LiveNode) -> LiveNode { let succ = self.propagate_through_opt_expr(blk.node.expr, succ); do blk.node.stmts.foldr(succ) |stmt, succ| { self.propagate_through_stmt(*stmt, succ) } } - fn propagate_through_stmt(stmt: @stmt, succ: LiveNode) -> LiveNode { + fn propagate_through_stmt(&self, stmt: @stmt, succ: LiveNode) + -> LiveNode { match stmt.node { stmt_decl(decl, _) => { return self.propagate_through_decl(decl, succ); @@ -1059,7 +1062,8 @@ impl Liveness { } } - fn propagate_through_decl(decl: @decl, succ: LiveNode) -> LiveNode { + fn propagate_through_decl(&self, decl: @decl, succ: LiveNode) + -> LiveNode { match /*bad*/copy decl.node { decl_local(locals) => { do locals.foldr(succ) |local, succ| { @@ -1072,7 +1076,8 @@ impl Liveness { } } - fn propagate_through_local(local: @local, succ: LiveNode) -> LiveNode { + fn propagate_through_local(&self, local: @local, succ: LiveNode) + -> LiveNode { // Note: we mark the variable as defined regardless of whether // there is an initializer. Initially I had thought to only mark // the live variable as defined if it was initialized, and then we @@ -1091,21 +1096,22 @@ impl Liveness { self.define_bindings_in_pat(local.node.pat, succ) } - fn propagate_through_exprs(exprs: ~[@expr], + fn propagate_through_exprs(&self, exprs: ~[@expr], succ: LiveNode) -> LiveNode { do exprs.foldr(succ) |expr, succ| { self.propagate_through_expr(*expr, succ) } } - fn propagate_through_opt_expr(opt_expr: Option<@expr>, + fn propagate_through_opt_expr(&self, opt_expr: Option<@expr>, succ: LiveNode) -> LiveNode { do opt_expr.foldl(succ) |succ, expr| { self.propagate_through_expr(*expr, *succ) } } - fn propagate_through_expr(expr: @expr, succ: LiveNode) -> LiveNode { + fn propagate_through_expr(&self, expr: @expr, succ: LiveNode) + -> LiveNode { debug!("propagate_through_expr: %s", expr_to_str(expr, self.tcx.sess.intr())); @@ -1365,7 +1371,7 @@ impl Liveness { } } - fn propagate_through_lvalue_components(expr: @expr, + fn propagate_through_lvalue_components(&self, expr: @expr, succ: LiveNode) -> LiveNode { // # Lvalues // @@ -1424,7 +1430,7 @@ impl Liveness { } // see comment on propagate_through_lvalue() - fn write_lvalue(expr: @expr, + fn write_lvalue(&self, expr: @expr, succ: LiveNode, acc: uint) -> LiveNode { match expr.node { @@ -1438,7 +1444,8 @@ impl Liveness { } } - fn access_path(expr: @expr, succ: LiveNode, acc: uint) -> LiveNode { + fn access_path(&self, expr: @expr, succ: LiveNode, acc: uint) + -> LiveNode { let def = self.tcx.def_map.get(&expr.id); match relevant_def(def) { Some(nid) => { @@ -1454,7 +1461,7 @@ impl Liveness { } } - fn propagate_through_loop(expr: @expr, + fn propagate_through_loop(&self, expr: @expr, cond: Option<@expr>, body: blk, succ: LiveNode) -> LiveNode { @@ -1510,7 +1517,7 @@ impl Liveness { cond_ln } - fn with_loop_nodes(loop_node_id: node_id, + fn with_loop_nodes(&self, loop_node_id: node_id, break_ln: LiveNode, cont_ln: LiveNode, f: fn() -> R) -> R { @@ -1646,7 +1653,7 @@ enum ReadKind { } 
impl @Liveness { - fn check_ret(id: node_id, sp: span, _fk: visit::fn_kind, + fn check_ret(&self, id: node_id, sp: span, _fk: visit::fn_kind, entry_ln: LiveNode) { if self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() { // if no_ret_var is live, then we fall off the end of the @@ -1666,7 +1673,7 @@ impl @Liveness { } } - fn check_move_from_var(ln: LiveNode, + fn check_move_from_var(&self, ln: LiveNode, var: Variable, move_expr: @expr) { @@ -1691,7 +1698,7 @@ impl @Liveness { } } - fn consider_last_use(expr: @expr, ln: LiveNode, var: Variable) { + fn consider_last_use(&self, expr: @expr, ln: LiveNode, var: Variable) { debug!("consider_last_use(expr.id=%?, ln=%s, var=%s)", expr.id, ln.to_str(), var.to_str()); @@ -1701,7 +1708,7 @@ impl @Liveness { } } - fn check_lvalue(expr: @expr, vt: vt<@Liveness>) { + fn check_lvalue(&self, expr: @expr, vt: vt<@Liveness>) { match expr.node { expr_path(_) => { match self.tcx.def_map.get(&expr.id) { @@ -1729,18 +1736,18 @@ impl @Liveness { _ => { // For other kinds of lvalues, no checks are required, // and any embedded expressions are actually rvalues - visit::visit_expr(expr, self, vt); + visit::visit_expr(expr, *self, vt); } } } - fn check_for_reassignments_in_pat(pat: @pat) { + fn check_for_reassignments_in_pat(&self, pat: @pat) { do self.pat_bindings(pat) |ln, var, sp| { self.check_for_reassignment(ln, var, sp); } } - fn check_for_reassignment(ln: LiveNode, var: Variable, + fn check_for_reassignment(&self, ln: LiveNode, var: Variable, orig_span: span) { match self.assigned_on_exit(ln, var) { Some(ExprNode(span)) => { @@ -1761,7 +1768,7 @@ impl @Liveness { } } - fn report_illegal_move(lnk: LiveNodeKind, + fn report_illegal_move(&self, lnk: LiveNodeKind, var: Variable, move_expr: @expr) { @@ -1827,7 +1834,7 @@ impl @Liveness { }; } - fn report_move_location(move_expr: @expr, + fn report_move_location(&self, move_expr: @expr, var: Variable, expr_descr: &str, pronoun: &str) @@ -1842,7 +1849,7 @@ impl @Liveness { ty_to_str(self.tcx, move_expr_ty))); } - fn report_illegal_read(chk_span: span, + fn report_illegal_read(&self, chk_span: span, lnk: LiveNodeKind, var: Variable, rk: ReadKind) { @@ -1873,12 +1880,12 @@ impl @Liveness { } } - fn should_warn(var: Variable) -> Option<@~str> { + fn should_warn(&self, var: Variable) -> Option<@~str> { let name = self.ir.variable_name(var); if name[0] == ('_' as u8) { None } else { Some(name) } } - fn warn_about_unused_args(decl: fn_decl, entry_ln: LiveNode) { + fn warn_about_unused_args(&self, decl: fn_decl, entry_ln: LiveNode) { for decl.inputs.each |arg| { do pat_util::pat_bindings(self.tcx.def_map, arg.pat) |_bm, p_id, sp, _n| { @@ -1888,7 +1895,7 @@ impl @Liveness { } } - fn warn_about_unused_or_dead_vars_in_pat(pat: @pat) { + fn warn_about_unused_or_dead_vars_in_pat(&self, pat: @pat) { do self.pat_bindings(pat) |ln, var, sp| { if !self.warn_about_unused(sp, ln, var) { self.warn_about_dead_assign(sp, ln, var); @@ -1896,7 +1903,8 @@ impl @Liveness { } } - fn warn_about_unused(sp: span, ln: LiveNode, var: Variable) -> bool { + fn warn_about_unused(&self, sp: span, ln: LiveNode, var: Variable) + -> bool { if !self.used_on_entry(ln, var) { for self.should_warn(var).each |name| { @@ -1925,7 +1933,7 @@ impl @Liveness { return false; } - fn warn_about_dead_assign(sp: span, ln: LiveNode, var: Variable) { + fn warn_about_dead_assign(&self, sp: span, ln: LiveNode, var: Variable) { if self.live_on_exit(ln, var).is_none() { for self.should_warn(var).each |name| { // FIXME(#3266)--make liveness warnings lintable diff 
--git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 0621beb43a3d5..f027ca99d514f 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -277,27 +277,27 @@ pub fn cat_variant( } pub trait ast_node { - fn id() -> ast::node_id; - fn span() -> span; + fn id(&self) -> ast::node_id; + fn span(&self) -> span; } pub impl ast_node for @ast::expr { - fn id() -> ast::node_id { self.id } - fn span() -> span { self.span } + fn id(&self) -> ast::node_id { self.id } + fn span(&self) -> span { self.span } } pub impl ast_node for @ast::pat { - fn id() -> ast::node_id { self.id } - fn span() -> span { self.span } + fn id(&self) -> ast::node_id { self.id } + fn span(&self) -> span { self.span } } pub trait get_type_for_node { - fn ty(node: N) -> ty::t; + fn ty(&self, node: N) -> ty::t; } pub impl get_type_for_node for ty::ctxt { - fn ty(node: N) -> ty::t { - ty::node_id_to_type(self, node.id()) + fn ty(&self, node: N) -> ty::t { + ty::node_id_to_type(*self, node.id()) } } @@ -313,7 +313,7 @@ impl ToStr for MutabilityCategory { } impl MutabilityCategory { - static fn from_mutbl(m: ast::mutability) -> MutabilityCategory { + static fn from_mutbl(&self, m: ast::mutability) -> MutabilityCategory { match m { m_imm => McImmutable, m_const => McReadOnly, diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs index 9c5e4b9f0e086..689d6ca40eefe 100644 --- a/src/librustc/middle/resolve.rs +++ b/src/librustc/middle/resolve.rs @@ -154,8 +154,8 @@ pub enum NamespaceResult { } pub impl NamespaceResult { - pure fn is_unknown() -> bool { - match self { + pure fn is_unknown(&self) -> bool { + match *self { UnknownResult => true, _ => false } @@ -206,11 +206,11 @@ pub enum ResolveResult { } pub impl ResolveResult { - fn failed() -> bool { - match self { Failed => true, _ => false } + fn failed(&self) -> bool { + match *self { Failed => true, _ => false } } - fn indeterminate() -> bool { - match self { Indeterminate => true, _ => false } + fn indeterminate(&self) -> bool { + match *self { Indeterminate => true, _ => false } } } @@ -417,7 +417,7 @@ pub fn ImportResolution(privacy: Privacy, } pub impl ImportResolution { - fn target_for_namespace(namespace: Namespace) -> Option { + fn target_for_namespace(&self, namespace: Namespace) -> Option { match namespace { TypeNS => return copy self.type_target, ValueNS => return copy self.value_target @@ -503,7 +503,7 @@ pub fn Module(parent_link: ParentLink, } pub impl Module { - fn all_imports_resolved() -> bool { + fn all_imports_resolved(&self) -> bool { return self.imports.len() == self.resolved_import_count; } } @@ -706,7 +706,7 @@ pub struct PrimitiveTypeTable { } pub impl PrimitiveTypeTable { - fn intern(intr: @ident_interner, string: @~str, + fn intern(&self, intr: @ident_interner, string: @~str, primitive_type: prim_ty) { let ident = intr.intern(string); self.primitive_types.insert(ident, primitive_type); diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs index 9e3be94294aca..f5fa83e3fff3f 100644 --- a/src/librustc/middle/trans/base.rs +++ b/src/librustc/middle/trans/base.rs @@ -105,27 +105,27 @@ pub fn icx_popper(ccx: @CrateContext) -> icx_popper { } pub trait get_insn_ctxt { - fn insn_ctxt(s: &str) -> icx_popper; + fn insn_ctxt(&self, s: &str) -> icx_popper; } pub impl get_insn_ctxt for @CrateContext { - fn insn_ctxt(s: &str) -> icx_popper { + fn insn_ctxt(&self, s: &str) -> icx_popper { debug!("new insn_ctxt: %s", s); if 
self.sess.count_llvm_insns() { self.stats.llvm_insn_ctxt.push(str::from_slice(s)); } - icx_popper(self) + icx_popper(*self) } } pub impl get_insn_ctxt for block { - fn insn_ctxt(s: &str) -> icx_popper { + fn insn_ctxt(&self, s: &str) -> icx_popper { self.ccx().insn_ctxt(s) } } pub impl get_insn_ctxt for fn_ctxt { - fn insn_ctxt(s: &str) -> icx_popper { + fn insn_ctxt(&self, s: &str) -> icx_popper { self.ccx.insn_ctxt(s) } } diff --git a/src/librustc/middle/trans/closure.rs b/src/librustc/middle/trans/closure.rs index dc68eff9c7f64..1409199a0d2d7 100644 --- a/src/librustc/middle/trans/closure.rs +++ b/src/librustc/middle/trans/closure.rs @@ -131,7 +131,7 @@ pub impl EnvAction { } pub impl EnvValue { - fn to_str(ccx: @CrateContext) -> ~str { + fn to_str(&self, ccx: @CrateContext) -> ~str { fmt!("%s(%s)", self.action.to_str(), self.datum.to_str(ccx)) } } diff --git a/src/librustc/middle/trans/common.rs b/src/librustc/middle/trans/common.rs index b58e374320ae6..969119f0aaf63 100644 --- a/src/librustc/middle/trans/common.rs +++ b/src/librustc/middle/trans/common.rs @@ -535,17 +535,17 @@ pub struct scope_info { } pub trait get_node_info { - fn info() -> Option; + fn info(&self) -> Option; } pub impl get_node_info for @ast::expr { - fn info() -> Option { + fn info(&self) -> Option { Some(NodeInfo { id: self.id, span: self.span }) } } pub impl get_node_info for ast::blk { - fn info() -> Option { + fn info(&self) -> Option { Some(NodeInfo { id: self.node.id, span: self.span }) } } @@ -554,7 +554,7 @@ pub impl get_node_info for ast::blk { pub type optional_boxed_ast_expr = Option<@ast::expr>; pub impl get_node_info for optional_boxed_ast_expr { - fn info() -> Option { + fn info(&self) -> Option { self.chain_ref(|s| s.info()) } } @@ -627,7 +627,7 @@ pub fn rslt(bcx: block, val: ValueRef) -> Result { } pub impl Result { - fn unpack(bcx: &mut block) -> ValueRef { + fn unpack(&self, bcx: &mut block) -> ValueRef { *bcx = self.bcx; return self.val; } @@ -691,27 +691,27 @@ pub fn block_parent(cx: block) -> block { // Accessors pub impl block { - pure fn ccx() -> @CrateContext { *self.fcx.ccx } - pure fn tcx() -> ty::ctxt { self.fcx.ccx.tcx } - pure fn sess() -> Session { self.fcx.ccx.sess } + pure fn ccx(&self) -> @CrateContext { *self.fcx.ccx } + pure fn tcx(&self) -> ty::ctxt { self.fcx.ccx.tcx } + pure fn sess(&self) -> Session { self.fcx.ccx.sess } - fn node_id_to_str(id: ast::node_id) -> ~str { + fn node_id_to_str(&self, id: ast::node_id) -> ~str { ast_map::node_id_to_str(self.tcx().items, id, self.sess().intr()) } - fn expr_to_str(e: @ast::expr) -> ~str { + fn expr_to_str(&self, e: @ast::expr) -> ~str { expr_repr(self.tcx(), e) } - fn expr_is_lval(e: @ast::expr) -> bool { + fn expr_is_lval(&self, e: @ast::expr) -> bool { ty::expr_is_lval(self.tcx(), self.ccx().maps.method_map, e) } - fn expr_kind(e: @ast::expr) -> ty::ExprKind { + fn expr_kind(&self, e: @ast::expr) -> ty::ExprKind { ty::expr_kind(self.tcx(), self.ccx().maps.method_map, e) } - fn def(nid: ast::node_id) -> ast::def { + fn def(&self, nid: ast::node_id) -> ast::def { match self.tcx().def_map.find(&nid) { Some(v) => v, None => { @@ -721,15 +721,15 @@ pub impl block { } } - fn val_str(val: ValueRef) -> @str { + fn val_str(&self, val: ValueRef) -> @str { val_str(self.ccx().tn, val) } - fn llty_str(llty: TypeRef) -> @str { + fn llty_str(&self, llty: TypeRef) -> @str { ty_str(self.ccx().tn, llty) } - fn ty_to_str(t: ty::t) -> ~str { + fn ty_to_str(&self, t: ty::t) -> ~str { ty_to_str(self.tcx(), t) } fn to_str(&self) -> ~str { diff --git 
a/src/librustc/middle/trans/datum.rs b/src/librustc/middle/trans/datum.rs index ffeffa5775e9c..ba56eb56c0a9d 100644 --- a/src/librustc/middle/trans/datum.rs +++ b/src/librustc/middle/trans/datum.rs @@ -142,12 +142,12 @@ pub enum DatumMode { } pub impl DatumMode { - fn is_by_ref() -> bool { - match self { ByRef => true, ByValue => false } + fn is_by_ref(&self) -> bool { + match *self { ByRef => true, ByValue => false } } - fn is_by_value() -> bool { - match self { ByRef => false, ByValue => true } + fn is_by_value(&self) -> bool { + match *self { ByRef => false, ByValue => true } } } @@ -216,7 +216,7 @@ pub fn appropriate_mode(ty: ty::t) -> DatumMode { } pub impl Datum { - fn store_to(bcx: block, id: ast::node_id, + fn store_to(&self, bcx: block, id: ast::node_id, action: CopyAction, dst: ValueRef) -> block { /*! * @@ -231,7 +231,7 @@ pub impl Datum { } } - fn store_to_dest(bcx: block, id: ast::node_id, + fn store_to_dest(&self, bcx: block, id: ast::node_id, dest: expr::Dest) -> block { match dest { expr::Ignore => { @@ -243,7 +243,7 @@ pub impl Datum { } } - fn store_to_datum(bcx: block, id: ast::node_id, + fn store_to_datum(&self, bcx: block, id: ast::node_id, action: CopyAction, datum: Datum) -> block { debug!("store_to_datum(self=%s, action=%?, datum=%s)", self.to_str(bcx.ccx()), action, datum.to_str(bcx.ccx())); @@ -251,17 +251,20 @@ pub impl Datum { self.store_to(bcx, id, action, datum.val) } - fn move_to_datum(bcx: block, action: CopyAction, datum: Datum) -> block { + fn move_to_datum(&self, bcx: block, action: CopyAction, datum: Datum) + -> block { assert datum.mode.is_by_ref(); self.move_to(bcx, action, datum.val) } - fn copy_to_datum(bcx: block, action: CopyAction, datum: Datum) -> block { + fn copy_to_datum(&self, bcx: block, action: CopyAction, datum: Datum) + -> block { assert datum.mode.is_by_ref(); self.copy_to(bcx, action, datum.val) } - fn copy_to(bcx: block, action: CopyAction, dst: ValueRef) -> block { + fn copy_to(&self, bcx: block, action: CopyAction, dst: ValueRef) + -> block { /*! * * Copies the value into `dst`, which should be a pointer to a @@ -303,7 +306,7 @@ pub impl Datum { } } - fn copy_to_no_check(bcx: block, action: CopyAction, + fn copy_to_no_check(&self, bcx: block, action: CopyAction, dst: ValueRef) -> block { /*! @@ -333,7 +336,8 @@ pub impl Datum { // This works like copy_val, except that it deinitializes the source. // Since it needs to zero out the source, src also needs to be an lval. // - fn move_to(bcx: block, action: CopyAction, dst: ValueRef) -> block { + fn move_to(&self, bcx: block, action: CopyAction, dst: ValueRef) + -> block { let _icx = bcx.insn_ctxt("move_to"); let mut bcx = bcx; @@ -362,7 +366,7 @@ pub impl Datum { return bcx; } - fn add_clean(bcx: block) { + fn add_clean(&self, bcx: block) { /*! * * Schedules this datum for cleanup in `bcx`. The datum @@ -379,7 +383,7 @@ pub impl Datum { } } - fn cancel_clean(bcx: block) { + fn cancel_clean(&self, bcx: block) { if ty::type_needs_drop(bcx.tcx(), self.ty) { match self.source { RevokeClean => { @@ -396,7 +400,7 @@ pub impl Datum { } } - fn to_str(ccx: &CrateContext) -> ~str { + fn to_str(&self, ccx: &CrateContext) -> ~str { fmt!("Datum { val=%s, ty=%s, mode=%?, source=%? }", val_str(ccx.tn, self.val), ty_to_str(ccx.tcx, self.ty), @@ -404,7 +408,7 @@ pub impl Datum { self.source) } - fn to_value_datum(bcx: block) -> Datum { + fn to_value_datum(&self, bcx: block) -> Datum { /*! * * Yields a by-ref form of this datum. 
This may involve @@ -413,7 +417,7 @@ pub impl Datum { * it will not live longer than the current datum. */ match self.mode { - ByValue => self, + ByValue => *self, ByRef => { Datum {val: self.to_value_llval(bcx), mode: ByValue, ty: self.ty, source: RevokeClean} @@ -421,7 +425,7 @@ pub impl Datum { } } - fn to_value_llval(bcx: block) -> ValueRef { + fn to_value_llval(&self, bcx: block) -> ValueRef { /*! * * Yields the value itself. */ @@ -442,7 +446,7 @@ pub impl Datum { } } - fn to_ref_datum(bcx: block) -> Datum { + fn to_ref_datum(&self, bcx: block) -> Datum { /*! * * Yields a by-ref form of this datum. This may involve @@ -451,7 +455,7 @@ pub impl Datum { * it will not live longer than the current datum. */ match self.mode { - ByRef => self, + ByRef => *self, ByValue => { Datum {val: self.to_ref_llval(bcx), mode: ByRef, ty: self.ty, source: RevokeClean} @@ -459,7 +463,7 @@ pub impl Datum { } } - fn to_ref_llval(bcx: block) -> ValueRef { + fn to_ref_llval(&self, bcx: block) -> ValueRef { match self.mode { ByRef => self.val, ByValue => { @@ -474,13 +478,13 @@ pub impl Datum { } } - fn appropriate_mode() -> DatumMode { + fn appropriate_mode(&self) -> DatumMode { /*! See the `appropriate_mode()` function */ appropriate_mode(self.ty) } - fn to_appropriate_llval(bcx: block) -> ValueRef { + fn to_appropriate_llval(&self, bcx: block) -> ValueRef { /*! * * Yields an llvalue with the `appropriate_mode()`. */ @@ -491,7 +495,7 @@ pub impl Datum { } } - fn to_appropriate_datum(bcx: block) -> Datum { + fn to_appropriate_datum(&self, bcx: block) -> Datum { /*! * * Yields a datum with the `appropriate_mode()`. */ @@ -502,7 +506,7 @@ pub impl Datum { } } - fn GEPi(bcx: block, + fn GEPi(&self, bcx: block, ixs: &[uint], ty: ty::t, source: DatumCleanup) @@ -516,7 +520,7 @@ pub impl Datum { } } - fn root(bcx: block, root_info: RootInfo) -> block { + fn root(&self, bcx: block, root_info: RootInfo) -> block { /*! * * In some cases, borrowck will decide that an @T/@[]/@str @@ -555,7 +559,7 @@ pub impl Datum { } } - fn perform_write_guard(bcx: block) -> block { + fn perform_write_guard(&self, bcx: block) -> block { // Create scratch space, but do not root it. let llval = match self.mode { ByValue => self.val, @@ -569,7 +573,7 @@ pub impl Datum { expr::Ignore) } - fn drop_val(bcx: block) -> block { + fn drop_val(&self, bcx: block) -> block { if !ty::type_needs_drop(bcx.tcx(), self.ty) { return bcx; } @@ -580,7 +584,7 @@ pub impl Datum { }; } - fn box_body(bcx: block) -> Datum { + fn box_body(&self, bcx: block) -> Datum { /*! * * This datum must represent an @T or ~T box. Returns a new @@ -600,7 +604,7 @@ pub impl Datum { Datum {val: body, ty: content_ty, mode: ByRef, source: ZeroMem} } - fn to_rptr(bcx: block) -> Datum { + fn to_rptr(&self, bcx: block) -> Datum { //! 
// // Returns a new datum of region-pointer type containing the @@ -618,7 +622,7 @@ pub impl Datum { mode: ByValue, source: RevokeClean} } - fn try_deref( + fn try_deref(&self, bcx: block, // block wherein to generate insn's expr_id: ast::node_id, // id of expr being deref'd derefs: uint, // number of times deref'd already @@ -656,11 +660,11 @@ pub impl Datum { if is_auto { // unsafe ptrs are not AUTO-derefable return (None, bcx); } else { - return (Some(deref_ptr(bcx, &self, mt.ty)), bcx); + return (Some(deref_ptr(bcx, self, mt.ty)), bcx); } } ty::ty_rptr(_, mt) => { - return (Some(deref_ptr(bcx, &self, mt.ty)), bcx); + return (Some(deref_ptr(bcx, self, mt.ty)), bcx); } ty::ty_enum(did, ref substs) => { // Check whether this enum is a newtype enum: @@ -695,7 +699,7 @@ pub impl Datum { // code in place here to do the right // thing if this change ever goes through. assert ty::type_is_immediate(ty); - (Some(Datum {ty: ty, ..self}), bcx) + (Some(Datum {ty: ty, ..*self}), bcx) } }; } @@ -733,7 +737,7 @@ pub impl Datum { // code in place here to do the right thing if this // change ever goes through. assert ty::type_is_immediate(ty); - (Some(Datum {ty: ty, ..self}), bcx) + (Some(Datum {ty: ty, ..*self}), bcx) } } } @@ -752,7 +756,7 @@ pub impl Datum { } } - fn deref(bcx: block, + fn deref(&self, bcx: block, expr: @ast::expr, // the expression whose value is being deref'd derefs: uint) -> DatumBlock { @@ -765,7 +769,7 @@ pub impl Datum { } } - fn autoderef(bcx: block, + fn autoderef(&self, bcx: block, expr_id: ast::node_id, max: uint) -> DatumBlock { @@ -775,7 +779,7 @@ pub impl Datum { expr_id, max, self.to_str(bcx.ccx())); let _indenter = indenter(); - let mut datum = self; + let mut datum = *self; let mut derefs = 0u; let mut bcx = bcx; while derefs < max { @@ -796,56 +800,56 @@ pub impl Datum { DatumBlock { bcx: bcx, datum: datum } } - fn get_base_and_len(bcx: block) -> (ValueRef, ValueRef) { + fn get_base_and_len(&self, bcx: block) -> (ValueRef, ValueRef) { tvec::get_base_and_len(bcx, self.to_appropriate_llval(bcx), self.ty) } - fn to_result(bcx: block) -> common::Result { + fn to_result(&self, bcx: block) -> common::Result { rslt(bcx, self.to_appropriate_llval(bcx)) } } pub impl DatumBlock { - fn unpack(bcx: &mut block) -> Datum { + fn unpack(&self, bcx: &mut block) -> Datum { *bcx = self.bcx; return self.datum; } - fn assert_by_ref() -> DatumBlock { + fn assert_by_ref(&self) -> DatumBlock { assert self.datum.mode.is_by_ref(); - self + *self } - fn drop_val() -> block { + fn drop_val(&self) -> block { self.datum.drop_val(self.bcx) } - fn store_to(id: ast::node_id, action: CopyAction, + fn store_to(&self, id: ast::node_id, action: CopyAction, dst: ValueRef) -> block { self.datum.store_to(self.bcx, id, action, dst) } - fn copy_to(action: CopyAction, dst: ValueRef) -> block { + fn copy_to(&self, action: CopyAction, dst: ValueRef) -> block { self.datum.copy_to(self.bcx, action, dst) } - fn move_to(action: CopyAction, dst: ValueRef) -> block { + fn move_to(&self, action: CopyAction, dst: ValueRef) -> block { self.datum.move_to(self.bcx, action, dst) } - fn to_value_llval() -> ValueRef { + fn to_value_llval(&self) -> ValueRef { self.datum.to_value_llval(self.bcx) } - fn to_result() -> common::Result { + fn to_result(&self) -> common::Result { rslt(self.bcx, self.datum.to_appropriate_llval(self.bcx)) } - fn ccx() -> @CrateContext { + fn ccx(&self) -> @CrateContext { self.bcx.ccx() } - fn tcx() -> ty::ctxt { + fn tcx(&self) -> ty::ctxt { self.bcx.tcx() } diff --git 
a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 83265975b5065..a7b12d13d4e12 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -157,8 +157,8 @@ pub enum Dest { } impl Dest { - fn to_str(ccx: @CrateContext) -> ~str { - match self { + fn to_str(&self, ccx: @CrateContext) -> ~str { + match *self { SaveIn(v) => fmt!("SaveIn(%s)", val_str(ccx.tn, v)), Ignore => ~"Ignore" } diff --git a/src/librustc/middle/trans/tvec.rs b/src/librustc/middle/trans/tvec.rs index 345a20aa5fc69..df89647321ab4 100644 --- a/src/librustc/middle/trans/tvec.rs +++ b/src/librustc/middle/trans/tvec.rs @@ -146,7 +146,7 @@ pub struct VecTypes { } pub impl VecTypes { - fn to_str(ccx: @CrateContext) -> ~str { + fn to_str(&self, ccx: @CrateContext) -> ~str { fmt!("VecTypes {vec_ty=%s, unit_ty=%s, llunit_ty=%s, llunit_size=%s}", ty_to_str(ccx.tcx, self.vec_ty), ty_to_str(ccx.tcx, self.unit_ty), diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index fdcbf2d995714..239e86623cabd 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -657,11 +657,11 @@ impl to_bytes::IterBytes for param_bound { } pub trait Vid { - pure fn to_uint() -> uint; + pure fn to_uint(&self) -> uint; } pub impl Vid for TyVid { - pure fn to_uint() -> uint { *self } + pure fn to_uint(&self) -> uint { **self } } pub impl ToStr for TyVid { @@ -669,7 +669,7 @@ pub impl ToStr for TyVid { } pub impl Vid for IntVid { - pure fn to_uint() -> uint { *self } + pure fn to_uint(&self) -> uint { **self } } pub impl ToStr for IntVid { @@ -677,7 +677,7 @@ pub impl ToStr for IntVid { } pub impl Vid for FloatVid { - pure fn to_uint() -> uint { *self } + pure fn to_uint(&self) -> uint { **self } } pub impl ToStr for FloatVid { @@ -685,7 +685,7 @@ pub impl ToStr for FloatVid { } pub impl Vid for RegionVid { - pure fn to_uint() -> uint { *self } + pure fn to_uint(&self) -> uint { **self } } pub impl ToStr for RegionVid { diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 3553caf5c33f7..e63e46ace3d05 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -660,19 +660,19 @@ pub impl FnCtxt { } pub impl region_scope for @mut FnCtxt { - pure fn anon_region(span: span) -> Result { + pure fn anon_region(&self, span: span) -> Result { // XXX: Unsafe to work around purity unsafe { result::Ok(self.infcx().next_region_var_nb(span)) } } - pure fn self_region(_span: span) -> Result { + pure fn self_region(&self, _span: span) -> Result { // XXX: Unsafe to work around purity unsafe { self.search_in_scope_regions(ty::br_self) } } - pure fn named_region(_span: span, id: ast::ident) + pure fn named_region(&self, _span: span, id: ast::ident) -> Result { // XXX: Unsafe to work around purity unsafe { diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 649f4c798785c..29738f2826661 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ b/src/librustc/middle/typeck/coherence.rs @@ -197,6 +197,7 @@ pub struct CoherenceChecker { } pub impl CoherenceChecker { + // IMPLICIT SELF WARNING: fix this! fn check_coherence(crate: @crate) { // Check implementations and traits. This populates the tables // containing the inherent methods and extension methods. 
It also @@ -235,7 +236,8 @@ pub impl CoherenceChecker { self.populate_destructor_table(); } - fn check_implementation(item: @item, associated_traits: ~[@trait_ref]) { + fn check_implementation(&self, + item: @item, associated_traits: ~[@trait_ref]) { let self_type = self.crate_context.tcx.tcache.get( &local_def(item.id)); @@ -302,7 +304,8 @@ pub impl CoherenceChecker { let implementation; match implementation_opt { None => { - implementation = self.create_impl_from_item(item); + implementation = + self.create_impl_from_item(item); } Some(copy existing_implementation) => { implementation = existing_implementation; @@ -321,7 +324,7 @@ pub impl CoherenceChecker { // Creates default method IDs and performs type substitutions for an impl // and trait pair. Then, for each provided method in the trait, inserts a // `ProvidedMethodInfo` instance into the `provided_method_sources` map. - fn instantiate_default_methods(impl_id: ast::node_id, + fn instantiate_default_methods(&self, impl_id: ast::node_id, trait_did: ast::def_id) { for self.each_provided_trait_method(trait_did) |trait_method| { // Synthesize an ID. @@ -330,7 +333,8 @@ pub impl CoherenceChecker { let new_did = local_def(new_id); // XXX: Perform substitutions. - let new_polytype = ty::lookup_item_type(tcx, trait_method.def_id); + let new_polytype = ty::lookup_item_type(tcx, + trait_method.def_id); tcx.tcache.insert(new_did, new_polytype); // Pair the new synthesized ID up with the @@ -380,7 +384,8 @@ pub impl CoherenceChecker { } } - fn add_inherent_method(base_def_id: def_id, implementation: @Impl) { + fn add_inherent_method(&self, + base_def_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.inherent_methods .find(&base_def_id) { @@ -397,7 +402,7 @@ pub impl CoherenceChecker { implementation_list.push(implementation); } - fn add_trait_method(trait_id: def_id, implementation: @Impl) { + fn add_trait_method(&self, trait_id: def_id, implementation: @Impl) { let implementation_list; match self.crate_context.coherence_info.extension_methods .find(&trait_id) { @@ -414,7 +419,7 @@ pub impl CoherenceChecker { implementation_list.push(implementation); } - fn check_implementation_coherence() { + fn check_implementation_coherence(&self) { let coherence_info = &mut self.crate_context.coherence_info; let extension_methods = &coherence_info.extension_methods; @@ -423,7 +428,7 @@ pub impl CoherenceChecker { } } - fn check_implementation_coherence_of(trait_def_id: def_id) { + fn check_implementation_coherence_of(&self, trait_def_id: def_id) { // Unify pairs of polytypes. do self.iter_impls_of_trait(trait_def_id) |a| { @@ -459,7 +464,8 @@ pub impl CoherenceChecker { // Adds an impl of trait trait_t for self type self_t; that impl // is the_impl - fn add_impl_for_trait(trait_t: def_id, self_t: t, the_impl: @Impl) { + fn add_impl_for_trait(&self, + trait_t: def_id, self_t: t, the_impl: @Impl) { debug!("Adding impl %? of %? for %s", the_impl.did, trait_t, ty_to_str(self.crate_context.tcx, self_t)); @@ -475,7 +481,7 @@ pub impl CoherenceChecker { } } - fn iter_impls_of_trait(trait_def_id: def_id, + fn iter_impls_of_trait(&self, trait_def_id: def_id, f: &fn(@Impl)) { let coherence_info = &mut self.crate_context.coherence_info; @@ -491,7 +497,7 @@ pub impl CoherenceChecker { } } - fn each_provided_trait_method( + fn each_provided_trait_method(&self, trait_did: ast::def_id, f: &fn(x: &ty::method) -> bool) { // Make a list of all the names of the provided methods. 
@@ -511,7 +517,7 @@ pub impl CoherenceChecker { } } - fn polytypes_unify(polytype_a: ty_param_bounds_and_ty, + fn polytypes_unify(&self, polytype_a: ty_param_bounds_and_ty, polytype_b: ty_param_bounds_and_ty) -> bool { let universally_quantified_a = @@ -527,7 +533,7 @@ pub impl CoherenceChecker { // Converts a polytype to a monotype by replacing all parameters with // type variables. Returns the monotype and the type variables created. - fn universally_quantify_polytype(polytype: ty_param_bounds_and_ty) + fn universally_quantify_polytype(&self, polytype: ty_param_bounds_and_ty) -> UniversalQuantificationResult { // NDM--this span is bogus. let self_region = @@ -558,7 +564,8 @@ pub impl CoherenceChecker { } } - fn can_unify_universally_quantified(a: &a/UniversalQuantificationResult, + fn can_unify_universally_quantified(&self, + a: &a/UniversalQuantificationResult, b: &a/UniversalQuantificationResult) -> bool { let mut might_unify = true; @@ -610,12 +617,13 @@ pub impl CoherenceChecker { might_unify } - fn get_self_type_for_implementation(implementation: @Impl) + fn get_self_type_for_implementation(&self, implementation: @Impl) -> ty_param_bounds_and_ty { return self.crate_context.tcx.tcache.get(&implementation.did); } // Privileged scope checking + // IMPLICIT SELF WARNING: fix this! fn check_privileged_scopes(crate: @crate) { visit_crate(*crate, (), mk_vt(@Visitor { visit_item: |item, _context, visitor| { @@ -699,7 +707,7 @@ pub impl CoherenceChecker { })); } - fn trait_ref_to_trait_def_id(trait_ref: @trait_ref) -> def_id { + fn trait_ref_to_trait_def_id(&self, trait_ref: @trait_ref) -> def_id { let def_map = self.crate_context.tcx.def_map; let trait_def = def_map.get(&trait_ref.ref_id); let trait_id = def_id_of_def(trait_def); @@ -708,7 +716,7 @@ pub impl CoherenceChecker { // This check doesn't really have anything to do with coherence. It's // here for historical reasons - fn please_check_that_trait_methods_are_implemented( + fn please_check_that_trait_methods_are_implemented(&self, all_methods: &mut ~[@MethodInfo], trait_did: def_id, trait_ref_span: span) { @@ -735,7 +743,7 @@ pub impl CoherenceChecker { } // Converts an implementation in the AST to an Impl structure. - fn create_impl_from_item(item: @item) -> @Impl { + fn create_impl_from_item(&self, item: @item) -> @Impl { fn add_provided_methods(all_methods: &mut ~[@MethodInfo], all_provided_methods: ~[@ProvidedMethodInfo], sess: driver::session::Session) { @@ -806,7 +814,7 @@ pub impl CoherenceChecker { } } - fn span_of_impl(implementation: @Impl) -> span { + fn span_of_impl(&self, implementation: @Impl) -> span { assert implementation.did.crate == local_crate; match self.crate_context.tcx.items.find(&implementation.did.node) { Some(node_item(item, _)) => { @@ -822,7 +830,7 @@ pub impl CoherenceChecker { // External crate handling - fn add_impls_for_module(impls_seen: HashMap, + fn add_impls_for_module(&self, impls_seen: HashMap, crate_store: @mut CStore, module_def_id: def_id) { let implementations = get_impls_for_mod(crate_store, @@ -907,7 +915,8 @@ pub impl CoherenceChecker { } } - fn add_default_methods_for_external_trait(trait_def_id: ast::def_id) { + fn add_default_methods_for_external_trait(&self, + trait_def_id: ast::def_id) { let tcx = self.crate_context.tcx; let pmm = tcx.provided_methods; @@ -942,7 +951,7 @@ pub impl CoherenceChecker { // Adds implementations and traits from external crates to the coherence // info. 
- fn add_external_crates() { + fn add_external_crates(&self) { let impls_seen = HashMap(); let crate_store = self.crate_context.tcx.sess.cstore; @@ -983,7 +992,7 @@ pub impl CoherenceChecker { // Destructors // - fn populate_destructor_table() { + fn populate_destructor_table(&self) { let coherence_info = &mut self.crate_context.coherence_info; let tcx = self.crate_context.tcx; let drop_trait = tcx.lang_items.drop_trait(); diff --git a/src/librustc/middle/typeck/infer/combine.rs b/src/librustc/middle/typeck/infer/combine.rs index 34068a5eb0d43..1c6b1507629c5 100644 --- a/src/librustc/middle/typeck/infer/combine.rs +++ b/src/librustc/middle/typeck/infer/combine.rs @@ -78,37 +78,38 @@ pub fn macros() { } pub trait Combine { - fn infcx() -> @mut InferCtxt; - fn tag() -> ~str; - fn a_is_expected() -> bool; - fn span() -> span; - - fn sub() -> Sub; - fn lub() -> Lub; - fn glb() -> Glb; - - fn mts(a: ty::mt, b: ty::mt) -> cres; - fn contratys(a: ty::t, b: ty::t) -> cres; - fn tys(a: ty::t, b: ty::t) -> cres; - fn tps(as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]>; - fn self_tys(a: Option, b: Option) -> cres>; - fn substs(did: ast::def_id, as_: &ty::substs, + fn infcx(&self) -> @mut InferCtxt; + fn tag(&self) -> ~str; + fn a_is_expected(&self) -> bool; + fn span(&self) -> span; + + fn sub(&self) -> Sub; + fn lub(&self) -> Lub; + fn glb(&self) -> Glb; + + fn mts(&self, a: ty::mt, b: ty::mt) -> cres; + fn contratys(&self, a: ty::t, b: ty::t) -> cres; + fn tys(&self, a: ty::t, b: ty::t) -> cres; + fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]>; + fn self_tys(&self, a: Option, b: Option) + -> cres>; + fn substs(&self, did: ast::def_id, as_: &ty::substs, bs: &ty::substs) -> cres; - fn bare_fn_tys(a: &ty::BareFnTy, + fn bare_fn_tys(&self, a: &ty::BareFnTy, b: &ty::BareFnTy) -> cres; - fn closure_tys(a: &ty::ClosureTy, + fn closure_tys(&self, a: &ty::ClosureTy, b: &ty::ClosureTy) -> cres; - fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres; - fn flds(a: ty::field, b: ty::field) -> cres; - fn modes(a: ast::mode, b: ast::mode) -> cres; - fn args(a: ty::arg, b: ty::arg) -> cres; - fn sigils(p1: ast::Sigil, p2: ast::Sigil) -> cres; - fn purities(a: purity, b: purity) -> cres; - fn abis(a: ast::Abi, b: ast::Abi) -> cres; - fn oncenesses(a: Onceness, b: Onceness) -> cres; - fn contraregions(a: ty::Region, b: ty::Region) -> cres; - fn regions(a: ty::Region, b: ty::Region) -> cres; - fn vstores(vk: ty::terr_vstore_kind, + fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres; + fn flds(&self, a: ty::field, b: ty::field) -> cres; + fn modes(&self, a: ast::mode, b: ast::mode) -> cres; + fn args(&self, a: ty::arg, b: ty::arg) -> cres; + fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) -> cres; + fn purities(&self, a: purity, b: purity) -> cres; + fn abis(&self, a: ast::Abi, b: ast::Abi) -> cres; + fn oncenesses(&self, a: Onceness, b: Onceness) -> cres; + fn contraregions(&self, a: ty::Region, b: ty::Region) -> cres; + fn regions(&self, a: ty::Region, b: ty::Region) -> cres; + fn vstores(&self, vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres; } diff --git a/src/librustc/middle/typeck/infer/glb.rs b/src/librustc/middle/typeck/infer/glb.rs index 464a149a488ff..5008791723eee 100644 --- a/src/librustc/middle/typeck/infer/glb.rs +++ b/src/librustc/middle/typeck/infer/glb.rs @@ -28,16 +28,16 @@ use std::list; pub enum Glb = CombineFields; // "greatest lower bound" (common subtype) pub impl Combine for Glb { - fn infcx() -> @mut InferCtxt { self.infcx } - fn tag() -> ~str { ~"glb" } - fn 
a_is_expected() -> bool { self.a_is_expected } - fn span() -> span { self.span } + fn infcx(&self) -> @mut InferCtxt { self.infcx } + fn tag(&self) -> ~str { ~"glb" } + fn a_is_expected(&self) -> bool { self.a_is_expected } + fn span(&self) -> span { self.span } - fn sub() -> Sub { Sub(*self) } - fn lub() -> Lub { Lub(*self) } - fn glb() -> Glb { Glb(*self) } + fn sub(&self) -> Sub { Sub(**self) } + fn lub(&self) -> Lub { Lub(**self) } + fn glb(&self) -> Glb { Glb(**self) } - fn mts(a: ty::mt, b: ty::mt) -> cres { + fn mts(&self, a: ty::mt, b: ty::mt) -> cres { let tcx = self.infcx.tcx; debug!("%s.mts(%s, %s)", @@ -49,17 +49,17 @@ pub impl Combine for Glb { // If one side or both is mut, then the GLB must use // the precise type from the mut side. (m_mutbl, m_const) => { - Sub(*self).tys(a.ty, b.ty).chain(|_t| { + Sub(**self).tys(a.ty, b.ty).chain(|_t| { Ok(ty::mt {ty: a.ty, mutbl: m_mutbl}) }) } (m_const, m_mutbl) => { - Sub(*self).tys(b.ty, a.ty).chain(|_t| { + Sub(**self).tys(b.ty, a.ty).chain(|_t| { Ok(ty::mt {ty: b.ty, mutbl: m_mutbl}) }) } (m_mutbl, m_mutbl) => { - eq_tys(&self, a.ty, b.ty).then(|| { + eq_tys(self, a.ty, b.ty).then(|| { Ok(ty::mt {ty: a.ty, mutbl: m_mutbl}) }) } @@ -90,11 +90,11 @@ pub impl Combine for Glb { } } - fn contratys(a: ty::t, b: ty::t) -> cres { - Lub(*self).tys(a, b) + fn contratys(&self, a: ty::t, b: ty::t) -> cres { + Lub(**self).tys(a, b) } - fn purities(a: purity, b: purity) -> cres { + fn purities(&self, a: purity, b: purity) -> cres { match (a, b) { (pure_fn, _) | (_, pure_fn) => Ok(pure_fn), (extern_fn, _) | (_, extern_fn) => Ok(extern_fn), @@ -103,14 +103,14 @@ pub impl Combine for Glb { } } - fn oncenesses(a: Onceness, b: Onceness) -> cres { + fn oncenesses(&self, a: Onceness, b: Onceness) -> cres { match (a, b) { (Many, _) | (_, Many) => Ok(Many), (Once, Once) => Ok(Once) } } - fn regions(a: ty::Region, b: ty::Region) -> cres { + fn regions(&self, a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.inf_str(self.infcx), @@ -121,34 +121,35 @@ pub impl Combine for Glb { } } - fn contraregions(a: ty::Region, b: ty::Region) -> cres { - Lub(*self).regions(a, b) + fn contraregions(&self, a: ty::Region, b: ty::Region) + -> cres { + Lub(**self).regions(a, b) } - fn tys(a: ty::t, b: ty::t) -> cres { - super_lattice_tys(&self, a, b) + fn tys(&self, a: ty::t, b: ty::t) -> cres { + super_lattice_tys(self, a, b) } // Traits please (FIXME: #2794): - fn flds(a: ty::field, b: ty::field) -> cres { - super_flds(&self, a, b) + fn flds(&self, a: ty::field, b: ty::field) -> cres { + super_flds(self, a, b) } - fn vstores(vk: ty::terr_vstore_kind, + fn vstores(&self, vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres { - super_vstores(&self, vk, a, b) + super_vstores(self, vk, a, b) } - fn modes(a: ast::mode, b: ast::mode) -> cres { - super_modes(&self, a, b) + fn modes(&self, a: ast::mode, b: ast::mode) -> cres { + super_modes(self, a, b) } - fn args(a: ty::arg, b: ty::arg) -> cres { - super_args(&self, a, b) + fn args(&self, a: ty::arg, b: ty::arg) -> cres { + super_args(self, a, b) } - fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres { + fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres { // Note: this is a subtle algorithm. For a full explanation, // please see the large comment in `region_inference.rs`. 
@@ -166,14 +167,14 @@ pub impl Combine for Glb { let (a_with_fresh, a_isr) = self.infcx.replace_bound_regions_with_fresh_regions( self.span, a); - let a_vars = var_ids(&self, a_isr); + let a_vars = var_ids(self, a_isr); let (b_with_fresh, b_isr) = self.infcx.replace_bound_regions_with_fresh_regions( self.span, b); - let b_vars = var_ids(&self, b_isr); + let b_vars = var_ids(self, b_isr); // Collect constraints. - let sig0 = if_ok!(super_fn_sigs(&self, &a_with_fresh, &b_with_fresh)); + let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh)); debug!("sig0 = %s", sig0.inf_str(self.infcx)); // Generalize the regions appearing in fn_ty0 if possible @@ -182,7 +183,7 @@ pub impl Combine for Glb { let sig1 = self.infcx.fold_regions_in_sig( &sig0, - |r, _in_fn| generalize_region(&self, snapshot, + |r, _in_fn| generalize_region(self, snapshot, new_vars, a_isr, a_vars, b_vars, r)); debug!("sig1 = %s", sig1.inf_str(self.infcx)); @@ -267,36 +268,37 @@ pub impl Combine for Glb { } } - fn sigils(p1: ast::Sigil, p2: ast::Sigil) -> cres { - super_sigils(&self, p1, p2) + fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) -> cres { + super_sigils(self, p1, p2) } - fn abis(p1: ast::Abi, p2: ast::Abi) -> cres { - super_abis(&self, p1, p2) + fn abis(&self, p1: ast::Abi, p2: ast::Abi) -> cres { + super_abis(self, p1, p2) } - fn bare_fn_tys(a: &ty::BareFnTy, + fn bare_fn_tys(&self, a: &ty::BareFnTy, b: &ty::BareFnTy) -> cres { - super_bare_fn_tys(&self, a, b) + super_bare_fn_tys(self, a, b) } - fn closure_tys(a: &ty::ClosureTy, + fn closure_tys(&self, a: &ty::ClosureTy, b: &ty::ClosureTy) -> cres { - super_closure_tys(&self, a, b) + super_closure_tys(self, a, b) } - fn substs(did: ast::def_id, + fn substs(&self, did: ast::def_id, as_: &ty::substs, bs: &ty::substs) -> cres { - super_substs(&self, did, as_, bs) + super_substs(self, did, as_, bs) } - fn tps(as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { - super_tps(&self, as_, bs) + fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { + super_tps(self, as_, bs) } - fn self_tys(a: Option, b: Option) -> cres> { - super_self_tys(&self, a, b) + fn self_tys(&self, a: Option, b: Option) + -> cres> { + super_self_tys(self, a, b) } } diff --git a/src/librustc/middle/typeck/infer/lattice.rs b/src/librustc/middle/typeck/infer/lattice.rs index c7ee1b871a8ed..1a919ac0f3b0b 100644 --- a/src/librustc/middle/typeck/infer/lattice.rs +++ b/src/librustc/middle/typeck/infer/lattice.rs @@ -50,23 +50,27 @@ use middle::typeck::infer::to_str::InferStr; use std::list; pub trait LatticeValue { - static fn sub(cf: &CombineFields, a: &Self, b: &Self) -> ures; - static fn lub(cf: &CombineFields, a: &Self, b: &Self) -> cres; - static fn glb(cf: &CombineFields, a: &Self, b: &Self) -> cres; + static fn sub(&self, cf: &CombineFields, a: &Self, b: &Self) -> ures; + static fn lub(&self, cf: &CombineFields, a: &Self, b: &Self) + -> cres; + static fn glb(&self, cf: &CombineFields, a: &Self, b: &Self) + -> cres; } pub type LatticeOp = &fn(cf: &CombineFields, a: &T, b: &T) -> cres; pub impl LatticeValue for ty::t { - static fn sub(cf: &CombineFields, a: &ty::t, b: &ty::t) -> ures { + static fn sub(&self, cf: &CombineFields, a: &ty::t, b: &ty::t) -> ures { Sub(*cf).tys(*a, *b).to_ures() } - static fn lub(cf: &CombineFields, a: &ty::t, b: &ty::t) -> cres { + static fn lub(&self, cf: &CombineFields, a: &ty::t, b: &ty::t) + -> cres { Lub(*cf).tys(*a, *b) } - static fn glb(cf: &CombineFields, a: &ty::t, b: &ty::t) -> cres { + static fn glb(&self, cf: &CombineFields, a: &ty::t, b: 
&ty::t) + -> cres { Glb(*cf).tys(*a, *b) } } @@ -292,39 +296,39 @@ pub impl CombineFields { // for pairs of variables or for variables and values. pub trait LatticeDir { - fn combine_fields() -> CombineFields; - fn bnd(b: &Bounds) -> Option; - fn with_bnd(b: &Bounds, +t: T) -> Bounds; + fn combine_fields(&self) -> CombineFields; + fn bnd(&self, b: &Bounds) -> Option; + fn with_bnd(&self, b: &Bounds, +t: T) -> Bounds; } pub trait TyLatticeDir { - fn ty_bot(t: ty::t) -> cres; + fn ty_bot(&self, t: ty::t) -> cres; } pub impl LatticeDir for Lub { - fn combine_fields() -> CombineFields { *self } - fn bnd(b: &Bounds) -> Option { b.ub } - fn with_bnd(b: &Bounds, +t: T) -> Bounds { + fn combine_fields(&self) -> CombineFields { **self } + fn bnd(&self, b: &Bounds) -> Option { b.ub } + fn with_bnd(&self, b: &Bounds, +t: T) -> Bounds { Bounds { ub: Some(t), ..*b } } } pub impl TyLatticeDir for Lub { - fn ty_bot(t: ty::t) -> cres { + fn ty_bot(&self, t: ty::t) -> cres { Ok(t) } } pub impl LatticeDir for Glb { - fn combine_fields() -> CombineFields { *self } - fn bnd(b: &Bounds) -> Option { b.lb } - fn with_bnd(b: &Bounds, +t: T) -> Bounds { + fn combine_fields(&self) -> CombineFields { **self } + fn bnd(&self, b: &Bounds) -> Option { b.lb } + fn with_bnd(&self, b: &Bounds, +t: T) -> Bounds { Bounds { lb: Some(t), ..*b } } } pub impl TyLatticeDir for Glb { - fn ty_bot(_t: ty::t) -> cres { + fn ty_bot(&self, _t: ty::t) -> cres { Ok(ty::mk_bot(self.infcx.tcx)) } } diff --git a/src/librustc/middle/typeck/infer/lub.rs b/src/librustc/middle/typeck/infer/lub.rs index 60f6cd40e0430..df4b8c0be09b5 100644 --- a/src/librustc/middle/typeck/infer/lub.rs +++ b/src/librustc/middle/typeck/infer/lub.rs @@ -32,21 +32,22 @@ pub fn macros() { pub enum Lub = CombineFields; // least-upper-bound: common supertype pub impl Lub { - fn bot_ty(b: ty::t) -> cres { Ok(b) } - fn ty_bot(b: ty::t) -> cres { self.bot_ty(b) } // commutative + fn bot_ty(&self, b: ty::t) -> cres { Ok(b) } + fn ty_bot(&self, b: ty::t) + -> cres { self.bot_ty(b) } // commutative } pub impl Combine for Lub { - fn infcx() -> @mut InferCtxt { self.infcx } - fn tag() -> ~str { ~"lub" } - fn a_is_expected() -> bool { self.a_is_expected } - fn span() -> span { self.span } + fn infcx(&self) -> @mut InferCtxt { self.infcx } + fn tag(&self) -> ~str { ~"lub" } + fn a_is_expected(&self) -> bool { self.a_is_expected } + fn span(&self) -> span { self.span } - fn sub() -> Sub { Sub(*self) } - fn lub() -> Lub { Lub(*self) } - fn glb() -> Glb { Glb(*self) } + fn sub(&self) -> Sub { Sub(**self) } + fn lub(&self) -> Lub { Lub(**self) } + fn glb(&self) -> Glb { Glb(**self) } - fn mts(a: ty::mt, b: ty::mt) -> cres { + fn mts(&self, a: ty::mt, b: ty::mt) -> cres { let tcx = self.infcx.tcx; debug!("%s.mts(%s, %s)", @@ -67,7 +68,7 @@ pub impl Combine for Lub { m_mutbl => { self.infcx.try(|| { - eq_tys(&self, a.ty, b.ty).then(|| { + eq_tys(self, a.ty, b.ty).then(|| { Ok(ty::mt {ty: a.ty, mutbl: m}) }) }).chain_err(|_e| { @@ -79,11 +80,11 @@ pub impl Combine for Lub { } } - fn contratys(a: ty::t, b: ty::t) -> cres { - Glb(*self).tys(a, b) + fn contratys(&self, a: ty::t, b: ty::t) -> cres { + Glb(**self).tys(a, b) } - fn purities(a: purity, b: purity) -> cres { + fn purities(&self, a: purity, b: purity) -> cres { match (a, b) { (unsafe_fn, _) | (_, unsafe_fn) => Ok(unsafe_fn), (impure_fn, _) | (_, impure_fn) => Ok(impure_fn), @@ -92,18 +93,19 @@ pub impl Combine for Lub { } } - fn oncenesses(a: Onceness, b: Onceness) -> cres { + fn oncenesses(&self, a: Onceness, b: Onceness) -> 
cres { match (a, b) { (Once, _) | (_, Once) => Ok(Once), (Many, Many) => Ok(Many) } } - fn contraregions(a: ty::Region, b: ty::Region) -> cres { - return Glb(*self).regions(a, b); + fn contraregions(&self, a: ty::Region, b: ty::Region) + -> cres { + return Glb(**self).regions(a, b); } - fn regions(a: ty::Region, b: ty::Region) -> cres { + fn regions(&self, a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%?, %?)", self.tag(), a.inf_str(self.infcx), @@ -114,7 +116,7 @@ pub impl Combine for Lub { } } - fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres { + fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres { // Note: this is a subtle algorithm. For a full explanation, // please see the large comment in `region_inference.rs`. @@ -133,7 +135,7 @@ pub impl Combine for Lub { self.span, b); // Collect constraints. - let sig0 = if_ok!(super_fn_sigs(&self, &a_with_fresh, &b_with_fresh)); + let sig0 = if_ok!(super_fn_sigs(self, &a_with_fresh, &b_with_fresh)); debug!("sig0 = %s", sig0.inf_str(self.infcx)); // Generalize the regions appearing in sig0 if possible @@ -142,7 +144,7 @@ pub impl Combine for Lub { let sig1 = self.infcx.fold_regions_in_sig( &sig0, - |r, _in_fn| generalize_region(&self, snapshot, new_vars, + |r, _in_fn| generalize_region(self, snapshot, new_vars, a_isr, r)); return Ok(sig1); @@ -191,58 +193,60 @@ pub impl Combine for Lub { } } - fn bare_fn_tys(a: &ty::BareFnTy, + fn bare_fn_tys(&self, a: &ty::BareFnTy, b: &ty::BareFnTy) -> cres { - super_bare_fn_tys(&self, a, b) + super_bare_fn_tys(self, a, b) } - fn closure_tys(a: &ty::ClosureTy, + fn closure_tys(&self, a: &ty::ClosureTy, b: &ty::ClosureTy) -> cres { - super_closure_tys(&self, a, b) + super_closure_tys(self, a, b) } // Traits please (FIXME: #2794): - fn sigils(p1: ast::Sigil, p2: ast::Sigil) -> cres { - super_sigils(&self, p1, p2) + fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) + -> cres { + super_sigils(self, p1, p2) } - fn abis(p1: ast::Abi, p2: ast::Abi) -> cres { - super_abis(&self, p1, p2) + fn abis(&self, p1: ast::Abi, p2: ast::Abi) -> cres { + super_abis(self, p1, p2) } - fn tys(a: ty::t, b: ty::t) -> cres { - super_lattice_tys(&self, a, b) + fn tys(&self, a: ty::t, b: ty::t) -> cres { + super_lattice_tys(self, a, b) } - fn flds(a: ty::field, b: ty::field) -> cres { - super_flds(&self, a, b) + fn flds(&self, a: ty::field, b: ty::field) -> cres { + super_flds(self, a, b) } - fn vstores(vk: ty::terr_vstore_kind, + fn vstores(&self, vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres { - super_vstores(&self, vk, a, b) + super_vstores(self, vk, a, b) } - fn modes(a: ast::mode, b: ast::mode) -> cres { - super_modes(&self, a, b) + fn modes(&self, a: ast::mode, b: ast::mode) -> cres { + super_modes(self, a, b) } - fn args(a: ty::arg, b: ty::arg) -> cres { - super_args(&self, a, b) + fn args(&self, a: ty::arg, b: ty::arg) -> cres { + super_args(self, a, b) } - fn substs(did: ast::def_id, + fn substs(&self, did: ast::def_id, as_: &ty::substs, bs: &ty::substs) -> cres { - super_substs(&self, did, as_, bs) + super_substs(self, did, as_, bs) } - fn tps(as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { - super_tps(&self, as_, bs) + fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { + super_tps(self, as_, bs) } - fn self_tys(a: Option, b: Option) -> cres> { - super_self_tys(&self, a, b) + fn self_tys(&self, a: Option, b: Option) + -> cres> { + super_self_tys(self, a, b) } } diff --git a/src/librustc/middle/typeck/infer/mod.rs b/src/librustc/middle/typeck/infer/mod.rs index 
7d799b7ea2fb6..f013712595281 100644 --- a/src/librustc/middle/typeck/infer/mod.rs +++ b/src/librustc/middle/typeck/infer/mod.rs @@ -489,24 +489,24 @@ fn resolve_borrowings(cx: @mut InferCtxt) { */ trait then { - fn then(f: fn() -> Result) + fn then(&self, f: fn() -> Result) -> Result; } impl then for ures { - fn then(f: fn() -> Result) + fn then(&self, f: fn() -> Result) -> Result { self.chain(|_i| f()) } } trait ToUres { - fn to_ures() -> ures; + fn to_ures(&self) -> ures; } impl ToUres for cres { - fn to_ures() -> ures { - match self { + fn to_ures(&self) -> ures { + match *self { Ok(ref _v) => Ok(()), Err(ref e) => Err((*e)) } @@ -514,14 +514,14 @@ impl ToUres for cres { } trait CresCompare { - fn compare(t: T, f: fn() -> ty::type_err) -> cres; + fn compare(&self, t: T, f: fn() -> ty::type_err) -> cres; } impl CresCompare for cres { - fn compare(t: T, f: fn() -> ty::type_err) -> cres { + fn compare(&self, t: T, f: fn() -> ty::type_err) -> cres { do self.chain |s| { if s == t { - self + *self } else { Err(f()) } @@ -551,22 +551,22 @@ struct Snapshot { } impl @mut InferCtxt { - fn combine_fields(a_is_expected: bool, + fn combine_fields(&self, a_is_expected: bool, span: span) -> CombineFields { - CombineFields {infcx: self, + CombineFields {infcx: *self, a_is_expected: a_is_expected, span: span} } - fn sub(a_is_expected: bool, span: span) -> Sub { + fn sub(&self, a_is_expected: bool, span: span) -> Sub { Sub(self.combine_fields(a_is_expected, span)) } - fn in_snapshot() -> bool { + fn in_snapshot(&self) -> bool { self.region_vars.in_snapshot() } - fn start_snapshot() -> Snapshot { + fn start_snapshot(&self) -> Snapshot { Snapshot { ty_var_bindings_len: self.ty_var_bindings.bindings.len(), @@ -579,7 +579,7 @@ impl @mut InferCtxt { } } - fn rollback_to(snapshot: &Snapshot) { + fn rollback_to(&self, snapshot: &Snapshot) { debug!("rollback!"); rollback_to(&mut self.ty_var_bindings, snapshot.ty_var_bindings_len); @@ -643,7 +643,7 @@ fn next_simple_var( } impl @mut InferCtxt { - fn next_ty_var_id() -> TyVid { + fn next_ty_var_id(&self) -> TyVid { let id = self.ty_var_counter; self.ty_var_counter += 1; let vals = self.ty_var_bindings.vals; @@ -651,37 +651,37 @@ impl @mut InferCtxt { return TyVid(id); } - fn next_ty_var() -> ty::t { + fn next_ty_var(&self) -> ty::t { ty::mk_var(self.tcx, self.next_ty_var_id()) } - fn next_ty_vars(n: uint) -> ~[ty::t] { + fn next_ty_vars(&self, n: uint) -> ~[ty::t] { vec::from_fn(n, |_i| self.next_ty_var()) } - fn next_int_var_id() -> IntVid { + fn next_int_var_id(&self) -> IntVid { IntVid(next_simple_var(&mut self.int_var_counter, &mut self.int_var_bindings)) } - fn next_int_var() -> ty::t { + fn next_int_var(&self) -> ty::t { ty::mk_int_var(self.tcx, self.next_int_var_id()) } - fn next_float_var_id() -> FloatVid { + fn next_float_var_id(&self) -> FloatVid { FloatVid(next_simple_var(&mut self.float_var_counter, &mut self.float_var_bindings)) } - fn next_float_var() -> ty::t { + fn next_float_var(&self) -> ty::t { ty::mk_float_var(self.tcx, self.next_float_var_id()) } - fn next_region_var_nb(span: span) -> ty::Region { + fn next_region_var_nb(&self, span: span) -> ty::Region { ty::re_infer(ty::ReVar(self.region_vars.new_region_var(span))) } - fn next_region_var_with_lb(span: span, + fn next_region_var_with_lb(&self, span: span, lb_region: ty::Region) -> ty::Region { let region_var = self.next_region_var_nb(span); @@ -693,27 +693,28 @@ impl @mut InferCtxt { return region_var; } - fn next_region_var(span: span, scope_id: ast::node_id) -> ty::Region { + fn 
next_region_var(&self, span: span, scope_id: ast::node_id) + -> ty::Region { self.next_region_var_with_lb(span, ty::re_scope(scope_id)) } - fn resolve_regions() { + fn resolve_regions(&self) { self.region_vars.resolve_regions(); } - fn ty_to_str(t: ty::t) -> ~str { + fn ty_to_str(&self, t: ty::t) -> ~str { ty_to_str(self.tcx, self.resolve_type_vars_if_possible(t)) } - fn resolve_type_vars_if_possible(typ: ty::t) -> ty::t { - match resolve_type(self, typ, resolve_nested_tvar | resolve_ivar) { + fn resolve_type_vars_if_possible(&self, typ: ty::t) -> ty::t { + match resolve_type(*self, typ, resolve_nested_tvar | resolve_ivar) { result::Ok(new_type) => new_type, result::Err(_) => typ } } - fn type_error_message(sp: span, mk_msg: fn(~str) -> ~str, + fn type_error_message(&self, sp: span, mk_msg: fn(~str) -> ~str, actual_ty: ty::t, err: Option<&ty::type_err>) { let actual_ty = self.resolve_type_vars_if_possible(actual_ty); @@ -731,7 +732,7 @@ impl @mut InferCtxt { ty::note_and_explain_type_err(self.tcx, *err)); } - fn report_mismatched_types(sp: span, e: ty::t, a: ty::t, + fn report_mismatched_types(&self, sp: span, e: ty::t, a: ty::t, err: &ty::type_err) { // Don't report an error if expected is ty_err let resolved_expected = @@ -749,7 +750,7 @@ impl @mut InferCtxt { self.type_error_message(sp, mk_msg, a, Some(err)); } - fn replace_bound_regions_with_fresh_regions( + fn replace_bound_regions_with_fresh_regions(&self, span: span, fsig: &ty::FnSig) -> (ty::FnSig, isr_alist) { diff --git a/src/librustc/middle/typeck/infer/sub.rs b/src/librustc/middle/typeck/infer/sub.rs index 2c8c60a84fb8b..661c67dbefc62 100644 --- a/src/librustc/middle/typeck/infer/sub.rs +++ b/src/librustc/middle/typeck/infer/sub.rs @@ -33,30 +33,31 @@ pub fn macros() { pub enum Sub = CombineFields; // "subtype", "subregion" etc pub impl Combine for Sub { - fn infcx() -> @mut InferCtxt { self.infcx } - fn tag() -> ~str { ~"sub" } - fn a_is_expected() -> bool { self.a_is_expected } - fn span() -> span { self.span } + fn infcx(&self) -> @mut InferCtxt { self.infcx } + fn tag(&self) -> ~str { ~"sub" } + fn a_is_expected(&self) -> bool { self.a_is_expected } + fn span(&self) -> span { self.span } - fn sub() -> Sub { Sub(*self) } - fn lub() -> Lub { Lub(*self) } - fn glb() -> Glb { Glb(*self) } + fn sub(&self) -> Sub { Sub(**self) } + fn lub(&self) -> Lub { Lub(**self) } + fn glb(&self) -> Glb { Glb(**self) } - fn contratys(a: ty::t, b: ty::t) -> cres { + fn contratys(&self, a: ty::t, b: ty::t) -> cres { let opp = CombineFields { - a_is_expected: !self.a_is_expected,.. *self + a_is_expected: !self.a_is_expected,.. **self }; Sub(opp).tys(b, a) } - fn contraregions(a: ty::Region, b: ty::Region) -> cres { + fn contraregions(&self, a: ty::Region, b: ty::Region) + -> cres { let opp = CombineFields { - a_is_expected: !self.a_is_expected,.. *self + a_is_expected: !self.a_is_expected,.. 
**self }; Sub(opp).regions(b, a) } - fn regions(a: ty::Region, b: ty::Region) -> cres { + fn regions(&self, a: ty::Region, b: ty::Region) -> cres { debug!("%s.regions(%s, %s)", self.tag(), a.inf_str(self.infcx), @@ -69,7 +70,7 @@ pub impl Combine for Sub { } } - fn mts(a: ty::mt, b: ty::mt) -> cres { + fn mts(&self, a: ty::mt, b: ty::mt) -> cres { debug!("mts(%s <: %s)", a.inf_str(self.infcx), b.inf_str(self.infcx)); if a.mutbl != b.mutbl && b.mutbl != m_const { @@ -80,7 +81,7 @@ pub impl Combine for Sub { m_mutbl => { // If supertype is mut, subtype must match exactly // (i.e., invariant if mut): - eq_tys(&self, a.ty, b.ty).then(|| Ok(a) ) + eq_tys(self, a.ty, b.ty).then(|| Ok(a) ) } m_imm | m_const => { // Otherwise we can be covariant: @@ -89,19 +90,19 @@ pub impl Combine for Sub { } } - fn purities(a: purity, b: purity) -> cres { + fn purities(&self, a: purity, b: purity) -> cres { self.lub().purities(a, b).compare(b, || { - ty::terr_purity_mismatch(expected_found(&self, a, b)) + ty::terr_purity_mismatch(expected_found(self, a, b)) }) } - fn oncenesses(a: Onceness, b: Onceness) -> cres { + fn oncenesses(&self, a: Onceness, b: Onceness) -> cres { self.lub().oncenesses(a, b).compare(b, || { - ty::terr_onceness_mismatch(expected_found(&self, a, b)) + ty::terr_onceness_mismatch(expected_found(self, a, b)) }) } - fn tys(a: ty::t, b: ty::t) -> cres { + fn tys(&self, a: ty::t, b: ty::t) -> cres { debug!("%s.tys(%s, %s)", self.tag(), a.inf_str(self.infcx), b.inf_str(self.infcx)); if a == b { return Ok(a); } @@ -125,16 +126,16 @@ pub impl Combine for Sub { } (_, &ty::ty_bot) => { - Err(ty::terr_sorts(expected_found(&self, a, b))) + Err(ty::terr_sorts(expected_found(self, a, b))) } _ => { - super_tys(&self, a, b) + super_tys(self, a, b) } } } - fn fn_sigs(a: &ty::FnSig, b: &ty::FnSig) -> cres { + fn fn_sigs(&self, a: &ty::FnSig, b: &ty::FnSig) -> cres { debug!("fn_sigs(a=%s, b=%s)", a.inf_str(self.infcx), b.inf_str(self.infcx)); let _indenter = indenter(); @@ -175,7 +176,7 @@ pub impl Combine for Sub { debug!("b_sig=%s", b_sig.inf_str(self.infcx)); // Compare types now that bound regions have been replaced. - let sig = if_ok!(super_fn_sigs(&self, &a_sig, &b_sig)); + let sig = if_ok!(super_fn_sigs(self, &a_sig, &b_sig)); // Presuming type comparison succeeds, we need to check // that the skolemized regions do not "leak". 
@@ -212,53 +213,54 @@ pub impl Combine for Sub { // Traits please (FIXME: #2794): - fn sigils(p1: ast::Sigil, p2: ast::Sigil) -> cres { - super_sigils(&self, p1, p2) + fn sigils(&self, p1: ast::Sigil, p2: ast::Sigil) -> cres { + super_sigils(self, p1, p2) } - fn abis(p1: ast::Abi, p2: ast::Abi) -> cres { - super_abis(&self, p1, p2) + fn abis(&self, p1: ast::Abi, p2: ast::Abi) -> cres { + super_abis(self, p1, p2) } - fn flds(a: ty::field, b: ty::field) -> cres { - super_flds(&self, a, b) + fn flds(&self, a: ty::field, b: ty::field) -> cres { + super_flds(self, a, b) } - fn bare_fn_tys(a: &ty::BareFnTy, + fn bare_fn_tys(&self, a: &ty::BareFnTy, b: &ty::BareFnTy) -> cres { - super_bare_fn_tys(&self, a, b) + super_bare_fn_tys(self, a, b) } - fn closure_tys(a: &ty::ClosureTy, + fn closure_tys(&self, a: &ty::ClosureTy, b: &ty::ClosureTy) -> cres { - super_closure_tys(&self, a, b) + super_closure_tys(self, a, b) } - fn vstores(vk: ty::terr_vstore_kind, + fn vstores(&self, vk: ty::terr_vstore_kind, a: ty::vstore, b: ty::vstore) -> cres { - super_vstores(&self, vk, a, b) + super_vstores(self, vk, a, b) } - fn modes(a: ast::mode, b: ast::mode) -> cres { - super_modes(&self, a, b) + fn modes(&self, a: ast::mode, b: ast::mode) -> cres { + super_modes(self, a, b) } - fn args(a: ty::arg, b: ty::arg) -> cres { - super_args(&self, a, b) + fn args(&self, a: ty::arg, b: ty::arg) -> cres { + super_args(self, a, b) } - fn substs(did: ast::def_id, + fn substs(&self, did: ast::def_id, as_: &ty::substs, bs: &ty::substs) -> cres { - super_substs(&self, did, as_, bs) + super_substs(self, did, as_, bs) } - fn tps(as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { - super_tps(&self, as_, bs) + fn tps(&self, as_: &[ty::t], bs: &[ty::t]) -> cres<~[ty::t]> { + super_tps(self, as_, bs) } - fn self_tys(a: Option, b: Option) -> cres> { - super_self_tys(&self, a, b) + fn self_tys(&self, a: Option, b: Option) + -> cres> { + super_self_tys(self, a, b) } } diff --git a/src/librustc/middle/typeck/infer/unify.rs b/src/librustc/middle/typeck/infer/unify.rs index 4f85718ad1e53..d9b2b73890d42 100644 --- a/src/librustc/middle/typeck/infer/unify.rs +++ b/src/librustc/middle/typeck/infer/unify.rs @@ -38,7 +38,7 @@ pub struct Node { } pub trait UnifyVid { - static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt) + static fn appropriate_vals_and_bindings(&self, infcx: &v/mut InferCtxt) -> &v/mut ValsAndBindings; } @@ -147,7 +147,7 @@ pub impl InferCtxt { // doesn't have a subtyping relationship we need to worry about. 
pub trait SimplyUnifiable { - static fn to_type_err(expected_found) -> ty::type_err; + static fn to_type_err(&self, expected_found) -> ty::type_err; } pub fn mk_err(+a_is_expected: bool, @@ -238,35 +238,35 @@ pub impl InferCtxt { // ______________________________________________________________________ pub impl UnifyVid> for ty::TyVid { - static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt) + static fn appropriate_vals_and_bindings(&self, infcx: &v/mut InferCtxt) -> &v/mut ValsAndBindings> { return &mut infcx.ty_var_bindings; } } pub impl UnifyVid> for ty::IntVid { - static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt) + static fn appropriate_vals_and_bindings(&self, infcx: &v/mut InferCtxt) -> &v/mut ValsAndBindings> { return &mut infcx.int_var_bindings; } } pub impl SimplyUnifiable for IntVarValue { - static fn to_type_err(err: expected_found) + static fn to_type_err(&self, err: expected_found) -> ty::type_err { return ty::terr_int_mismatch(err); } } pub impl UnifyVid> for ty::FloatVid { - static fn appropriate_vals_and_bindings(infcx: &v/mut InferCtxt) + static fn appropriate_vals_and_bindings(&self, infcx: &v/mut InferCtxt) -> &v/mut ValsAndBindings> { return &mut infcx.float_var_bindings; } } pub impl SimplyUnifiable for ast::float_ty { - static fn to_type_err(err: expected_found) + static fn to_type_err(&self, err: expected_found) -> ty::type_err { return ty::terr_float_mismatch(err); } diff --git a/src/librustc/middle/typeck/mod.rs b/src/librustc/middle/typeck/mod.rs index 05820bed0d036..49b818328be06 100644 --- a/src/librustc/middle/typeck/mod.rs +++ b/src/librustc/middle/typeck/mod.rs @@ -172,8 +172,8 @@ pub enum vtable_origin { } pub impl vtable_origin { - fn to_str(tcx: ty::ctxt) -> ~str { - match self { + fn to_str(&self, tcx: ty::ctxt) -> ~str { + match *self { vtable_static(def_id, ref tys, ref vtable_res) => { fmt!("vtable_static(%?:%s, %?, %?)", def_id, ty::item_path_str(tcx, def_id), @@ -279,17 +279,17 @@ pub fn require_same_types( pub type isr_alist = @List<(ty::bound_region, ty::Region)>; trait get_and_find_region { - fn get(br: ty::bound_region) -> ty::Region; - fn find(br: ty::bound_region) -> Option; + fn get(&self, br: ty::bound_region) -> ty::Region; + fn find(&self, br: ty::bound_region) -> Option; } impl get_and_find_region for isr_alist { - fn get(br: ty::bound_region) -> ty::Region { + fn get(&self, br: ty::bound_region) -> ty::Region { self.find(br).get() } - fn find(br: ty::bound_region) -> Option { - for list::each(self) |isr| { + fn find(&self, br: ty::bound_region) -> Option { + for list::each(*self) |isr| { let (isr_br, isr_r) = *isr; if isr_br == br { return Some(isr_r); } } diff --git a/src/librustc/middle/typeck/rscope.rs b/src/librustc/middle/typeck/rscope.rs index d5834af3a574a..141a730ca8d34 100644 --- a/src/librustc/middle/typeck/rscope.rs +++ b/src/librustc/middle/typeck/rscope.rs @@ -19,21 +19,21 @@ use syntax::codemap::span; use syntax::parse::token::special_idents; pub trait region_scope { - pure fn anon_region(span: span) -> Result; - pure fn self_region(span: span) -> Result; - pure fn named_region(span: span, id: ast::ident) + pure fn anon_region(&self, span: span) -> Result; + pure fn self_region(&self, span: span) -> Result; + pure fn named_region(&self, span: span, id: ast::ident) -> Result; } pub enum empty_rscope { empty_rscope } pub impl region_scope for empty_rscope { - pure fn anon_region(_span: span) -> Result { + pure fn anon_region(&self, _span: span) -> Result { result::Ok(ty::re_static) } - pure fn 
self_region(_span: span) -> Result { + pure fn self_region(&self, _span: span) -> Result { result::Err(~"only the static region is allowed here") } - pure fn named_region(_span: span, _id: ast::ident) + pure fn named_region(&self, _span: span, _id: ast::ident) -> Result { result::Err(~"only the static region is allowed here") } @@ -41,17 +41,17 @@ pub impl region_scope for empty_rscope { pub enum type_rscope = Option; pub impl region_scope for type_rscope { - pure fn anon_region(_span: span) -> Result { - match *self { + pure fn anon_region(&self, _span: span) -> Result { + match **self { Some(_) => result::Ok(ty::re_bound(ty::br_self)), None => result::Err(~"to use region types here, the containing \ type must be declared with a region bound") } } - pure fn self_region(span: span) -> Result { + pure fn self_region(&self, span: span) -> Result { self.anon_region(span) } - pure fn named_region(span: span, id: ast::ident) + pure fn named_region(&self, span: span, id: ast::ident) -> Result { do empty_rscope.named_region(span, id).chain_err |_e| { result::Err(~"named regions other than `self` are not \ @@ -75,13 +75,13 @@ pub fn in_anon_rscope(self: RS, @anon_rscope {anon: r, base: self as region_scope} } pub impl region_scope for @anon_rscope { - pure fn anon_region(_span: span) -> Result { + pure fn anon_region(&self, _span: span) -> Result { result::Ok(self.anon) } - pure fn self_region(span: span) -> Result { + pure fn self_region(&self, span: span) -> Result { self.base.self_region(span) } - pure fn named_region(span: span, id: ast::ident) + pure fn named_region(&self, span: span, id: ast::ident) -> Result { self.base.named_region(span, id) } @@ -98,7 +98,7 @@ pub fn in_binding_rscope(self: RS) @mut binding_rscope { base: base, anon_bindings: 0 } } pub impl region_scope for @mut binding_rscope { - pure fn anon_region(_span: span) -> Result { + pure fn anon_region(&self, _span: span) -> Result { // XXX: Unsafe to work around purity unsafe { let idx = self.anon_bindings; @@ -106,10 +106,10 @@ pub impl region_scope for @mut binding_rscope { result::Ok(ty::re_bound(ty::br_anon(idx))) } } - pure fn self_region(span: span) -> Result { + pure fn self_region(&self, span: span) -> Result { self.base.self_region(span) } - pure fn named_region(span: span, id: ast::ident) + pure fn named_region(&self, span: span, id: ast::ident) -> Result { do self.base.named_region(span, id).chain_err |_e| { result::Ok(ty::re_bound(ty::br_named(id))) diff --git a/src/librustdoc/path_pass.rs b/src/librustdoc/path_pass.rs index ac50221445f73..c03eb06d2dde0 100644 --- a/src/librustdoc/path_pass.rs +++ b/src/librustdoc/path_pass.rs @@ -31,14 +31,14 @@ pub fn mk_pass() -> Pass { struct Ctxt { srv: astsrv::Srv, - mut path: ~[~str] + path: @mut ~[~str] } impl Clone for Ctxt { fn clone(&self) -> Ctxt { Ctxt { srv: self.srv.clone(), - path: copy self.path + path: @mut copy *self.path } } } @@ -47,7 +47,7 @@ impl Clone for Ctxt { fn run(srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc { let ctxt = Ctxt { srv: srv, - mut path: ~[] + path: @mut ~[] }; let fold = Fold { ctxt: ctxt.clone(), @@ -61,7 +61,7 @@ fn run(srv: astsrv::Srv, doc: doc::Doc) -> doc::Doc { fn fold_item(fold: &fold::Fold, doc: doc::ItemDoc) -> doc::ItemDoc { doc::ItemDoc { - path: copy fold.ctxt.path, + path: copy *fold.ctxt.path, .. 
doc } } diff --git a/src/librustpkg/rustpkg.rc b/src/librustpkg/rustpkg.rc index 78db7dccb86b6..c16a56249ffab 100644 --- a/src/librustpkg/rustpkg.rc +++ b/src/librustpkg/rustpkg.rc @@ -252,7 +252,7 @@ impl PackageScript { struct Ctx { cfgs: ~[~str], json: bool, - mut dep_cache: LinearMap<~str, bool> + dep_cache: @mut LinearMap<~str, bool> } impl Ctx { @@ -912,7 +912,7 @@ pub fn main() { Ctx { cfgs: cfgs, json: json, - mut dep_cache: LinearMap::new() + dep_cache: @mut LinearMap::new() }.run(cmd, args); } diff --git a/src/librustpkg/util.rs b/src/librustpkg/util.rs index 5e549d9649080..64a6d9c50554a 100644 --- a/src/librustpkg/util.rs +++ b/src/librustpkg/util.rs @@ -72,11 +72,11 @@ struct ReadyCtx { sess: session::Session, crate: @ast::crate, ext_cx: ext_ctxt, - mut path: ~[ast::ident], - mut fns: ~[ListenerFn] + path: ~[ast::ident], + fns: ~[ListenerFn] } -fn fold_mod(_ctx: @ReadyCtx, m: ast::_mod, +fn fold_mod(_ctx: @mut ReadyCtx, m: ast::_mod, fold: fold::ast_fold) -> ast::_mod { fn strip_main(item: @ast::item) -> @ast::item { @ast::item { @@ -95,7 +95,7 @@ fn fold_mod(_ctx: @ReadyCtx, m: ast::_mod, }, fold) } -fn fold_item(ctx: @ReadyCtx, item: @ast::item, +fn fold_item(ctx: @mut ReadyCtx, item: @ast::item, fold: fold::ast_fold) -> Option<@ast::item> { ctx.path.push(item.ident); @@ -133,7 +133,7 @@ fn fold_item(ctx: @ReadyCtx, item: @ast::item, res } -fn add_pkg_module(ctx: @ReadyCtx, m: ast::_mod) -> ast::_mod { +fn add_pkg_module(ctx: @mut ReadyCtx, m: ast::_mod) -> ast::_mod { let listeners = mk_listener_vec(ctx); let ext_cx = ctx.ext_cx; let item = quote_item! ( @@ -152,24 +152,25 @@ fn add_pkg_module(ctx: @ReadyCtx, m: ast::_mod) -> ast::_mod { } } -fn mk_listener_vec(ctx: @ReadyCtx) -> @ast::expr { +fn mk_listener_vec(ctx: @mut ReadyCtx) -> @ast::expr { let fns = ctx.fns; let descs = do fns.map |listener| { mk_listener_rec(ctx, *listener) }; - build::mk_slice_vec_e(ctx.ext_cx, dummy_sp(), descs) + let ext_cx = ctx.ext_cx; + build::mk_slice_vec_e(ext_cx, dummy_sp(), descs) } -fn mk_listener_rec(ctx: @ReadyCtx, listener: ListenerFn) -> @ast::expr { - +fn mk_listener_rec(ctx: @mut ReadyCtx, listener: ListenerFn) -> @ast::expr { let span = listener.span; let cmds = do listener.cmds.map |&cmd| { - build::mk_base_str(ctx.ext_cx, span, cmd) + let ext_cx = ctx.ext_cx; + build::mk_base_str(ext_cx, span, cmd) }; - let cmds_expr = build::mk_slice_vec_e(ctx.ext_cx, span, cmds); - let cb_expr = build::mk_path(ctx.ext_cx, span, copy listener.path); let ext_cx = ctx.ext_cx; + let cmds_expr = build::mk_slice_vec_e(ext_cx, span, cmds); + let cb_expr = build::mk_path(ext_cx, span, copy listener.path); quote_expr!( Listener { @@ -182,12 +183,12 @@ fn mk_listener_rec(ctx: @ReadyCtx, listener: ListenerFn) -> @ast::expr { /// Generate/filter main function, add the list of commands, etc. 
pub fn ready_crate(sess: session::Session, crate: @ast::crate) -> @ast::crate { - let ctx = @ReadyCtx { + let ctx = @mut ReadyCtx { sess: sess, crate: crate, ext_cx: mk_ctxt(sess.parse_sess, copy sess.opts.cfg), - mut path: ~[], - mut fns: ~[] + path: ~[], + fns: ~[] }; let precursor = @fold::AstFoldFns { // fold_crate: fold::wrap(|a, b| fold_crate(ctx, a, b)), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 3a863fc7ac5dc..1ab55fe9035bc 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -242,7 +242,7 @@ pub struct FileMap { /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code - mut lines: ~[BytePos], + lines: @mut ~[BytePos], /// Locations of multi-byte characters in the source code multibyte_chars: DVec } @@ -312,7 +312,7 @@ pub impl CodeMap { let filemap = @FileMap { name: filename, substr: substr, src: src, start_pos: BytePos(start_pos), - mut lines: ~[], + lines: @mut ~[], multibyte_chars: DVec() }; @@ -439,7 +439,7 @@ priv impl CodeMap { let idx = self.lookup_filemap_idx(pos); let f = self.files[idx]; let mut a = 0u; - let mut b = vec::len(f.lines); + let mut b = f.lines.len(); while b - a > 1u { let m = (a + b) / 2u; if f.lines[m] > pos { b = m; } else { a = m; } diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 87e1dd2d22cb5..27483ae94a5bd 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -158,7 +158,7 @@ pub fn mk_handler(emitter: Option) -> @handler { } }; - @mut HandlerT { mut err_count: 0, emit: emit } as @handler + @mut HandlerT { err_count: 0, emit: emit } as @handler } #[deriving_eq] diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index eb92b23c9d7e7..f3a74302400c9 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -339,7 +339,7 @@ pub fn get_exprs_from_tts(cx: ext_ctxt, tts: ~[ast::token_tree]) cx.cfg(), tts); let mut es = ~[]; - while p.token != token::EOF { + while *p.token != token::EOF { if es.len() != 0 { p.eat(token::COMMA); } diff --git a/src/libsyntax/ext/pipes/mod.rs b/src/libsyntax/ext/pipes/mod.rs index 6d117f5ad235c..8b8e48bd5229b 100644 --- a/src/libsyntax/ext/pipes/mod.rs +++ b/src/libsyntax/ext/pipes/mod.rs @@ -73,7 +73,7 @@ pub fn expand_proto(cx: ext_ctxt, _sp: span, id: ast::ident, let rdr = tt_rdr as reader; let rust_parser = Parser(sess, cfg, rdr.dup()); - let proto = rust_parser.parse_proto(cx.str_of(id)); + let mut proto = rust_parser.parse_proto(cx.str_of(id)); // check for errors visit(proto, cx); diff --git a/src/libsyntax/ext/pipes/parse_proto.rs b/src/libsyntax/ext/pipes/parse_proto.rs index a2f881fc19f71..66feb7cc753cf 100644 --- a/src/libsyntax/ext/pipes/parse_proto.rs +++ b/src/libsyntax/ext/pipes/parse_proto.rs @@ -25,7 +25,7 @@ pub trait proto_parser { pub impl proto_parser for parser::Parser { fn parse_proto(&self, id: ~str) -> protocol { - let proto = protocol(id, self.span); + let proto = protocol(id, *self.span); self.parse_seq_to_before_end(token::EOF, SeqSep { sep: None, @@ -40,7 +40,7 @@ pub impl proto_parser for parser::Parser { let name = *self.interner.get(id); self.expect(token::COLON); - let dir = match copy self.token { + let dir = match *self.token { token::IDENT(n, _) => self.interner.get(n), _ => fail!() }; @@ -51,10 +51,11 @@ pub impl proto_parser for parser::Parser { _ => fail!() }; - let typarms = if self.token == token::LT { + let typarms = if *self.token == token::LT { self.parse_ty_params() - } - else { ~[] }; + 
} else { + ~[] + }; let state = proto.add_state_poly(name, id, dir, typarms); @@ -69,7 +70,7 @@ pub impl proto_parser for parser::Parser { fn parse_message(&self, state: state) { let mname = *self.interner.get(self.parse_ident()); - let args = if self.token == token::LPAREN { + let args = if *self.token == token::LPAREN { self.parse_unspanned_seq(token::LPAREN, token::RPAREN, SeqSep { sep: Some(token::COMMA), @@ -80,10 +81,10 @@ pub impl proto_parser for parser::Parser { self.expect(token::RARROW); - let next = match copy self.token { + let next = match *self.token { token::IDENT(_, _) => { let name = *self.interner.get(self.parse_ident()); - let ntys = if self.token == token::LT { + let ntys = if *self.token == token::LT { self.parse_unspanned_seq(token::LT, token::GT, SeqSep { sep: Some(token::COMMA), @@ -101,7 +102,7 @@ pub impl proto_parser for parser::Parser { _ => self.fatal(~"invalid next state") }; - state.add_message(mname, copy self.span, args, next); + state.add_message(mname, *self.span, args, next); } } diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 48bd8b0329742..5fdba837da41d 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -27,8 +27,8 @@ use core::to_str::ToStr; use core::vec; pub trait gen_send { - fn gen_send(&self, cx: ext_ctxt, try: bool) -> @ast::item; - fn to_ty(&self, cx: ext_ctxt) -> @ast::Ty; + fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item; + fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty; } pub trait to_type_decls { @@ -47,8 +47,11 @@ pub trait gen_init { } pub impl gen_send for message { - fn gen_send(&self, cx: ext_ctxt, try: bool) -> @ast::item { + fn gen_send(&mut self, cx: ext_ctxt, try: bool) -> @ast::item { debug!("pipec: gen_send"); + let name = self.name(); + let params = self.get_params(); + match *self { message(ref _id, span, ref tys, this, Some(ref next_state)) => { debug!("pipec: next state exists"); @@ -67,7 +70,7 @@ pub impl gen_send for message { args_ast); let mut body = ~"{\n"; - body += fmt!("use super::%s;\n", self.name()); + body += fmt!("use super::%s;\n", name); if this.proto.is_bounded() { let (sp, rp) = match (this.dir, next.dir) { @@ -96,7 +99,7 @@ pub impl gen_send for message { body += fmt!("let %s = ::pipes::entangle();\n", pat); } body += fmt!("let message = %s(%s);\n", - self.name(), + name, str::connect(vec::append_one( arg_names.map(|x| cx.str_of(*x)), ~"s"), ~", ")); @@ -121,13 +124,12 @@ pub impl gen_send for message { rty = cx.ty_option(rty); } - let name = cx.ident_of(if try { ~"try_" + self.name() - } else { self.name() } ); + let name = cx.ident_of(if try { ~"try_" + name } else { name } ); cx.item_fn_poly(name, args_ast, rty, - self.get_params(), + params, cx.expr_block(body)) } @@ -156,10 +158,8 @@ pub impl gen_send for message { }; let mut body = ~"{ "; - body += fmt!("use super::%s;\n", self.name()); - body += fmt!("let message = %s%s;\n", - self.name(), - message_args); + body += fmt!("use super::%s;\n", name); + body += fmt!("let message = %s%s;\n", name, message_args); if !try { body += fmt!("::pipes::send(pipe, message);\n"); @@ -175,10 +175,7 @@ pub impl gen_send for message { let body = cx.parse_expr(body); - let name = if try { - ~"try_" + self.name() - } - else { self.name() }; + let name = if try { ~"try_" + name } else { name }; cx.item_fn_poly(cx.ident_of(name), args_ast, @@ -187,13 +184,13 @@ pub impl gen_send for message { } else { cx.ty_nil_ast_builder() }, - self.get_params(), + params, cx.expr_block(body)) } } } 
- fn to_ty(&self, cx: ext_ctxt) -> @ast::Ty { + fn to_ty(&mut self, cx: ext_ctxt) -> @ast::Ty { cx.ty_path_ast_builder(path(~[cx.ident_of(self.name())], self.span()) .add_tys(cx.ty_vars_global(self.get_params()))) } @@ -259,10 +256,14 @@ pub impl to_type_decls for state { recv => (*self).dir.reverse() }; let mut items = ~[]; - for self.messages.each |m| { - if dir == send { - items.push(m.gen_send(cx, true)); - items.push(m.gen_send(cx, false)); + + { + let messages = &mut *self.messages; + for vec::each_mut(*messages) |m| { + if dir == send { + items.push(m.gen_send(cx, true)); + items.push(m.gen_send(cx, false)); + } } } @@ -393,7 +394,8 @@ pub impl gen_init for protocol { } cx.ty_path_ast_builder(path(~[cx.ident_of(~"super"), - cx.ident_of(~"__Buffer")], self.span) + cx.ident_of(~"__Buffer")], + copy self.span) .add_tys(cx.ty_vars_global(params))) } @@ -451,12 +453,12 @@ pub impl gen_init for protocol { } items.push(cx.item_mod(cx.ident_of(~"client"), - self.span, + copy self.span, client_states)); items.push(cx.item_mod(cx.ident_of(~"server"), - self.span, + copy self.span, server_states)); - cx.item_mod(cx.ident_of(self.name), self.span, items) + cx.item_mod(cx.ident_of(self.name), copy self.span, items) } } diff --git a/src/libsyntax/ext/pipes/proto.rs b/src/libsyntax/ext/pipes/proto.rs index da67e48dfa6e6..7c6dc1f937dca 100644 --- a/src/libsyntax/ext/pipes/proto.rs +++ b/src/libsyntax/ext/pipes/proto.rs @@ -16,7 +16,6 @@ use ext::base::ext_ctxt; use ext::pipes::ast_builder::{append_types, ext_ctxt_ast_builder, path}; use core::cmp; -use core::dvec::DVec; use core::to_str::ToStr; #[deriving_eq] @@ -45,26 +44,24 @@ pub struct next_state { tys: ~[@ast::Ty], } -pub enum message { - // name, span, data, current state, next state - message(~str, span, ~[@ast::Ty], state, Option) -} +// name, span, data, current state, next state +pub struct message(~str, span, ~[@ast::Ty], state, Option); pub impl message { - fn name(&self) -> ~str { + fn name(&mut self) -> ~str { match *self { message(ref id, _, _, _, _) => (*id) } } - fn span(&self) -> span { + fn span(&mut self) -> span { match *self { message(_, span, _, _, _) => span } } /// Return the type parameters actually used by this message - fn get_params(&self) -> ~[ast::ty_param] { + fn get_params(&mut self) -> ~[ast::ty_param] { match *self { message(_, _, _, this, _) => this.ty_params } @@ -80,7 +77,7 @@ pub struct state_ { span: span, dir: direction, ty_params: ~[ast::ty_param], - messages: DVec, + messages: @mut ~[message], proto: protocol } @@ -121,17 +118,17 @@ pub impl state_ { } } -pub type protocol = @protocol_; +pub type protocol = @mut protocol_; pub fn protocol(name: ~str, +span: span) -> protocol { - @protocol_(name, span) + @mut protocol_(name, span) } pub fn protocol_(name: ~str, span: span) -> protocol_ { protocol_ { name: name, span: span, - states: DVec(), + states: @mut ~[], bounded: None } } @@ -139,30 +136,30 @@ pub fn protocol_(name: ~str, span: span) -> protocol_ { pub struct protocol_ { name: ~str, span: span, - states: DVec, + states: @mut ~[state], - mut bounded: Option, + bounded: Option, } pub impl protocol_ { /// Get a state. 
- fn get_state(&self, name: ~str) -> state { + fn get_state(&mut self, name: ~str) -> state { self.states.find(|i| i.name == name).get() } - fn get_state_by_id(&self, id: uint) -> state { self.states[id] } + fn get_state_by_id(&mut self, id: uint) -> state { self.states[id] } - fn has_state(&self, name: ~str) -> bool { + fn has_state(&mut self, name: ~str) -> bool { self.states.find(|i| i.name == name).is_some() } - fn filename(&self) -> ~str { + fn filename(&mut self) -> ~str { ~"proto://" + self.name } - fn num_states(&self) -> uint { self.states.len() } + fn num_states(&mut self) -> uint { self.states.len() } - fn has_ty_params(&self) -> bool { + fn has_ty_params(&mut self) -> bool { for self.states.each |s| { if s.ty_params.len() > 0 { return true; @@ -170,7 +167,7 @@ pub impl protocol_ { } false } - fn is_bounded(&self) -> bool { + fn is_bounded(&mut self) -> bool { let bounded = self.bounded.get(); bounded } @@ -179,7 +176,7 @@ pub impl protocol_ { pub impl protocol { fn add_state_poly(&self, name: ~str, ident: ast::ident, dir: direction, +ty_params: ~[ast::ty_param]) -> state { - let messages = DVec(); + let messages = @mut ~[]; let state = @state_ { id: self.states.len(), diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index ffa6101d58fcc..d529ee0c01b01 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -515,7 +515,7 @@ fn expand_tts(cx: ext_ctxt, // try removing it when enough of them are gone. let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts); - p.quote_depth += 1u; + *p.quote_depth += 1u; let tts = p.parse_all_token_trees(); p.abort_if_errors(); diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index d51ddae6db2f3..890420edf6d68 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -114,8 +114,8 @@ pub fn is_some(&&mpu: matcher_pos_up) -> bool { pub struct MatcherPos { elts: ~[ast::matcher], // maybe should be /&? Need to understand regions. sep: Option, - mut idx: uint, - mut up: matcher_pos_up, // mutable for swapping only + idx: uint, + up: matcher_pos_up, // mutable for swapping only matches: ~[DVec<@named_match>], match_lo: uint, match_hi: uint, sp_lo: BytePos, @@ -155,8 +155,8 @@ pub fn initial_matcher_pos(ms: ~[matcher], sep: Option, lo: BytePos) ~MatcherPos { elts: ms, sep: sep, - mut idx: 0u, - mut up: matcher_pos_up(None), + idx: 0u, + up: matcher_pos_up(None), matches: copy vec::from_fn(count_names(ms), |_i| dvec::DVec()), match_lo: 0u, match_hi: match_idx_hi, @@ -267,7 +267,7 @@ pub fn parse(sess: @mut ParseSess, if idx == len { // pop from the matcher position - let new_pos = copy_up(ei.up); + let mut new_pos = copy_up(ei.up); // update matches (the MBE "parse tree") by appending // each tree as a subtree. @@ -295,13 +295,13 @@ pub fn parse(sess: @mut ParseSess, match copy ei.sep { Some(ref t) if idx == len => { // we need a separator if tok == (*t) { //pass the separator - let ei_t = ei; + let mut ei_t = ei; ei_t.idx += 1; next_eis.push(ei_t); } } _ => { // we don't need a separator - let ei_t = ei; + let mut ei_t = ei; ei_t.idx = 0; cur_eis.push(ei_t); } @@ -315,7 +315,7 @@ pub fn parse(sess: @mut ParseSess, match_seq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { if zero_ok { - let new_ei = copy ei; + let mut new_ei = copy ei; new_ei.idx += 1u; //we specifically matched zero repeats. 
for uint::range(match_idx_lo, match_idx_hi) |idx| { @@ -331,8 +331,8 @@ pub fn parse(sess: @mut ParseSess, cur_eis.push(~MatcherPos { elts: (*matchers), sep: (*sep), - mut idx: 0u, - mut up: matcher_pos_up(Some(ei_t)), + idx: 0u, + up: matcher_pos_up(Some(ei_t)), matches: matches, match_lo: match_idx_lo, match_hi: match_idx_hi, sp_lo: sp.lo @@ -340,7 +340,7 @@ pub fn parse(sess: @mut ParseSess, } match_nonterminal(_,_,_) => { bb_eis.push(ei) } match_tok(ref t) => { - let ei_t = ei; + let mut ei_t = ei; if (*t) == tok { ei_t.idx += 1; next_eis.push(ei_t); @@ -388,7 +388,7 @@ pub fn parse(sess: @mut ParseSess, } else /* bb_eis.len() == 1 */ { let rust_parser = Parser(sess, cfg, rdr.dup()); - let ei = bb_eis.pop(); + let mut ei = bb_eis.pop(); match ei.elts[ei.idx].node { match_nonterminal(_, name, idx) => { ei.matches[idx].push(@matched_nonterminal( @@ -421,16 +421,16 @@ pub fn parse_nt(p: Parser, name: ~str) -> nonterminal { ~"expr" => token::nt_expr(p.parse_expr()), ~"ty" => token::nt_ty(p.parse_ty(false /* no need to disambiguate*/)), // this could be handled like a token, since it is one - ~"ident" => match copy p.token { + ~"ident" => match *p.token { token::IDENT(sn,b) => { p.bump(); token::nt_ident(sn,b) } _ => p.fatal(~"expected ident, found " - + token::to_str(p.reader.interner(), copy p.token)) + + token::to_str(p.reader.interner(), *p.token)) }, ~"path" => token::nt_path(p.parse_path_with_tps(false)), ~"tt" => { - p.quote_depth += 1u; //but in theory, non-quoted tts might be useful + *p.quote_depth += 1u; //but in theory, non-quoted tts might be useful let res = token::nt_tt(@p.parse_token_tree()); - p.quote_depth -= 1u; + *p.quote_depth -= 1u; res } ~"matchers" => token::nt_matchers(p.parse_matchers()), diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 3817f89b8173f..a9502ff29020e 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -59,7 +59,7 @@ pub fn new_tt_reader(sp_diag: span_handler, let r = @mut TtReader { sp_diag: sp_diag, interner: itr, - mut cur: @mut TtFrame { + cur: @mut TtFrame { readme: @mut src, idx: 0u, dotdotdoted: false, diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 4f64d7bed3124..c0c97a0b9eb59 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -37,7 +37,7 @@ impl parser_attr for Parser { fn parse_outer_attributes() -> ~[ast::attribute] { let mut attrs: ~[ast::attribute] = ~[]; loop { - match copy self.token { + match *self.token { token::POUND => { if self.look_ahead(1u) != token::LBRACKET { break; @@ -90,14 +90,14 @@ impl parser_attr for Parser { let mut inner_attrs: ~[ast::attribute] = ~[]; let mut next_outer_attrs: ~[ast::attribute] = ~[]; loop { - match copy self.token { + match *self.token { token::POUND => { if self.look_ahead(1u) != token::LBRACKET { // This is an extension break; } let attr = self.parse_attribute(ast::attr_inner); - if self.token == token::SEMI { + if *self.token == token::SEMI { self.bump(); inner_attrs += ~[attr]; } else { @@ -131,7 +131,7 @@ impl parser_attr for Parser { fn parse_meta_item() -> @ast::meta_item { let lo = self.span.lo; let name = self.id_to_str(self.parse_ident()); - match self.token { + match *self.token { token::EQ => { self.bump(); let lit = self.parse_lit(); @@ -157,7 +157,7 @@ impl parser_attr for Parser { } fn parse_optional_meta() -> ~[@ast::meta_item] { - match self.token { + match *self.token { token::LPAREN => return self.parse_meta_seq(), _ => return ~[] } diff --git 
a/src/libsyntax/parse/common.rs b/src/libsyntax/parse/common.rs index 22004be87adc6..57d62d628dc6f 100644 --- a/src/libsyntax/parse/common.rs +++ b/src/libsyntax/parse/common.rs @@ -53,25 +53,25 @@ pub fn token_to_str(reader: reader, ++token: token::Token) -> ~str { pub impl Parser { fn unexpected_last(t: token::Token) -> ! { self.span_fatal( - copy self.last_span, + *self.last_span, ~"unexpected token: `" + token_to_str(self.reader, t) + ~"`"); } fn unexpected() -> ! { self.fatal(~"unexpected token: `" - + token_to_str(self.reader, self.token) + ~"`"); + + token_to_str(self.reader, *self.token) + ~"`"); } // expect and consume the token t. Signal an error if // the next token is not t. fn expect(t: token::Token) { - if self.token == t { + if *self.token == t { self.bump(); } else { let mut s: ~str = ~"expected `"; s += token_to_str(self.reader, t); s += ~"` but found `"; - s += token_to_str(self.reader, self.token); + s += token_to_str(self.reader, *self.token); self.fatal(s + ~"`"); } } @@ -79,12 +79,12 @@ pub impl Parser { fn parse_ident() -> ast::ident { self.check_strict_keywords(); self.check_reserved_keywords(); - match copy self.token { + match *self.token { token::IDENT(i, _) => { self.bump(); return i; } token::INTERPOLATED(token::nt_ident(*)) => { self.bug( ~"ident interpolation not converted to real token"); } _ => { self.fatal(~"expected ident, found `" - + token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } } } @@ -104,7 +104,7 @@ pub impl Parser { // consume token 'tok' if it exists. Returns true if the given // token was present, false otherwise. fn eat(tok: token::Token) -> bool { - return if self.token == tok { self.bump(); true } else { false }; + return if *self.token == tok { self.bump(); true } else { false }; } // Storing keywords as interned idents instead of strings would be nifty. @@ -129,7 +129,7 @@ pub impl Parser { } fn is_keyword(word: ~str) -> bool { - self.token_is_keyword(word, self.token) + self.token_is_keyword(word, *self.token) } fn is_any_keyword(tok: token::Token) -> bool { @@ -143,7 +143,7 @@ pub impl Parser { fn eat_keyword(word: ~str) -> bool { self.require_keyword(word); - let is_kw = match self.token { + let is_kw = match *self.token { token::IDENT(sid, false) => (word == *self.id_to_str(sid)), _ => false }; @@ -155,7 +155,7 @@ pub impl Parser { self.require_keyword(word); if !self.eat_keyword(word) { self.fatal(~"expected `" + word + ~"`, found `" + - token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } } @@ -165,9 +165,9 @@ pub impl Parser { } fn check_strict_keywords() { - match self.token { + match *self.token { token::IDENT(_, false) => { - let w = token_to_str(self.reader, self.token); + let w = token_to_str(self.reader, *self.token); self.check_strict_keywords_(w); } _ => () @@ -185,9 +185,9 @@ pub impl Parser { } fn check_reserved_keywords() { - match self.token { + match *self.token { token::IDENT(_, false) => { - let w = token_to_str(self.reader, self.token); + let w = token_to_str(self.reader, *self.token); self.check_reserved_keywords_(w); } _ => () @@ -203,9 +203,9 @@ pub impl Parser { // expect and consume a GT. if a >> is seen, replace it // with a single > and continue. 
fn expect_gt() { - if self.token == token::GT { + if *self.token == token::GT { self.bump(); - } else if self.token == token::BINOP(token::SHR) { + } else if *self.token == token::BINOP(token::SHR) { self.replace_token(token::GT, self.span.lo + BytePos(1u), self.span.hi); @@ -213,7 +213,7 @@ pub impl Parser { let mut s: ~str = ~"expected `"; s += token_to_str(self.reader, token::GT); s += ~"`, found `"; - s += token_to_str(self.reader, self.token); + s += token_to_str(self.reader, *self.token); s += ~"`"; self.fatal(s); } @@ -225,8 +225,8 @@ pub impl Parser { f: fn(Parser) -> T) -> ~[T] { let mut first = true; let mut v = ~[]; - while self.token != token::GT - && self.token != token::BINOP(token::SHR) { + while *self.token != token::GT + && *self.token != token::BINOP(token::SHR) { match sep { Some(ref t) => { if first { first = false; } @@ -276,7 +276,7 @@ pub impl Parser { f: fn(Parser) -> T) -> ~[T] { let mut first: bool = true; let mut v: ~[T] = ~[]; - while self.token != ket { + while *self.token != ket { match sep.sep { Some(ref t) => { if first { first = false; } @@ -284,7 +284,7 @@ pub impl Parser { } _ => () } - if sep.trailing_sep_allowed && self.token == ket { break; } + if sep.trailing_sep_allowed && *self.token == ket { break; } v.push(f(self)); } return v; @@ -293,8 +293,8 @@ pub impl Parser { // parse a sequence, including the closing delimiter. The function // f must consume tokens until reaching the next separator or // closing bracket. - fn parse_unspanned_seq(bra: token::Token, - ket: token::Token, + fn parse_unspanned_seq(+bra: token::Token, + +ket: token::Token, sep: SeqSep, f: fn(Parser) -> T) -> ~[T] { self.expect(bra); diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 4f330ea86b0b6..5fa6115938506 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -141,7 +141,7 @@ pub fn parse_tts_from_source_str(name: ~str, sess: @mut ParseSess) -> ~[ast::token_tree] { let p = new_parser_from_source_str(sess, cfg, name, codemap::FssNone, source); - p.quote_depth += 1u; + *p.quote_depth += 1u; let r = p.parse_all_token_trees(); p.abort_if_errors(); return r; diff --git a/src/libsyntax/parse/obsolete.rs b/src/libsyntax/parse/obsolete.rs index f5ee5bd802907..1ae8786e09bb2 100644 --- a/src/libsyntax/parse/obsolete.rs +++ b/src/libsyntax/parse/obsolete.rs @@ -158,7 +158,7 @@ pub impl Parser { } fn is_obsolete_ident(ident: &str) -> bool { - self.token_is_obsolete_ident(ident, copy self.token) + self.token_is_obsolete_ident(ident, *self.token) } fn eat_obsolete_ident(ident: &str) -> bool { @@ -172,7 +172,7 @@ pub impl Parser { fn try_parse_obsolete_struct_ctor() -> bool { if self.eat_obsolete_ident("new") { - self.obsolete(copy self.last_span, ObsoleteStructCtor); + self.obsolete(*self.last_span, ObsoleteStructCtor); self.parse_fn_decl(|p| p.parse_arg()); self.parse_block(); true @@ -182,13 +182,13 @@ pub impl Parser { } fn try_parse_obsolete_with() -> bool { - if self.token == token::COMMA + if *self.token == token::COMMA && self.token_is_obsolete_ident("with", self.look_ahead(1u)) { self.bump(); } if self.eat_obsolete_ident("with") { - self.obsolete(copy self.last_span, ObsoleteWith); + self.obsolete(*self.last_span, ObsoleteWith); self.parse_expr(); true } else { @@ -198,10 +198,10 @@ pub impl Parser { fn try_parse_obsolete_priv_section() -> bool { if self.is_keyword(~"priv") && self.look_ahead(1) == token::LBRACE { - self.obsolete(copy self.span, ObsoletePrivSection); + self.obsolete(*self.span, ObsoletePrivSection); 
self.eat_keyword(~"priv"); self.bump(); - while self.token != token::RBRACE { + while *self.token != token::RBRACE { self.parse_single_class_item(ast::private); } self.bump(); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index c7c8836126bdb..9bac163dab6ef 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -127,7 +127,7 @@ enum view_item_parse_mode { The important thing is to make sure that lookahead doesn't balk at INTERPOLATED tokens */ macro_rules! maybe_whole_expr ( - ($p:expr) => ( match copy $p.token { + ($p:expr) => ( match *$p.token { INTERPOLATED(token::nt_expr(e)) => { $p.bump(); return e; @@ -142,28 +142,28 @@ macro_rules! maybe_whole_expr ( ) macro_rules! maybe_whole ( - ($p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(ref x)) => { $p.bump(); return (*x); } + ($p:expr, $constructor:ident) => ( match *$p.token { + INTERPOLATED(token::$constructor(x)) => { $p.bump(); return x; } _ => () }) ; - (deref $p:expr, $constructor:ident) => ( match copy $p.token { + (deref $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return *x; } _ => () }) ; - (Some $p:expr, $constructor:ident) => ( match copy $p.token { + (Some $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return Some(x); } _ => () }) ; - (iovi $p:expr, $constructor:ident) => ( match copy $p.token { + (iovi $p:expr, $constructor:ident) => ( match *$p.token { INTERPOLATED(token::$constructor(x)) => { $p.bump(); return iovi_item(x); } _ => () }) ; - (pair_empty $p:expr, $constructor:ident) => ( match copy $p.token { - INTERPOLATED(token::$constructor(ref x)) => { - $p.bump(); return (~[], (*x)); + (pair_empty $p:expr, $constructor:ident) => ( match *$p.token { + INTERPOLATED(token::$constructor(x)) => { + $p.bump(); return (~[], x); } _ => () }) @@ -202,35 +202,35 @@ pub fn Parser(sess: @mut ParseSess, interner: interner, sess: sess, cfg: cfg, - token: tok0.tok, - span: span0, - last_span: span0, - mut buffer: [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], - buffer_start: 0, - buffer_end: 0, - tokens_consumed: 0u, - restriction: UNRESTRICTED, - quote_depth: 0u, + token: @mut tok0.tok, + span: @mut span0, + last_span: @mut span0, + buffer: @mut [TokenAndSpan {tok: tok0.tok, sp: span0}, ..4], + buffer_start: @mut 0, + buffer_end: @mut 0, + tokens_consumed: @mut 0u, + restriction: @mut UNRESTRICTED, + quote_depth: @mut 0u, keywords: token::keyword_table(), strict_keywords: token::strict_keyword_table(), reserved_keywords: token::reserved_keyword_table(), obsolete_set: HashMap(), - mod_path_stack: ~[], + mod_path_stack: @mut ~[], } } pub struct Parser { sess: @mut ParseSess, cfg: crate_cfg, - mut token: token::Token, - mut span: span, - mut last_span: span, - mut buffer: [TokenAndSpan * 4], - mut buffer_start: int, - mut buffer_end: int, - mut tokens_consumed: uint, - mut restriction: restriction, - mut quote_depth: uint, // not (yet) related to the quasiquoter + token: @mut token::Token, + span: @mut span, + last_span: @mut span, + buffer: @mut [TokenAndSpan * 4], + buffer_start: @mut int, + buffer_end: @mut int, + tokens_consumed: @mut uint, + restriction: @mut restriction, + quote_depth: @mut uint, // not (yet) related to the quasiquoter reader: reader, interner: @token::ident_interner, keywords: HashMap<~str, ()>, @@ -240,7 +240,7 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: 
HashMap, /// Used to determine the path to externally loaded source files - mut mod_path_stack: ~[~str], + mod_path_stack: @mut ~[~str], drop {} /* do not copy the parser; its state is tied to outside state */ } @@ -248,39 +248,39 @@ pub struct Parser { pub impl Parser { // advance the parser by one token fn bump() { - self.last_span = self.span; - let next = if self.buffer_start == self.buffer_end { + *self.last_span = *self.span; + let next = if *self.buffer_start == *self.buffer_end { self.reader.next_token() } else { - let next = self.buffer[self.buffer_start]; - self.buffer_start = (self.buffer_start + 1) & 3; + let next = self.buffer[*self.buffer_start]; + *self.buffer_start = (*self.buffer_start + 1) & 3; next }; - self.token = next.tok; - self.span = next.sp; - self.tokens_consumed += 1u; + *self.token = next.tok; + *self.span = next.sp; + *self.tokens_consumed += 1u; } // EFFECT: replace the current token and span with the given one fn replace_token(next: token::Token, +lo: BytePos, +hi: BytePos) { - self.token = next; - self.span = mk_sp(lo, hi); + *self.token = next; + *self.span = mk_sp(lo, hi); } fn buffer_length() -> int { - if self.buffer_start <= self.buffer_end { - return self.buffer_end - self.buffer_start; + if *self.buffer_start <= *self.buffer_end { + return *self.buffer_end - *self.buffer_start; } - return (4 - self.buffer_start) + self.buffer_end; + return (4 - *self.buffer_start) + *self.buffer_end; } fn look_ahead(distance: uint) -> token::Token { let dist = distance as int; while self.buffer_length() < dist { - self.buffer[self.buffer_end] = self.reader.next_token(); - self.buffer_end = (self.buffer_end + 1) & 3; + self.buffer[*self.buffer_end] = self.reader.next_token(); + *self.buffer_end = (*self.buffer_end + 1) & 3; } - return copy self.buffer[(self.buffer_start + dist - 1) & 3].tok; + return copy self.buffer[(*self.buffer_start + dist - 1) & 3].tok; } fn fatal(m: ~str) -> ! { - self.sess.span_diagnostic.span_fatal(copy self.span, m) + self.sess.span_diagnostic.span_fatal(*copy self.span, m) } fn span_fatal(sp: span, m: ~str) -> ! { self.sess.span_diagnostic.span_fatal(sp, m) @@ -289,10 +289,10 @@ pub impl Parser { self.sess.span_diagnostic.span_note(sp, m) } fn bug(m: ~str) -> ! 
{ - self.sess.span_diagnostic.span_bug(copy self.span, m) + self.sess.span_diagnostic.span_bug(*copy self.span, m) } fn warn(m: ~str) { - self.sess.span_diagnostic.span_warn(copy self.span, m) + self.sess.span_diagnostic.span_warn(*copy self.span, m) } fn span_err(sp: span, m: ~str) { self.sess.span_diagnostic.span_err(sp, m) @@ -451,8 +451,8 @@ pub impl Parser { let hi = p.last_span.hi; debug!("parse_trait_methods(): trait method signature ends in \ `%s`", - token_to_str(p.reader, p.token)); - match p.token { + token_to_str(p.reader, *p.token)); + match *p.token { token::SEMI => { p.bump(); debug!("parse_trait_methods(): parsing required method"); @@ -490,7 +490,7 @@ pub impl Parser { } _ => { p.fatal(~"expected `;` or `}` but found `" + - token_to_str(p.reader, p.token) + ~"`"); + token_to_str(p.reader, *p.token) + ~"`"); } } } @@ -562,7 +562,7 @@ pub impl Parser { fn parse_region() -> @region { self.expect(token::BINOP(token::AND)); - match copy self.token { + match *self.token { token::IDENT(sid, _) => { self.bump(); self.region_from_name(Some(sid)) @@ -578,9 +578,9 @@ pub impl Parser { let lo = self.span.lo; - let t = if self.token == token::LPAREN { + let t = if *self.token == token::LPAREN { self.bump(); - if self.token == token::RPAREN { + if *self.token == token::RPAREN { self.bump(); ty_nil } else { @@ -589,9 +589,9 @@ pub impl Parser { // of type t let mut ts = ~[self.parse_ty(false)]; let mut one_tuple = false; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); - if self.token != token::RPAREN { + if *self.token != token::RPAREN { ts.push(self.parse_ty(false)); } else { @@ -603,16 +603,16 @@ pub impl Parser { self.expect(token::RPAREN); t } - } else if self.token == token::AT { + } else if *self.token == token::AT { self.bump(); self.parse_box_or_uniq_pointee(ManagedSigil, ty_box) - } else if self.token == token::TILDE { + } else if *self.token == token::TILDE { self.bump(); self.parse_box_or_uniq_pointee(OwnedSigil, ty_uniq) - } else if self.token == token::BINOP(token::STAR) { + } else if *self.token == token::BINOP(token::STAR) { self.bump(); ty_ptr(self.parse_mt()) - } else if self.token == token::LBRACE { + } else if *self.token == token::LBRACE { let elems = self.parse_unspanned_seq( token::LBRACE, token::RBRACE, seq_sep_trailing_allowed(token::COMMA), @@ -621,7 +621,7 @@ pub impl Parser { self.unexpected_last(token::RBRACE); } ty_rec(elems) - } else if self.token == token::LBRACKET { + } else if *self.token == token::LBRACKET { self.expect(token::LBRACKET); let mt = self.parse_mt(); @@ -632,15 +632,15 @@ pub impl Parser { }; self.expect(token::RBRACKET); t - } else if self.token == token::BINOP(token::AND) { + } else if *self.token == token::BINOP(token::AND) { self.bump(); self.parse_borrowed_pointee() } else if self.eat_keyword(~"extern") { self.parse_ty_bare_fn() - } else if self.token_is_closure_keyword(self.token) { + } else if self.token_is_closure_keyword(*self.token) { self.parse_ty_closure(None, None) - } else if self.token == token::MOD_SEP - || is_ident_or_path(self.token) { + } else if *self.token == token::MOD_SEP + || is_ident_or_path(*self.token) { let path = self.parse_path_with_tps(colons_before_params); ty_path(path, self.get_id()) } else { self.fatal(~"expected type"); }; @@ -654,7 +654,7 @@ pub impl Parser { ctor: &fn(+v: mt) -> ty_) -> ty_ { // @'foo fn() or @foo/fn() or @fn() are parsed directly as fn types: - match copy self.token { + match *self.token { token::LIFETIME(rname) => { self.bump(); return 
self.parse_ty_closure(Some(sigil), Some(rname)); @@ -662,11 +662,10 @@ pub impl Parser { token::IDENT(rname, _) => { if self.look_ahead(1u) == token::BINOP(token::SLASH) && - self.token_is_closure_keyword(self.look_ahead(2u)) - { + self.token_is_closure_keyword(self.look_ahead(2u)) { self.bump(); self.bump(); return self.parse_ty_closure(Some(sigil), Some(rname)); - } else if self.token_is_closure_keyword(self.token) { + } else if self.token_is_closure_keyword(*self.token) { return self.parse_ty_closure(Some(sigil), None); } } @@ -683,7 +682,7 @@ pub impl Parser { fn parse_borrowed_pointee() -> ty_ { // look for `&'lt` or `&foo/` and interpret `foo` as the region name: - let rname = match self.token { + let rname = match *self.token { token::LIFETIME(sid) => { self.bump(); Some(sid) @@ -701,7 +700,7 @@ pub impl Parser { _ => { None } }; - if self.token_is_closure_keyword(self.token) { + if self.token_is_closure_keyword(*self.token) { return self.parse_ty_closure(Some(BorrowedSigil), rname); } @@ -727,13 +726,13 @@ pub impl Parser { } fn is_named_argument() -> bool { - let offset = if self.token == token::BINOP(token::AND) { + let offset = if *self.token == token::BINOP(token::AND) { 1 - } else if self.token == token::BINOP(token::MINUS) { + } else if *self.token == token::BINOP(token::MINUS) { 1 - } else if self.token == token::ANDAND { + } else if *self.token == token::ANDAND { 1 - } else if self.token == token::BINOP(token::PLUS) { + } else if *self.token == token::BINOP(token::PLUS) { if self.look_ahead(1) == token::BINOP(token::PLUS) { 2 } else { @@ -741,7 +740,7 @@ pub impl Parser { } } else { 0 }; if offset == 0 { - is_plain_ident(self.token) + is_plain_ident(*self.token) && self.look_ahead(1) == token::COLON } else { is_plain_ident(self.look_ahead(offset)) @@ -775,7 +774,7 @@ pub impl Parser { } else { m = infer(self.get_id()); ast_util::ident_to_pat(self.get_id(), - copy self.last_span, + *self.last_span, special_idents::invalid) }; @@ -819,7 +818,7 @@ pub impl Parser { fn maybe_parse_fixed_vstore_with_star() -> Option { if self.eat(token::BINOP(token::STAR)) { - match copy self.token { + match *self.token { token::LIT_INT_UNSUFFIXED(i) if i >= 0i64 => { self.bump(); Some(i as uint) @@ -828,7 +827,7 @@ pub impl Parser { self.fatal( fmt!("expected integral vector length \ but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } } } else { @@ -857,7 +856,8 @@ pub impl Parser { } else if self.eat_keyword(~"false") { lit_bool(false) } else { - let tok = self.token; + // XXX: This is a really bad copy! + let tok = *self.token; self.bump(); self.lit_from_token(tok) }; @@ -920,9 +920,8 @@ pub impl Parser { // vstores is... um... the same. I guess that's my fault. This // is still not ideal as for &str we end up parsing more than we // ought to and have to sort it out later. - if self.token == token::BINOP(token::SLASH) + if *self.token == token::BINOP(token::SLASH) && self.look_ahead(1u) == token::BINOP(token::AND) { - self.expect(token::BINOP(token::SLASH)); Some(self.parse_region()) } else { @@ -958,7 +957,7 @@ pub impl Parser { * Parses 0 or 1 lifetime. */ - match self.token { + match *self.token { token::LIFETIME(_) => { Some(self.parse_lifetime()) } @@ -974,12 +973,12 @@ pub impl Parser { * Parses a single lifetime. 
*/ - match self.token { + match *self.token { token::LIFETIME(i) => { self.bump(); return ast::Lifetime { id: self.get_id(), - span: self.span, + span: *self.span, ident: i }; } @@ -1000,7 +999,7 @@ pub impl Parser { let mut res = ~[]; loop { - match self.token { + match *self.token { token::LIFETIME(_) => { res.push(self.parse_lifetime()); } @@ -1009,7 +1008,7 @@ pub impl Parser { } } - match self.token { + match *self.token { token::COMMA => { self.bump();} token::GT => { return res; } _ => { @@ -1058,14 +1057,16 @@ pub impl Parser { fn mk_lit_u32(i: u32) -> @expr { let span = self.span; - let lv_lit = @codemap::spanned { node: lit_uint(i as u64, ty_u32), - span: span }; + let lv_lit = @codemap::spanned { + node: lit_uint(i as u64, ty_u32), + span: *span + }; @expr { id: self.get_id(), callee_id: self.get_id(), node: expr_lit(lv_lit), - span: span, + span: *span, } } @@ -1076,21 +1077,21 @@ pub impl Parser { let mut ex: expr_; - if self.token == token::LPAREN { + if *self.token == token::LPAREN { self.bump(); // (e) is parenthesized e // (e,) is a tuple with only one field, e let mut one_tuple = false; - if self.token == token::RPAREN { + if *self.token == token::RPAREN { hi = self.span.hi; self.bump(); let lit = @spanned(lo, hi, lit_nil); return self.mk_expr(lo, hi, expr_lit(lit)); } let mut es = ~[self.parse_expr()]; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); - if self.token != token::RPAREN { + if *self.token != token::RPAREN { es.push(self.parse_expr()); } else { @@ -1106,7 +1107,7 @@ pub impl Parser { else { self.mk_expr(lo, hi, expr_tup(es)) } - } else if self.token == token::LBRACE { + } else if *self.token == token::LBRACE { if self.looking_at_record_literal() { ex = self.parse_record_literal(); hi = self.span.hi; @@ -1116,7 +1117,7 @@ pub impl Parser { return self.mk_expr(blk.span.lo, blk.span.hi, expr_block(blk)); } - } else if token::is_bar(self.token) { + } else if token::is_bar(*self.token) { return self.parse_lambda_expr(); } else if self.eat_keyword(~"if") { return self.parse_if_expr(); @@ -1143,17 +1144,17 @@ pub impl Parser { return self.parse_fn_expr(sigil); } else if self.eat_keyword(~"unsafe") { return self.parse_block_expr(lo, unsafe_blk); - } else if self.token == token::LBRACKET { + } else if *self.token == token::LBRACKET { self.bump(); let mutbl = self.parse_mutability(); - if self.token == token::RBRACKET { + if *self.token == token::RBRACKET { // Empty vector. self.bump(); ex = expr_vec(~[], mutbl); } else { // Nonempty vector. let first_expr = self.parse_expr(); - if self.token == token::COMMA && + if *self.token == token::COMMA && self.look_ahead(1) == token::DOTDOT { // Repeating vector syntax: [ 0, ..512 ] self.bump(); @@ -1161,7 +1162,7 @@ pub impl Parser { let count = self.parse_expr(); self.expect(token::RBRACKET); ex = expr_repeat(first_expr, count, mutbl); - } else if self.token == token::COMMA { + } else if *self.token == token::COMMA { // Vector with two or more elements. 
self.bump(); let remaining_exprs = @@ -1189,13 +1190,13 @@ pub impl Parser { ex = expr_assert(e); hi = e.span.hi; } else if self.eat_keyword(~"return") { - if can_begin_expr(self.token) { + if can_begin_expr(*self.token) { let e = self.parse_expr(); hi = e.span.hi; ex = expr_ret(Some(e)); } else { ex = expr_ret(None); } } else if self.eat_keyword(~"break") { - if is_ident(self.token) { + if is_ident(*self.token) { ex = expr_break(Some(self.parse_ident())); } else { ex = expr_break(None); @@ -1205,28 +1206,28 @@ pub impl Parser { let e = self.parse_expr(); ex = expr_copy(e); hi = e.span.hi; - } else if self.token == token::MOD_SEP || - is_ident(self.token) && !self.is_keyword(~"true") && - !self.is_keyword(~"false") { + } else if *self.token == token::MOD_SEP || + is_ident(*self.token) && !self.is_keyword(~"true") && + !self.is_keyword(~"false") { let pth = self.parse_path_with_tps(true); /* `!`, as an operator, is prefix, so we know this isn't that */ - if self.token == token::NOT { + if *self.token == token::NOT { self.bump(); - let tts = match self.token { - token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(copy self.token); - self.parse_unspanned_seq(copy self.token, ket, - seq_sep_none(), - |p| p.parse_token_tree()) - } + match *self.token { + token::LPAREN | token::LBRACE => {} _ => self.fatal(~"expected open delimiter") }; + + let ket = token::flip_delimiter(*self.token); + let tts = self.parse_unspanned_seq(*self.token, + ket, + seq_sep_none(), + |p| p.parse_token_tree()); let hi = self.span.hi; - return self.mk_mac_expr( - lo, hi, mac_invoc_tt(pth, tts)); - } else if self.token == token::LBRACE { + return self.mk_mac_expr(lo, hi, mac_invoc_tt(pth, tts)); + } else if *self.token == token::LBRACE { // This might be a struct literal. if self.looking_at_record_literal() { // It's a struct literal. @@ -1234,8 +1235,7 @@ pub impl Parser { let mut fields = ~[]; let mut base = None; fields.push(self.parse_field(token::COLON)); - while self.token != token::RBRACE { - + while *self.token != token::RBRACE { if self.try_parse_obsolete_with() { break; } @@ -1247,7 +1247,7 @@ pub impl Parser { break; } - if self.token == token::RBRACE { + if *self.token == token::RBRACE { // Accept an optional trailing comma. break; } @@ -1284,7 +1284,7 @@ pub impl Parser { } fn permits_call() -> bool { - return self.restriction != RESTRICT_NO_CALL_EXPRS; + return *self.restriction != RESTRICT_NO_CALL_EXPRS; } fn parse_dot_or_call_expr_with(e0: @expr) -> @expr { @@ -1294,7 +1294,7 @@ pub impl Parser { loop { // expr.f if self.eat(token::DOT) { - match copy self.token { + match *self.token { token::IDENT(i, _) => { hi = self.span.hi; self.bump(); @@ -1307,7 +1307,7 @@ pub impl Parser { }; // expr.f() method call - match copy self.token { + match *self.token { token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, @@ -1328,7 +1328,7 @@ pub impl Parser { loop; } if self.expr_is_complete(e) { break; } - match copy self.token { + match *self.token { // expr(...) token::LPAREN if self.permits_call() => { let es = self.parse_unspanned_seq( @@ -1359,17 +1359,17 @@ pub impl Parser { // parse an optional separator followed by a kleene-style // repetition token (+ or *). 
fn parse_sep_and_zerok() -> (Option, bool) { - if self.token == token::BINOP(token::STAR) - || self.token == token::BINOP(token::PLUS) { - let zerok = self.token == token::BINOP(token::STAR); + if *self.token == token::BINOP(token::STAR) + || *self.token == token::BINOP(token::PLUS) { + let zerok = *self.token == token::BINOP(token::STAR); self.bump(); return (None, zerok); } else { - let sep = self.token; + let sep = *self.token; self.bump(); - if self.token == token::BINOP(token::STAR) - || self.token == token::BINOP(token::PLUS) { - let zerok = self.token == token::BINOP(token::STAR); + if *self.token == token::BINOP(token::STAR) + || *self.token == token::BINOP(token::PLUS) { + let zerok = *self.token == token::BINOP(token::STAR); self.bump(); return (Some(sep), zerok); } else { @@ -1384,18 +1384,18 @@ pub impl Parser { fn parse_non_delim_tt_tok(p: Parser) -> token_tree { maybe_whole!(deref p, nt_tt); - match p.token { + match *p.token { token::RPAREN | token::RBRACE | token::RBRACKET => { p.fatal(~"incorrect close delimiter: `" - + token_to_str(p.reader, p.token) + ~"`"); + + token_to_str(p.reader, *p.token) + ~"`"); } /* we ought to allow different depths of unquotation */ - token::DOLLAR if p.quote_depth > 0u => { + token::DOLLAR if *p.quote_depth > 0u => { p.bump(); - let sp = p.span; + let sp = *p.span; - if p.token == token::LPAREN { + if *p.token == token::LPAREN { let seq = p.parse_seq(token::LPAREN, token::RPAREN, seq_sep_none(), |p| p.parse_token_tree()); @@ -1413,18 +1413,18 @@ pub impl Parser { // turn the next token into a tt_tok: fn parse_any_tt_tok(p: Parser) -> token_tree{ - let res = tt_tok(p.span, p.token); + let res = tt_tok(*p.span, *p.token); p.bump(); res } - match self.token { + match *self.token { token::EOF => { self.fatal(~"file ended in the middle of a macro invocation"); } token::LPAREN | token::LBRACE | token::LBRACKET => { // tjc: ?????? 
- let ket = token::flip_delimiter(copy self.token); + let ket = token::flip_delimiter(*self.token); tt_delim(vec::append( // the open delimiter: ~[parse_any_tt_tok(self)], @@ -1441,7 +1441,7 @@ pub impl Parser { fn parse_all_token_trees() -> ~[token_tree] { let mut tts = ~[]; - while self.token != token::EOF { + while *self.token != token::EOF { tts.push(self.parse_token_tree()); } tts @@ -1452,11 +1452,11 @@ pub impl Parser { // the interpolation of matchers maybe_whole!(self, nt_matchers); let name_idx = @mut 0u; - return match self.token { + return match *self.token { token::LBRACE | token::LPAREN | token::LBRACKET => { - self.parse_matcher_subseq(name_idx, copy self.token, + self.parse_matcher_subseq(name_idx, *self.token, // tjc: not sure why we need a copy - token::flip_delimiter(copy self.token)) + token::flip_delimiter(*self.token)) } _ => self.fatal(~"expected open delimiter") } @@ -1473,9 +1473,9 @@ pub impl Parser { self.expect(bra); - while self.token != ket || lparens > 0u { - if self.token == token::LPAREN { lparens += 1u; } - if self.token == token::RPAREN { lparens -= 1u; } + while *self.token != ket || lparens > 0u { + if *self.token == token::LPAREN { lparens += 1u; } + if *self.token == token::RPAREN { lparens -= 1u; } ret_val.push(self.parse_matcher(name_idx)); } @@ -1487,11 +1487,12 @@ pub impl Parser { fn parse_matcher(name_idx: @mut uint) -> matcher { let lo = self.span.lo; - let m = if self.token == token::DOLLAR { + let m = if *self.token == token::DOLLAR { self.bump(); - if self.token == token::LPAREN { + if *self.token == token::LPAREN { let name_idx_lo = *name_idx; - let ms = self.parse_matcher_subseq(name_idx, token::LPAREN, + let ms = self.parse_matcher_subseq(name_idx, + token::LPAREN, token::RPAREN); if ms.len() == 0u { self.fatal(~"repetition body must be nonempty"); @@ -1507,7 +1508,7 @@ pub impl Parser { m } } else { - let m = match_tok(self.token); + let m = match_tok(*self.token); self.bump(); m }; @@ -1521,7 +1522,7 @@ pub impl Parser { let mut hi; let mut ex; - match copy self.token { + match *self.token { token::NOT => { self.bump(); let e = self.parse_prefix_expr(); @@ -1610,13 +1611,13 @@ pub impl Parser { fn parse_more_binops(lhs: @expr, min_prec: uint) -> @expr { if self.expr_is_complete(lhs) { return lhs; } - let peeked = self.token; + let peeked = *self.token; if peeked == token::BINOP(token::OR) && - (self.restriction == RESTRICT_NO_BAR_OP || - self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { + (*self.restriction == RESTRICT_NO_BAR_OP || + *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP) { lhs } else if peeked == token::OROR && - self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP { + *self.restriction == RESTRICT_NO_BAR_OR_DOUBLEBAR_OP { lhs } else { let cur_opt = token_to_binop(peeked); @@ -1656,7 +1657,7 @@ pub impl Parser { fn parse_assign_expr() -> @expr { let lo = self.span.lo; let lhs = self.parse_binops(); - match copy self.token { + match *self.token { token::EQ => { self.bump(); let rhs = self.parse_expr(); @@ -1683,7 +1684,7 @@ pub impl Parser { expr_assign_op(aop, lhs, rhs)) } token::LARROW => { - self.obsolete(copy self.span, ObsoleteBinaryMove); + self.obsolete(*self.span, ObsoleteBinaryMove); // Bogus value (but it's an error) self.bump(); // <- self.bump(); // rhs @@ -1733,7 +1734,7 @@ pub impl Parser { fn parse_lambda_block_expr() -> @expr { self.parse_lambda_expr_( || { - match self.token { + match *self.token { token::BINOP(token::OR) | token::OROR => { self.parse_fn_block_decl() } @@ -1744,7 +1745,7 @@ pub impl 
Parser { output: @Ty { id: self.get_id(), node: ty_infer, - span: self.span + span: *self.span }, cf: return_val } @@ -1841,8 +1842,8 @@ pub impl Parser { // but they aren't represented by tests debug!("sugary call on %?", e.node); self.span_fatal( - lo, fmt!("`%s` must be followed by a block call", - keyword)); + *lo, + fmt!("`%s` must be followed by a block call", keyword)); } } } @@ -1858,13 +1859,13 @@ pub impl Parser { fn parse_loop_expr() -> @expr { // loop headers look like 'loop {' or 'loop unsafe {' let is_loop_header = - self.token == token::LBRACE - || (is_ident(copy self.token) + *self.token == token::LBRACE + || (is_ident(*self.token) && self.look_ahead(1) == token::LBRACE); // labeled loop headers look like 'loop foo: {' let is_labeled_loop_header = - is_ident(self.token) - && !self.is_any_keyword(copy self.token) + is_ident(*self.token) + && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::COLON; if is_loop_header || is_labeled_loop_header { @@ -1884,7 +1885,7 @@ pub impl Parser { } else { // This is a 'continue' expression let lo = self.span.lo; - let ex = if is_ident(self.token) { + let ex = if is_ident(*self.token) { expr_again(Some(self.parse_ident())) } else { expr_again(None) @@ -1897,7 +1898,7 @@ pub impl Parser { // For distingishing between record literals and blocks fn looking_at_record_literal() -> bool { let lookahead = self.look_ahead(1); - self.token == token::LBRACE && + *self.token == token::LBRACE && (self.token_is_keyword(~"mut", lookahead) || (is_plain_ident(lookahead) && self.look_ahead(2) == token::COLON)) @@ -1907,8 +1908,8 @@ pub impl Parser { self.expect(token::LBRACE); let mut fields = ~[self.parse_field(token::COLON)]; let mut base = None; - while self.token != token::RBRACE { - if self.token == token::COMMA + while *self.token != token::RBRACE { + if *self.token == token::COMMA && self.look_ahead(1) == token::DOTDOT { self.bump(); self.bump(); @@ -1920,7 +1921,7 @@ pub impl Parser { } self.expect(token::COMMA); - if self.token == token::RBRACE { + if *self.token == token::RBRACE { // record ends by an optional trailing comma break; } @@ -1936,7 +1937,7 @@ pub impl Parser { let discriminant = self.parse_expr(); self.expect(token::LBRACE); let mut arms: ~[arm] = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; if self.eat_keyword(~"if") { guard = Some(self.parse_expr()); } @@ -1945,7 +1946,7 @@ pub impl Parser { let require_comma = !classify::expr_is_simple_block(expr) - && self.token != token::RBRACE; + && *self.token != token::RBRACE; if require_comma { self.expect(token::COMMA); @@ -1978,21 +1979,21 @@ pub impl Parser { // parse an expression, subject to the given restriction fn parse_expr_res(r: restriction) -> @expr { - let old = self.restriction; - self.restriction = r; + let old = *self.restriction; + *self.restriction = r; let e = self.parse_assign_expr(); - self.restriction = old; + *self.restriction = old; return e; } fn parse_initializer() -> Option<@expr> { - match self.token { + match *self.token { token::EQ => { self.bump(); return Some(self.parse_expr()); } token::LARROW => { - self.obsolete(copy self.span, ObsoleteMoveInit); + self.obsolete(*self.span, ObsoleteMoveInit); self.bump(); self.bump(); return None; @@ -2007,7 +2008,7 @@ pub impl Parser { let mut pats = ~[]; loop { pats.push(self.parse_pat(true)); - if self.token == token::BINOP(token::OR) { self.bump(); } + if *self.token == token::BINOP(token::OR) { self.bump(); } else { 
return pats; } }; } @@ -2017,12 +2018,12 @@ pub impl Parser { let mut tail = None; let mut first = true; - while self.token != token::RBRACKET { + while *self.token != token::RBRACKET { if first { first = false; } else { self.expect(token::COMMA); } let mut is_tail = false; - if self.token == token::DOTDOT { + if *self.token == token::DOTDOT { self.bump(); is_tail = true; } @@ -2049,15 +2050,15 @@ pub impl Parser { let mut fields = ~[]; let mut etc = false; let mut first = true; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { if first { first = false; } else { self.expect(token::COMMA); } - if self.token == token::UNDERSCORE { + if *self.token == token::UNDERSCORE { self.bump(); - if self.token != token::RBRACE { + if *self.token != token::RBRACE { self.fatal(~"expected `}`, found `" + - token_to_str(self.reader, self.token) + + token_to_str(self.reader, *self.token) + ~"`"); } etc = true; @@ -2074,14 +2075,14 @@ pub impl Parser { let fieldpath = ast_util::ident_to_path(mk_sp(lo1, hi1), fieldname); let mut subpat; - if self.token == token::COLON { + if *self.token == token::COLON { self.bump(); subpat = self.parse_pat(refutable); } else { subpat = @ast::pat { id: self.get_id(), node: pat_ident(bind_infer, fieldpath, None), - span: self.last_span + span: *self.last_span }; } fields.push(ast::field_pat { ident: fieldname, pat: subpat }); @@ -2095,7 +2096,7 @@ pub impl Parser { let lo = self.span.lo; let mut hi = self.span.hi; let mut pat; - match self.token { + match *self.token { token::UNDERSCORE => { self.bump(); pat = pat_wild; } token::AT => { self.bump(); @@ -2173,7 +2174,7 @@ pub impl Parser { } token::LPAREN => { self.bump(); - if self.token == token::RPAREN { + if *self.token == token::RPAREN { hi = self.span.hi; self.bump(); let lit = @codemap::spanned { @@ -2184,7 +2185,7 @@ pub impl Parser { } else { let mut fields = ~[self.parse_pat(refutable)]; if self.look_ahead(1) != token::RPAREN { - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); fields.push(self.parse_pat(refutable)); } @@ -2233,7 +2234,7 @@ pub impl Parser { cannot_be_enum_or_struct = true } - if is_plain_ident(self.token) && cannot_be_enum_or_struct { + if is_plain_ident(*self.token) && cannot_be_enum_or_struct { let name = self.parse_value_path(); let sub; if self.eat(token::AT) { @@ -2244,7 +2245,7 @@ pub impl Parser { pat = pat_ident(binding_mode, name, sub); } else { let enum_path = self.parse_path_with_tps(true); - match self.token { + match *self.token { token::LBRACE => { self.bump(); let (fields, etc) = @@ -2255,7 +2256,7 @@ pub impl Parser { _ => { let mut args: ~[@pat] = ~[]; let mut star_pat = false; - match self.token { + match *self.token { token::LPAREN => match self.look_ahead(1u) { token::BINOP(token::STAR) => { // This is a "top constructor only" pat @@ -2299,9 +2300,9 @@ pub impl Parser { fn parse_pat_ident(refutable: bool, binding_mode: ast::binding_mode) -> ast::pat_ { - if !is_plain_ident(self.token) { + if !is_plain_ident(*self.token) { self.span_fatal( - copy self.last_span, + *self.last_span, ~"expected identifier, found path"); } let name = self.parse_value_path(); @@ -2315,9 +2316,9 @@ pub impl Parser { // leads to a parse error. 
Note that if there is no explicit // binding mode then we do not end up here, because the lookahead // will direct us over to parse_enum_variant() - if self.token == token::LPAREN { + if *self.token == token::LPAREN { self.span_fatal( - copy self.last_span, + *self.last_span, ~"expected identifier, found enum pattern"); } @@ -2365,7 +2366,7 @@ pub impl Parser { if self.eat_keyword(~"mut") { is_mutbl = struct_mutable; } - if !is_plain_ident(self.token) { + if !is_plain_ident(*self.token) { self.fatal(~"expected ident"); } let name = self.parse_ident(); @@ -2394,8 +2395,8 @@ pub impl Parser { self.expect_keyword(~"let"); let decl = self.parse_let(); return @spanned(lo, decl.span.hi, stmt_decl(decl, self.get_id())); - } else if is_ident(self.token) - && !self.is_any_keyword(copy self.token) + } else if is_ident(*self.token) + && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::NOT { check_expected_item(self, first_item_attrs); @@ -2405,7 +2406,7 @@ pub impl Parser { let pth = self.parse_value_path(); self.bump(); - let id = if self.token == token::LPAREN { + let id = if *self.token == token::LPAREN { token::special_idents::invalid // no special identifier } else { self.parse_ident() @@ -2460,7 +2461,7 @@ pub impl Parser { } fn expr_is_complete(e: @expr) -> bool { - return self.restriction == RESTRICT_STMT_EXPR && + return *self.restriction == RESTRICT_STMT_EXPR && !classify::expr_requires_semi_to_be_stmt(e); } @@ -2486,7 +2487,7 @@ pub impl Parser { let lo = self.span.lo; if self.eat_keyword(~"unsafe") { - self.obsolete(copy self.span, ObsoleteUnsafeBlock); + self.obsolete(*self.span, ObsoleteUnsafeBlock); } self.expect(token::LBRACE); let (inner, next) = @@ -2530,12 +2531,12 @@ pub impl Parser { let mut initial_attrs = attrs_remaining; - if self.token == token::RBRACE && !vec::is_empty(initial_attrs) { + if *self.token == token::RBRACE && !vec::is_empty(initial_attrs) { self.fatal(~"expected item"); } - while self.token != token::RBRACE { - match self.token { + while *self.token != token::RBRACE { + match *self.token { token::SEMI => { self.bump(); // empty } @@ -2545,7 +2546,7 @@ pub impl Parser { match stmt.node { stmt_expr(e, stmt_id) => { // Expression without semicolon - match self.token { + match *self.token { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { @@ -2570,7 +2571,7 @@ pub impl Parser { stmt_mac(ref m, _) => { // Statement macro; might be an expr - match self.token { + match *self.token { token::SEMI => { self.bump(); stmts.push(@codemap::spanned { @@ -2616,9 +2617,9 @@ pub impl Parser { @Ty { id: self.get_id(), node: ty_path( - ident_to_path(copy self.last_span, i), + ident_to_path(*self.last_span, i), self.get_id()), - span: self.last_span, + span: *self.last_span, } } @@ -2644,12 +2645,12 @@ pub impl Parser { if self.eat_keyword(~"static") { bounds.push(RegionTyParamBound); } else { - self.span_err(copy self.span, + self.span_err(*self.span, ~"`&static` is the only permissible \ region bound here"); } - } else if is_ident(self.token) { - let maybe_bound = match self.token { + } else if is_ident(*self.token) { + let maybe_bound = match *self.token { token::IDENT(copy sid, _) => { match *self.id_to_str(sid) { @@ -2657,7 +2658,7 @@ pub impl Parser { | ~"copy" | ~"const" | ~"owned" => { - self.obsolete(copy self.span, + self.obsolete(*self.span, ObsoleteLowerCaseKindBounds); // Bogus value, but doesn't matter, since // is an error @@ -2689,8 +2690,8 @@ pub impl Parser { loop; } - if is_ident_or_path(self.token) { - self.obsolete(copy self.span, + 
if is_ident_or_path(*self.token) { + self.obsolete(*self.span, ObsoleteTraitBoundSeparator); } } @@ -2732,7 +2733,7 @@ pub impl Parser { } fn is_self_ident() -> bool { - match self.token { + match *self.token { token::IDENT(id, false) if id == special_idents::self_ => true, _ => false @@ -2742,7 +2743,7 @@ pub impl Parser { fn expect_self_ident() { if !self.is_self_ident() { self.fatal(fmt!("expected `self` but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } self.bump(); } @@ -2773,7 +2774,7 @@ pub impl Parser { // A bit of complexity and lookahead is needed here in order to to be // backwards compatible. let lo = self.span.lo; - let self_ty = match copy self.token { + let self_ty = match *self.token { token::BINOP(token::AND) => { maybe_parse_self_ty(sty_region, self) } @@ -2795,7 +2796,7 @@ pub impl Parser { // If we parsed a self type, expect a comma before the argument list. let args_or_capture_items; if self_ty != sty_by_ref { - match copy self.token { + match *self.token { token::COMMA => { self.bump(); let sep = seq_sep_trailing_disallowed(token::COMMA); @@ -2809,7 +2810,8 @@ pub impl Parser { } _ => { self.fatal(~"expected `,` or `)`, found `" + - token_to_str(self.reader, self.token) + ~"`"); + token_to_str(self.reader, *self.token) + + ~"`"); } } } else { @@ -2850,7 +2852,7 @@ pub impl Parser { let output = if self.eat(token::RARROW) { self.parse_ty(false) } else { - @Ty { id: self.get_id(), node: ty_infer, span: self.span } + @Ty { id: self.get_id(), node: ty_infer, span: *self.span } }; ast::fn_decl { @@ -2929,7 +2931,7 @@ pub impl Parser { // Parse traits, if necessary. let traits; - if self.token == token::COLON { + if *self.token == token::COLON { self.bump(); traits = self.parse_trait_ref_list(token::LBRACE); } else { @@ -2954,7 +2956,7 @@ pub impl Parser { // First, parse type parameters if necessary. let mut tps; - if self.token == token::LT { + if *self.token == token::LT { tps = self.parse_ty_params(); } else { tps = ~[]; @@ -2978,7 +2980,7 @@ pub impl Parser { }) } _ => { - self.span_err(copy self.span, ~"not a trait"); + self.span_err(*self.span, ~"not a trait"); None } }; @@ -2986,7 +2988,7 @@ pub impl Parser { ty = self.parse_ty(false); opt_trait_ref } else if self.eat(token::COLON) { - self.obsolete(copy self.span, ObsoleteImplSyntax); + self.obsolete(*self.span, ObsoleteImplSyntax); Some(self.parse_trait_ref()) } else { None @@ -3008,7 +3010,7 @@ pub impl Parser { // the return type of the ctor function. fn ident_to_path_tys(i: ident, typarams: ~[ty_param]) -> @path { - let s = self.last_span; + let s = *self.last_span; @ast::path { span: s, @@ -3026,7 +3028,7 @@ pub impl Parser { } fn ident_to_path(i: ident) -> @path { - @ast::path { span: self.last_span, + @ast::path { span: *self.last_span, global: false, idents: ~[i], rp: None, @@ -3051,7 +3053,7 @@ pub impl Parser { self.parse_region_param(); let ty_params = self.parse_ty_params(); if self.eat(token::COLON) { - self.obsolete(copy self.span, ObsoleteClassTraits); + self.obsolete(*self.span, ObsoleteClassTraits); let _ = self.parse_trait_ref_list(token::LBRACE); } @@ -3063,7 +3065,7 @@ pub impl Parser { // It's a record-like struct. 
is_tuple_like = false; fields = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { match self.parse_class_item() { dtor_decl(ref blk, ref attrs, s) => { match the_dtor { @@ -3087,7 +3089,7 @@ pub impl Parser { } } self.bump(); - } else if self.token == token::LPAREN { + } else if *self.token == token::LPAREN { // It's a tuple-like struct. is_tuple_like = true; fields = do self.parse_unspanned_seq(token::LPAREN, token::RPAREN, @@ -3109,7 +3111,7 @@ pub impl Parser { } else { self.fatal(fmt!("expected `{`, `(`, or `;` after struct name \ but found `%s`", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } let actual_dtor = do the_dtor.map |dtor| { @@ -3139,13 +3141,13 @@ pub impl Parser { fn parse_single_class_item(vis: visibility) -> @struct_field { if self.eat_obsolete_ident("let") { - self.obsolete(copy self.last_span, ObsoleteLet); + self.obsolete(*self.last_span, ObsoleteLet); } let a_var = self.parse_instance_var(vis); - match self.token { + match *self.token { token::SEMI => { - self.obsolete(copy self.span, ObsoleteFieldTerminator); + self.obsolete(*self.span, ObsoleteFieldTerminator); self.bump(); } token::COMMA => { @@ -3153,11 +3155,11 @@ pub impl Parser { } token::RBRACE => {} _ => { - self.span_fatal(copy self.span, + self.span_fatal(*self.span, fmt!("expected `;`, `,`, or '}' but \ found `%s`", token_to_str(self.reader, - self.token))); + *self.token))); } } a_var @@ -3226,7 +3228,7 @@ pub impl Parser { // outer attributes can't occur on view items (or macros // invocations?) let mut first = true; - while self.token != term { + while *self.token != term { let mut attrs = self.parse_outer_attributes(); if first { attrs = vec::append(attrs_remaining, attrs); @@ -3243,7 +3245,7 @@ pub impl Parser { } _ => { self.fatal(~"expected item but found `" + - token_to_str(self.reader, self.token) + ~"`"); + token_to_str(self.reader, *self.token) + ~"`"); } } debug!("parse_mod_items: attrs=%?", attrs); @@ -3268,9 +3270,9 @@ pub impl Parser { } fn parse_item_mod(outer_attrs: ~[ast::attribute]) -> item_info { - let id_span = self.span; + let id_span = *self.span; let id = self.parse_ident(); - let info_ = if self.token == token::SEMI { + let info_ = if *self.token == token::SEMI { self.bump(); // This mod is in an external file. Let's go get it! 
let (m, attrs) = self.eval_src_mod(id, outer_attrs, id_span); @@ -3292,7 +3294,7 @@ pub impl Parser { match ::attr::first_attr_value_str_by_name(outer_attrs, ~"merge") { Some(path) => { let prefix = Path( - self.sess.cm.span_to_filename(copy self.span)); + self.sess.cm.span_to_filename(*self.span)); let prefix = prefix.dir_path(); let path = Path(copy *path); let (new_mod_item, new_attrs) = self.eval_src_mod_from_path( @@ -3337,9 +3339,9 @@ pub impl Parser { outer_attrs: ~[ast::attribute], id_sp: span) -> (ast::item_, ~[ast::attribute]) { - let prefix = Path(self.sess.cm.span_to_filename(copy self.span)); + let prefix = Path(self.sess.cm.span_to_filename(*self.span)); let prefix = prefix.dir_path(); - let mod_path = Path(".").push_many(self.mod_path_stack); + let mod_path = Path(".").push_many(*self.mod_path_stack); let default_path = self.sess.interner.get(id) + ~".rs"; let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, ~"path") { @@ -3456,7 +3458,7 @@ pub impl Parser { let mut items: ~[@foreign_item] = foreign_items; let mut initial_attrs = attrs_remaining; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let attrs = vec::append(initial_attrs, self.parse_outer_attributes()); initial_attrs = ~[]; @@ -3478,7 +3480,7 @@ pub impl Parser { // Parse the ABI. let abi_opt; - match self.token { + match *self.token { token::LIT_STR(copy found_abi) => { self.bump(); abi_opt = Some(found_abi); @@ -3492,21 +3494,21 @@ pub impl Parser { if self.is_keyword(~"mod") { must_be_named_mod = true; self.expect_keyword(~"mod"); - } else if self.token != token::LBRACE { - self.span_fatal(copy self.span, + } else if *self.token != token::LBRACE { + self.span_fatal(*self.span, fmt!("expected `{` or `mod` but found %s", - token_to_str(self.reader, self.token))); + token_to_str(self.reader, *self.token))); } - let (sort, ident) = match self.token { + let (sort, ident) = match *self.token { token::IDENT(*) => (ast::named, self.parse_ident()), _ => { if must_be_named_mod { - self.span_fatal(copy self.span, + self.span_fatal(*self.span, fmt!("expected foreign module name but \ found %s", token_to_str(self.reader, - self.token))); + *self.token))); } (ast::anonymous, @@ -3534,7 +3536,7 @@ pub impl Parser { match abi_opt { None => {} // OK. Some(_) => { - self.span_err(copy self.span, ~"an ABI may not be specified \ + self.span_err(*self.span, ~"an ABI may not be specified \ here"); } } @@ -3575,7 +3577,7 @@ pub impl Parser { fn parse_struct_def() -> @struct_def { let mut the_dtor: Option<(blk, ~[attribute], codemap::span)> = None; let mut fields: ~[@struct_field] = ~[]; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { match self.parse_class_item() { dtor_decl(ref blk, ref attrs, s) => { match the_dtor { @@ -3621,7 +3623,7 @@ pub impl Parser { let mut all_nullary = true, have_disr = false; let mut common_fields = None; - while self.token != token::RBRACE { + while *self.token != token::RBRACE { let variant_attrs = self.parse_outer_attributes(); let vlo = self.span.lo; @@ -3652,7 +3654,7 @@ pub impl Parser { // Parse a struct variant. 
all_nullary = false; kind = struct_variant_kind(self.parse_struct_def()); - } else if self.token == token::LPAREN { + } else if *self.token == token::LPAREN { all_nullary = false; let arg_tys = self.parse_unspanned_seq( token::LPAREN, token::RPAREN, @@ -3701,7 +3703,7 @@ pub impl Parser { self.parse_region_param(); let ty_params = self.parse_ty_params(); // Newtype syntax - if self.token == token::EQ { + if *self.token == token::EQ { self.bump(); let ty = self.parse_ty(false); self.expect(token::SEMI); @@ -3733,7 +3735,7 @@ pub impl Parser { } fn parse_fn_ty_sigil() -> Option { - match self.token { + match *self.token { token::AT => { self.bump(); Some(ManagedSigil) @@ -3879,7 +3881,7 @@ pub impl Parser { vis: visibility, span: mk_sp(lo, self.last_span.hi) }); - } else if macros_allowed && !self.is_any_keyword(copy self.token) + } else if macros_allowed && !self.is_any_keyword(*self.token) && self.look_ahead(1) == token::NOT && (is_plain_ident(self.look_ahead(2)) || self.look_ahead(2) == token::LPAREN @@ -3896,16 +3898,16 @@ pub impl Parser { // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if is_plain_ident(self.token) { + let id = if is_plain_ident(*self.token) { self.parse_ident() } else { token::special_idents::invalid // no special identifier }; // eat a matched-delimiter token tree: - let tts = match self.token { + let tts = match *self.token { token::LPAREN | token::LBRACE => { - let ket = token::flip_delimiter(copy self.token); - self.parse_unspanned_seq(copy self.token, ket, + let ket = token::flip_delimiter(*self.token); + self.parse_unspanned_seq(*self.token, ket, seq_sep_none(), |p| p.parse_token_tree()) } @@ -3925,7 +3927,7 @@ pub impl Parser { let mut s = ~"unmatched visibility `"; s += if visibility == public { ~"pub" } else { ~"priv" }; s += ~"`"; - self.span_fatal(copy self.last_span, s); + self.span_fatal(*self.last_span, s); } return iovi_none; }; @@ -3961,12 +3963,12 @@ pub impl Parser { let first_ident = self.parse_ident(); let mut path = ~[first_ident]; debug!("parsed view_path: %s", *self.id_to_str(first_ident)); - match self.token { + match *self.token { token::EQ => { // x = foo::bar self.bump(); path = ~[self.parse_ident()]; - while self.token == token::MOD_SEP { + while *self.token == token::MOD_SEP { self.bump(); let id = self.parse_ident(); path.push(id); @@ -3983,11 +3985,10 @@ pub impl Parser { token::MOD_SEP => { // foo::bar or foo::{a,b,c} or foo::* - while self.token == token::MOD_SEP { + while *self.token == token::MOD_SEP { self.bump(); - match copy self.token { - + match *self.token { token::IDENT(i, _) => { self.bump(); path.push(i); @@ -4038,7 +4039,7 @@ pub impl Parser { fn parse_view_paths() -> ~[@view_path] { let mut vp = ~[self.parse_view_path()]; - while self.token == token::COMMA { + while *self.token == token::COMMA { self.bump(); vp.push(self.parse_view_path()); } @@ -4048,7 +4049,7 @@ pub impl Parser { fn is_view_item() -> bool { let tok, next_tok; if !self.is_keyword(~"pub") && !self.is_keyword(~"priv") { - tok = self.token; + tok = *self.token; next_tok = self.look_ahead(1); } else { tok = self.look_ahead(1); @@ -4159,7 +4160,7 @@ pub impl Parser { } fn parse_str() -> @~str { - match copy self.token { + match *self.token { token::LIT_STR(s) => { self.bump(); self.id_to_str(s) } _ => self.fatal(~"expected string literal") } diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 4b13818974c30..41500d6a409a8 100644 --- 
a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -18,7 +18,7 @@ use hashmap::linear::LinearMap; use dvec::DVec; pub struct Interner { - priv mut map: LinearMap, + priv map: @mut LinearMap, priv vect: DVec, } @@ -26,7 +26,7 @@ pub struct Interner { pub impl Interner { static fn new() -> Interner { Interner { - map: LinearMap::new(), + map: @mut LinearMap::new(), vect: DVec(), } }
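
The recurring change across session.rs, filesearch.rs, and the parser files in this patch is the move from implicit-self methods to an explicit `&self` (or `&mut self`) first parameter. Below is a minimal sketch of that before/after shape, in the same pre-1.0 dialect as the patch and using a made-up `Counter` type purely for illustration; it will not compile with a modern Rust compiler.

    // Old style: methods in a `pub impl` took self implicitly.
    pub impl Counter {
        fn get() -> uint { self.n }              // `self` usable without being declared
    }

    // New style, as applied throughout this patch: the receiver is an
    // ordinary explicit parameter, so each method states how it borrows it.
    pub impl Counter {
        fn get(&self) -> uint { self.n }         // shared borrow of the receiver
        fn set(&mut self, n: uint) { self.n = n; }
    }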
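
A second pattern, visible in ext/pipes/proto.rs, swaps the growable `DVec` fields for managed, mutable unique vectors. The sketch below only restates forms that appear in the patch itself (the element type parameters shown on `DVec` are inferred from context, since the surrounding field types make the element types clear).

    // Before: DVec fields, constructed with DVec().
    //     states: DVec<state>,
    //     messages: DVec<message>,
    //     let messages = DVec();

    // After: owned vectors behind @mut boxes, so the struct needs no `mut` fields.
    //     states: @mut ~[state],
    //     messages: @mut ~[message],
    //     let messages = @mut ~[];

    // Element-wise mutation then goes through an explicit borrow of the box:
    //     let messages = &mut *self.messages;
    //     for vec::each_mut(*messages) |m| { ... }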
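
The largest group of changes, in parse/parser.rs and its callers, moves the Parser's mutable state (current token, spans, lookahead buffer, quote_depth, restriction, and so on) into `@mut` boxes. Reads that previously copied the token become dereferences, and writes go through the box; presumably this avoids the copies the patch itself flags ("XXX: This is a really bad copy!") while letting the Parser be handed around by immutable reference. A compressed sketch of the field and access changes, using only forms taken from the patch:

    // Field declarations, before and after:
    //     mut token: token::Token,         ->    token: @mut token::Token,
    //     mut quote_depth: uint,           ->    quote_depth: @mut uint,

    // Typical uses, before and after:
    //     match copy self.token { ... }    ->    match *self.token { ... }
    //     if self.token == token::GT {     ->    if *self.token == token::GT {
    //     p.quote_depth += 1u;             ->    *p.quote_depth += 1u;
    //     self.obsolete(copy self.span, X) ->    self.obsolete(*self.span, X)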
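
Finally, in ext/tt/macro_parser.rs the `MatcherPos` struct loses its `mut idx` and `mut up` fields; mutability is instead declared at the use site with `let mut` bindings before a position is modified. A sketch of that movement of mutability, with the surrounding matcher machinery elided:

    // Before: the struct itself carried mutable fields.
    //     mut idx: uint,
    //     mut up: matcher_pos_up,
    //     let ei_t = ei;
    //     ei_t.idx += 1;          // allowed because `idx` was a `mut` field

    // After: plain fields, with mutability on the local binding instead.
    //     idx: uint,
    //     up: matcher_pos_up,
    //     let mut ei_t = ei;
    //     ei_t.idx += 1;          // allowed because the binding is `mut`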