@@ -9,6 +9,7 @@ use rustc_ast::{self as ast, CRATE_NODE_ID};
 use rustc_attr_parsing::{AttributeParser, Early, ShouldEmit};
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_codegen_ssa::{CodegenResults, CrateInfo};
+use rustc_data_structures::indexmap::IndexMap;
 use rustc_data_structures::jobserver::Proxy;
 use rustc_data_structures::steal::Steal;
 use rustc_data_structures::sync::{AppendOnlyIndexVec, FreezeLock, WorkerLocal};
@@ -584,7 +585,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
     let result: io::Result<()> = try {
         // Build a list of files used to compile the output and
         // write Makefile-compatible dependency rules
-        let mut files: Vec<(String, u64, Option<SourceFileHash>)> = sess
+        let mut files: IndexMap<String, (u64, Option<SourceFileHash>)> = sess
             .source_map()
             .files()
             .iter()
@@ -593,10 +594,12 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
             .map(|fmap| {
                 (
                     escape_dep_filename(&fmap.name.prefer_local_unconditionally().to_string()),
-                    // This needs to be unnormalized,
-                    // as external tools wouldn't know how rustc normalizes them
-                    fmap.unnormalized_source_len as u64,
-                    fmap.checksum_hash,
+                    (
+                        // This needs to be unnormalized,
+                        // as external tools wouldn't know how rustc normalizes them
+                        fmap.unnormalized_source_len as u64,
+                        fmap.checksum_hash,
+                    ),
                 )
             })
             .collect();
@@ -614,7 +617,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
     fn hash_iter_files<P: AsRef<Path>>(
         it: impl Iterator<Item = P>,
         checksum_hash_algo: Option<SourceFileHashAlgorithm>,
-    ) -> impl Iterator<Item = (P, u64, Option<SourceFileHash>)> {
+    ) -> impl Iterator<Item = (P, (u64, Option<SourceFileHash>))> {
         it.map(move |path| {
             match checksum_hash_algo.and_then(|algo| {
                 fs::File::open(path.as_ref())
@@ -630,8 +633,8 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
                     })
                     .ok()
             }) {
-                Some((file_len, checksum)) => (path, file_len, Some(checksum)),
-                None => (path, 0, None),
+                Some((file_len, checksum)) => (path, (file_len, Some(checksum))),
+                None => (path, (0, None)),
             }
         })
     }
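
Note: the signature change above regroups each item into a `(key, value)` pair so the resulting iterator can be collected straight into a map keyed by path. A minimal standalone sketch of the same pattern, written against the `indexmap` crate with illustrative names (not the rustc helpers):

```rust
use indexmap::IndexMap;

// Pair each path with its (length, optional checksum) metadata. Emitting
// `(key, value)` tuples is what lets `collect()` build an IndexMap directly.
fn with_metadata<P: AsRef<str>>(
    paths: impl Iterator<Item = P>,
) -> impl Iterator<Item = (P, (u64, Option<String>))> {
    paths.map(|p| {
        let len = p.as_ref().len() as u64; // stand-in for the real file length
        (p, (len, None)) // no checksum computed in this sketch
    })
}

fn main() {
    let files: IndexMap<String, (u64, Option<String>)> =
        with_metadata(["a.rs".to_string(), "b.rs".to_string()].into_iter()).collect();
    assert_eq!(files.get("a.rs"), Some(&(4, None)));
}
```
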
@@ -705,18 +708,14 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
                 file,
                 "{}: {}\n",
                 path.display(),
-                files
-                    .iter()
-                    .map(|(path, _file_len, _checksum_hash_algo)| path.as_str())
-                    .intersperse(" ")
-                    .collect::<String>()
+                files.keys().map(String::as_str).intersperse(" ").collect::<String>()
             )?;
         }

         // Emit a fake target for each input file to the compilation. This
         // prevents `make` from spitting out an error if a file is later
         // deleted. For more info see #28735
-        for (path, _file_len, _checksum_hash_algo) in &files {
+        for path in files.keys() {
             writeln!(file, "{path}:")?;
         }

@@ -745,7 +744,7 @@ fn write_out_deps(tcx: TyCtxt<'_>, outputs: &OutputFilenames, out_filenames: &[P
         if sess.opts.unstable_opts.checksum_hash_algorithm().is_some() {
             files
                 .iter()
-                .filter_map(|(path, file_len, hash_algo)| {
+                .filter_map(|(path, (file_len, hash_algo))| {
                     hash_algo.map(|hash_algo| (path, file_len, hash_algo))
                 })
                 .try_for_each(|(path, file_len, checksum_hash)| {
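
Taken together, the diff swaps the `Vec` of `(path, len, checksum)` triples for an `IndexMap` keyed by path, which keeps insertion order (so the emitted dep-info stays deterministic) while letting the later passes iterate just the keys. A rough sketch of how the Makefile-style output falls out of that shape, using the `indexmap` crate directly and `join` in place of the unstable `Iterator::intersperse` used in the diff (file names and the output target below are illustrative, not taken from rustc):

```rust
use indexmap::IndexMap;
use std::fmt::Write;

fn main() {
    // Keyed by escaped source-file path; values mirror (unnormalized_source_len, checksum).
    let mut files: IndexMap<String, (u64, Option<String>)> = IndexMap::new();
    files.insert("lib.rs".into(), (120, None));
    files.insert("foo/mod.rs".into(), (64, None));

    let mut out = String::new();

    // "<output>: <inputs...>" line; IndexMap iterates keys in insertion order.
    let inputs = files.keys().map(String::as_str).collect::<Vec<_>>().join(" ");
    writeln!(out, "target.rmeta: {inputs}\n").unwrap();

    // Fake target per input so `make` tolerates deleted files (see #28735 in the diff).
    for path in files.keys() {
        writeln!(out, "{path}:").unwrap();
    }

    print!("{out}");
}
```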