@@ -97,8 +97,31 @@ impl Api {
         Ok(response)
     }
 
+    /// Filters the download map by optional artist or album filters.
+    fn filter_download_map<'a>(
+        unfiltered: Option<DownloadsMap>,
+        items: &'a Vec<&'a Item>,
+        album: Option<&String>,
+        artist: Option<&String>,
+    ) -> DownloadsMap {
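+        // Each redownload entry is keyed by "{sale_item_type}{sale_item_id}", so look the
+        // item up in the cache and keep the entry only if it passes the optional,
+        // case-insensitive artist/album checks.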
+        unfiltered
+            .iter()
+            .flatten()
+            .filter_map(|(id, url)| {
+                items
+                    .iter()
+                    .find(|v| &format!("{}{}", v.sale_item_type, v.sale_item_id) == id)
+                    .filter(|item| artist.is_none_or(|v| item.band_name.eq_ignore_ascii_case(v)))
+                    .filter(|item| album.is_none_or(|v| item.item_title.eq_ignore_ascii_case(v)))
+                    .map(|_| (id.clone(), url.clone()))
+            })
+            .collect::<DownloadsMap>()
+    }
+
     /// Scrape a user's Bandcamp page to find download urls
-    pub fn get_download_urls(&self, name: &str) -> Result<BandcampPage, Box<dyn Error>> {
+    pub fn get_download_urls(
+        &self,
+        name: &str,
+        artist: Option<&String>,
+        album: Option<&String>,
+    ) -> Result<BandcampPage, Box<dyn Error>> {
         debug!("`get_download_urls` for Bandcamp page '{name}'");
 
         let body = self.request(Method::GET, &Self::bc_path(name))?.text()?;
@@ -115,6 +138,8 @@ impl Api {
             .expect("Failed to deserialise collection page data blob.");
         debug!("Successfully fetched Bandcamp page, and found + deserialised data blob");
 
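+        // Collect the cached purchase items so download entries can be matched against
+        // their artist (band_name) and album (item_title).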
+        let items = fanpage_data.item_cache.collection.values().collect::<Vec<&Item>>();
+
         match fanpage_data.fan_data.is_own_page {
             Some(true) => (),
             _ => bail!(format!(
@@ -123,11 +148,7 @@ impl Api {
         }
 
         // TODO: make sure this exists
-        let mut collection = fanpage_data
-            .collection_data
-            .redownload_urls
-            .clone()
-            .unwrap();
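+        // Keep only the downloads matching the requested artist/album, if any filter was given.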
+        let mut collection = Self::filter_download_map(
+            fanpage_data.collection_data.redownload_urls.clone(),
+            &items,
+            album,
+            artist,
+        );
 
         let skip_hidden_items = true;
         if skip_hidden_items {
@@ -142,7 +163,7 @@ impl Api {
                 // This should never be `None` thanks to the comparison above.
                 fanpage_data.collection_data.item_count.unwrap()
             );
-            let rest = self.get_rest_downloads_in_collection(&fanpage_data, "collection_items")?;
+            let rest = self.get_rest_downloads_in_collection(&fanpage_data, "collection_items", album, artist)?;
             collection.extend(rest);
         }
 
@@ -153,7 +174,7 @@ impl Api {
                 "Too many in `hidden_data`, and we're told not to skip, so we need to paginate ({} total)",
                 fanpage_data.hidden_data.item_count.unwrap()
             );
-            let rest = self.get_rest_downloads_in_collection(&fanpage_data, "hidden_items")?;
+            let rest = self.get_rest_downloads_in_collection(&fanpage_data, "hidden_items", album, artist)?;
             collection.extend(rest);
         }
 
@@ -171,6 +192,8 @@ impl Api {
         &self,
         data: &ParsedFanpageData,
         collection_name: &str,
+        album: Option<&String>,
+        artist: Option<&String>,
     ) -> Result<DownloadsMap, Box<dyn Error>> {
         debug!("Paginating results for {collection_name}");
         let collection_data = match collection_name {
@@ -199,8 +222,12 @@ impl Api {
                 .send()?
                 .json::<ParsedCollectionItems>()?;
 
-            trace!("Collected {} items", body.redownload_urls.clone().len());
-            collection.extend(body.redownload_urls);
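+            // Each page returns its own item list, so apply the same artist/album
+            // filtering to the paginated results before collecting them.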
+            let items = body.items.iter().collect::<Vec<_>>();
+            let redownload_urls =
+                Self::filter_download_map(Some(body.redownload_urls), &items, album, artist);
+            trace!("Collected {} items", redownload_urls.len());
+
+            collection.extend(redownload_urls);
             more_available = body.more_available;
             last_token = body.last_token;
         }