Skip to content

Commit ca9a5de

Browse files
authored
feat: add album/artist filtering to run command (#17)
* chore: remove stale TODO comment
* feat: add album/artist filtering to run command
* fix: remove unused DateTime import
* chore: set MSRV to 1.82.0
* chore: update nix flake (for newer rustc)
1 parent 2a2baf7 commit ca9a5de

File tree

6 files changed

+84
-47
lines changed

6 files changed

+84
-47
lines changed

Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
name = "bandsnatch"
33
version = "0.3.3"
44
edition = "2021"
5+
rust-version = "1.82.0"
56
description = "A CLI batch downloader for your Bandcamp collection"
67
authors = ["Ashlynne Mitchell <[email protected]>"]
78
license = "MIT"

flake.lock

Lines changed: 21 additions & 18 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/api/mod.rs

Lines changed: 37 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -97,8 +97,31 @@ impl Api {
9797
Ok(response)
9898
}
9999

100+
/// Filters the download map by optional artist or album filters.
101+
fn filter_download_map<'a>(
102+
unfiltered: Option<DownloadsMap>,
103+
items: &'a Vec<&'a Item>,
104+
album: Option<&String>,
105+
artist: Option<&String>
106+
) -> DownloadsMap {
107+
unfiltered
108+
.iter()
109+
.flatten()
110+
.filter_map(|(id, url)| {
111+
items.iter().find(|v| &format!("{}{}", v.sale_item_type, v.sale_item_id) == id)
112+
.filter(|item| {
113+
artist.is_none_or(|v| item.band_name.eq_ignore_ascii_case(v))
114+
})
115+
.filter(|item| {
116+
album.is_none_or(|v| item.item_title.eq_ignore_ascii_case(v))
117+
})
118+
.map(|_| (id.clone(), url.clone()))
119+
})
120+
.collect::<DownloadsMap>()
121+
}
122+
100123
/// Scrape a user's Bandcamp page to find download urls
101-
pub fn get_download_urls(&self, name: &str) -> Result<BandcampPage, Box<dyn Error>> {
124+
pub fn get_download_urls(&self, name: &str, artist: Option<&String>, album: Option<&String>) -> Result<BandcampPage, Box<dyn Error>> {
102125
debug!("`get_download_urls` for Bandcamp page '{name}'");
103126

104127
let body = self.request(Method::GET, &Self::bc_path(name))?.text()?;
@@ -115,6 +138,8 @@ impl Api {
115138
.expect("Failed to deserialise collection page data blob.");
116139
debug!("Successfully fetched Bandcamp page, and found + deserialised data blob");
117140

141+
let items = fanpage_data.item_cache.collection.values().collect::<Vec<&Item>>();
142+
118143
match fanpage_data.fan_data.is_own_page {
119144
Some(true) => (),
120145
_ => bail!(format!(
@@ -123,11 +148,7 @@ impl Api {
123148
}
124149

125150
// TODO: make sure this exists
126-
let mut collection = fanpage_data
127-
.collection_data
128-
.redownload_urls
129-
.clone()
130-
.unwrap();
151+
let mut collection = Self::filter_download_map(fanpage_data.collection_data.redownload_urls.clone(), &items, album, artist);
131152

132153
let skip_hidden_items = true;
133154
if skip_hidden_items {
@@ -142,7 +163,7 @@ impl Api {
142163
// This should never be `None` thanks to the comparison above.
143164
fanpage_data.collection_data.item_count.unwrap()
144165
);
145-
let rest = self.get_rest_downloads_in_collection(&fanpage_data, "collection_items")?;
166+
let rest = self.get_rest_downloads_in_collection(&fanpage_data, "collection_items", album, artist)?;
146167
collection.extend(rest);
147168
}
148169

@@ -153,7 +174,7 @@ impl Api {
153174
"Too many in `hidden_data`, and we're told not to skip, so we need to paginate ({} total)",
154175
fanpage_data.hidden_data.item_count.unwrap()
155176
);
156-
let rest = self.get_rest_downloads_in_collection(&fanpage_data, "hidden_items")?;
177+
let rest = self.get_rest_downloads_in_collection(&fanpage_data, "hidden_items", album, artist)?;
157178
collection.extend(rest);
158179
}
159180

@@ -171,6 +192,8 @@ impl Api {
171192
&self,
172193
data: &ParsedFanpageData,
173194
collection_name: &str,
195+
album: Option<&String>,
196+
artist: Option<&String>,
174197
) -> Result<DownloadsMap, Box<dyn Error>> {
175198
debug!("Paginating results for {collection_name}");
176199
let collection_data = match collection_name {
@@ -199,8 +222,12 @@ impl Api {
199222
.send()?
200223
.json::<ParsedCollectionItems>()?;
201224

202-
trace!("Collected {} items", body.redownload_urls.clone().len());
203-
collection.extend(body.redownload_urls);
225+
let items = body.items.iter().by_ref().collect::<Vec<_>>();
226+
let redownload_urls = Self::filter_download_map(Some(body.redownload_urls), &items, album, artist);
227+
trace!("Collected {} items", redownload_urls.len());
228+
229+
230+
collection.extend(redownload_urls);
204231
more_available = body.more_available;
205232
last_token = body.last_token;
206233
}

src/api/structs/digital_item.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
use crate::util::make_string_fs_safe;
22

3-
use chrono::{DateTime, Datelike, NaiveDateTime};
3+
use chrono::{Datelike, NaiveDateTime};
44
use serde::{self, Deserialize};
55
use std::{collections::HashMap, path::Path};
66

src/api/structs/mod.rs

Lines changed: 15 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,20 @@ pub struct ParsedFanpageData {
1717
pub collection_data: CollectionData,
1818
/// Data about items in the user's music collection that have been hidden.
1919
pub hidden_data: CollectionData,
20-
// pub item_cache: ItemCache,
20+
pub item_cache: ItemCache,
21+
}
22+
23+
#[derive(Deserialize, Debug)]
24+
pub struct ItemCache {
25+
pub collection: HashMap<String, Item>,
26+
}
27+
28+
#[derive(Deserialize, Debug)]
29+
pub struct Item {
30+
pub sale_item_id: u64,
31+
pub sale_item_type: String,
32+
pub band_name: String,
33+
pub item_title: String,
2134
}
2235

2336
#[derive(Deserialize, Debug)]
@@ -35,26 +48,13 @@ pub struct CollectionData {
3548
pub redownload_urls: Option<DownloadsMap>,
3649
}
3750

38-
// #[derive(Deserialize, Debug)]
39-
// pub struct ItemCache {
40-
// pub collection: HashMap<String, CachedItem>,
41-
// pub hidden: HashMap<String, CachedItem>,
42-
// }
43-
44-
// #[derive(Deserialize, Debug)]
45-
// pub struct CachedItem {
46-
// #[serde(deserialize_with = "deserialize_string_from_number")]
47-
// pub sale_item_id: String,
48-
// pub band_name: String,
49-
// pub item_title: String,
50-
// }
51-
5251
/// Structure of the data returned from Bandcamp's collection API.
5352
#[derive(Deserialize, Debug)]
5453
pub struct ParsedCollectionItems {
5554
pub more_available: bool,
5655
pub last_token: String,
5756
pub redownload_urls: DownloadsMap,
57+
pub items: Vec<Item>,
5858
}
5959

6060
#[derive(Deserialize, Debug)]

src/cmds/run.rs

Lines changed: 9 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,12 @@ macro_rules! skip_err {
3434

3535
#[derive(Debug, ClapArgs)]
3636
pub struct Args {
37+
#[arg(long, env = "BS_ALBUM")]
38+
album: Option<String>,
39+
40+
#[arg(long, env = "BS_ARTIST")]
41+
artist: Option<String>,
42+
3743
/// The audio format to download the files in.
3844
#[arg(short = 'f', long = "format", value_parser = PossibleValuesParser::new(FORMATS), env = "BS_FORMAT")]
3945
audio_format: String,
@@ -78,6 +84,8 @@ pub struct Args {
7884

7985
pub fn command(
8086
Args {
87+
album,
88+
artist,
8189
audio_format,
8290
cookies,
8391
debug,
@@ -117,7 +125,7 @@ pub fn command(
117125
root.join("bandcamp-collection-downloader.cache"),
118126
)));
119127

120-
let download_urls = api.get_download_urls(&user)?.download_urls;
128+
let download_urls = api.get_download_urls(&user, artist.as_ref(), album.as_ref())?.download_urls;
121129
let items = {
122130
// Lock gets freed after this block.
123131
let cache_content = cache.lock().unwrap().content()?;
@@ -139,8 +147,6 @@ pub fn command(
139147
let m = Arc::new(MultiProgress::new());
140148
let dry_run_results = Arc::new(Mutex::new(Vec::<String>::new()));
141149

142-
// TODO: dry_run
143-
144150
thread::scope(|scope| {
145151
for i in 0..jobs {
146152
let api = api.clone();

0 commit comments

Comments (0)