let file = File::open(&entry.path()).chain_err(|| UNABLE_TO_OPEN_FILE)?;
let channel = Channel::read_from(BufReader::new(file))
.chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?;
let file = File::open(&entry.path())?;
let channel = Channel::read_from(BufReader::new(file))?;
for (num, ep) in episodes.iter().enumerate() {
writeln!(
&mut handle,
"({}) {}",
episodes.len() - num,
ep.title()
.chain_err(|| "unable to retrieve episode title")?
)
.ok();
}
episodes
.iter()
.filter(|ep| ep.title().is_some())
.enumerate()
.for_each(|(num, ep)| {
writeln!(
&mut handle,
"({}) {}",
episodes.len() - num,
ep.title().unwrap()
)
.ok();
});
}
}
Ok(())
}
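// Illustrative only (not in the original code): the numbering printed above
// counts down from the newest episode, because RSS feeds list newest first.
// A three-episode feed would print roughly:
// (3) Newest Episode
// (2) Middle Episode
// (1) Oldest Episode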
pub fn download_rss(config: &Config, url: &str) -> Result<()> {
let channel = download_rss_feed(url)?;
let mut download_limit = config.auto_download_limit as usize;
if 0 < download_limit {
println!("Subscribe auto-download limit set to: {}", download_limit);
println!("Downloading episode(s)...");
let podcast = Podcast::from(channel);
let episodes = podcast.episodes();
if episodes.len() < download_limit {
            download_limit = episodes.len()
        }
episodes[..download_limit].par_iter().for_each(|ep| {
if let Err(err) = download(podcast.title(), ep) {
eprintln!("Error downloading {}: {}", podcast.title(), err);
}
});
let mut resp = reqwest::get(&sub.url).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?;
let mut content: Vec<u8> = Vec::new();
resp.read_to_end(&mut content)
.chain_err(|| UNABLE_TO_READ_RESPONSE_TO_END)?;
let podcast = Podcast::from(
Channel::read_from(BufReader::new(&content[..]))
.chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_RESPONSE)?,
);
let resp = reqwest::get(&sub.url)?;
let podcast = Podcast::from(Channel::read_from(BufReader::new(resp))?);
.for_each(|ep: &Episode| {
if let Err(err) = download(podcast.title(), ep) {
eprintln!("Error downloading {}: {}", podcast.title(), err);
}
});
.map(|ep| download::download(podcast.title(), ep))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error: {}", err));
for podcast in state.subscriptions() {
writeln!(&mut handle, "{}", &podcast.title).ok();
}
Ok(())
}
pub fn download_range(state: &State, p_search: &str, e_search: &str) -> Result<()> {
let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)
.chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
let episodes_to_download = parse_download_episodes(e_search)
.chain_err(|| "unable to parse episodes to download")?;
podcast
.download_specific(&episodes_to_download)
.chain_err(|| "unable to download episodes")?;
}
    }
    Ok(())
}
pub fn download_episode_by_num(state: &State, p_search: &str, e_search: &str) -> Result<()> {
    let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
if let Ok(ep_num) = e_search.parse::<usize>() {
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)
.chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
let episodes = podcast.episodes();
download(podcast.title(), &episodes[episodes.len() - ep_num])
.chain_err(|| "unable to download episode")?;
}
}
} else {
{
let stdout = io::stdout();
let mut handle = stdout.lock();
writeln!(
&mut handle,
"Failed to parse episode number...\nAttempting to find episode by name..."
)
.ok();
}
download_episode_by_name(state, p_search, e_search, false)
.chain_err(|| "Failed to download episode.")?;
}
Ok(())
}
pub fn download(podcast_name: &str, episode: &Episode) -> Result<()> {
let stdout = io::stdout();
let mut path = get_podcast_dir()?;
path.push(podcast_name);
create_dir_if_not_exist(&path)?;
if let Some(url) = episode.url() {
if let Some(title) = episode.title() {
let mut filename = title;
filename.push_str(
episode
.extension()
.chain_err(|| "unable to retrieve extension")?,
);
path.push(filename);
if !path.exists() {
{
let mut handle = stdout.lock();
writeln!(&mut handle, "Downloading: {:?}", &path).ok();
}
let mut file = File::create(&path).chain_err(|| UNABLE_TO_CREATE_FILE)?;
let mut resp = reqwest::get(url).chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?;
let mut content: Vec<u8> = Vec::new();
resp.read_to_end(&mut content)
.chain_err(|| UNABLE_TO_READ_RESPONSE_TO_END)?;
file.write_all(&content)
.chain_err(|| UNABLE_TO_WRITE_FILE)?;
} else {
let mut handle = stdout.lock();
writeln!(&mut handle, "File already exists: {:?}", &path).ok();
}
}
}
Ok(())
}
pub fn download_episode_by_name(
state: &State,
p_search: &str,
e_search: &str,
download_all: bool,
) -> Result<()> {
let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)
.chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
let episodes = podcast.episodes();
if download_all {
episodes
.iter()
.filter(|ep| ep.title().is_some())
.filter(|ep| ep.title().unwrap().contains(e_search))
.for_each(|ep| {
download(podcast.title(), ep).unwrap_or_else(|_| {
eprintln!("Error downloading episode: {}", podcast.title())
});
})
} else {
let filtered_episodes: Vec<&Episode> = episodes
.iter()
.filter(|ep| ep.title().is_some())
.filter(|ep| {
ep.title()
.unwrap()
.to_lowercase()
.contains(&e_search.to_lowercase())
})
.collect();
if let Some(ep) = filtered_episodes.first() {
download(podcast.title(), ep).chain_err(|| "unable to download episode")?;
}
}
}
}
Ok(())
}
pub fn download_all(state: &State, p_search: &str) -> Result<()> {
let re_pod = Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)
.chain_err(|| UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE)?;
podcast
.download()
.chain_err(|| "unable to download podcast")?;
}
}
Ok(())
}
pub fn play_latest(state: &State, p_search: &str) -> Result<()> {
let re_pod: Regex =
Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
let mut path: PathBuf = get_xml_dir()?;
DirBuilder::new()
.recursive(true)
.create(&path)
.chain_err(|| UNABLE_TO_CREATE_DIRECTORY)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let mut file: File = File::open(&path).chain_err(|| UNABLE_TO_OPEN_FILE)?;
let mut content: Vec<u8> = Vec::new();
file.read_to_end(&mut content)
.chain_err(|| "unable to read file to end")?;
let podcast: Podcast = Podcast::from(
Channel::read_from(content.as_slice())
.chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?,
);
let episodes = podcast.episodes();
let episode = episodes[0].clone();
filename = episode
.title()
.chain_err(|| "unable to retrieve episode name")?;
filename.push_str(
episode
.extension()
.chain_err(|| "unable to retrieve episode extension")?,
);
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(
path.to_str()
.chain_err(|| "unable to convert path to &str")?,
)?;
} else {
launch_player(
episode
.url()
.chain_err(|| "unable to retrieve episode url")?,
)?;
}
return Ok(());
}
}
Ok(())
}
pub fn play_episode_by_num(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
let re_pod: Regex =
Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
if let Ok(ep_num) = ep_num_string.parse::<usize>() {
let mut path: PathBuf = get_xml_dir()?;
if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
eprintln!(
"Couldn't create directory: {}\nReason: {}",
path.to_str().unwrap(),
err
);
return Ok(());
}
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let mut file: File = File::open(&path).unwrap();
let mut content: Vec<u8> = Vec::new();
file.read_to_end(&mut content).unwrap();
let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
let episodes = podcast.episodes();
let episode = episodes[episodes.len() - ep_num].clone();
filename = episode.title().unwrap();
filename.push_str(episode.extension().unwrap());
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
} else {
launch_player(
episode
.url()
.chain_err(|| "unable to retrieve episode url")?,
)?;
}
return Ok(());
}
}
} else {
{
let stdout = io::stdout();
let mut handle = stdout.lock();
writeln!(&mut handle, "Failed to parse episode index number...").ok();
writeln!(&mut handle, "Attempting to find episode by name...").ok();
}
play_episode_by_name(state, p_search, ep_num_string)
.chain_err(|| "Failed to play episode by name.")?;
}
Ok(())
}
pub fn play_episode_by_name(state: &State, p_search: &str, ep_string: &str) -> Result<()> {
let re_pod: Regex =
Regex::new(&format!("(?i){}", &p_search)).chain_err(|| UNABLE_TO_PARSE_REGEX)?;
let mut path: PathBuf = get_xml_dir()?;
if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
eprintln!(
"Couldn't create directory: {}\nReason: {}",
path.to_str().unwrap(),
err
);
return Ok(());
}
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let mut file: File = File::open(&path).unwrap();
let mut content: Vec<u8> = Vec::new();
file.read_to_end(&mut content).unwrap();
let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
let episodes = podcast.episodes();
let filtered_episodes: Vec<&Episode> = episodes
.iter()
.filter(|ep| {
ep.title()
.unwrap_or_else(|| "".to_string())
.to_lowercase()
.contains(&ep_string.to_lowercase())
})
.collect();
if let Some(episode) = filtered_episodes.first() {
filename = episode.title().unwrap();
filename.push_str(episode.extension().unwrap());
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(path.to_str().chain_err(|| UNABLE_TO_CONVERT_TO_STR)?)?;
} else {
launch_player(
episode
.url()
.chain_err(|| "unable to retrieve episode url")?,
)?;
}
}
return Ok(());
}
}
Ok(())
}
let resp = reqwest::get("https://raw.githubusercontent.com/njaremko/podcast/master/Cargo.toml")
.chain_err(|| UNABLE_TO_GET_HTTP_RESPONSE)?
.text()
.chain_err(|| "unable to convert response to text")?;
let resp = reqwest::get("https://raw.githubusercontent.com/njaremko/podcast/master/Cargo.toml")?
    .text()?;
let config = resp
.parse::<toml::Value>()
.chain_err(|| "unable to parse toml")?;
let latest = config["package"]["version"]
.as_str()
.chain_err(|| UNABLE_TO_CONVERT_TO_STR)?;
let config = resp.parse::<toml::Value>()?;
let latest = config["package"]["version"]
    .as_str()
    .expect(&format!("Cargo.toml didn't have a version {:?}", config));
fn launch_player(url: &str) -> Result<()> {
if launch_mpv(url).is_err() {
return launch_vlc(url);
}
Ok(())
}
fn launch_mpv(url: &str) -> Result<()> {
if let Err(err) = Command::new("mpv")
.args(&["--audio-display=no", "--ytdl=no", url])
.status()
{
match err.kind() {
io::ErrorKind::NotFound => {
eprintln!("Couldn't open mpv\nTrying vlc...");
}
_ => eprintln!("Error: {}", err),
}
}
Ok(())
}
fn launch_vlc(url: &str) -> Result<()> {
if let Err(err) = Command::new("vlc").args(&["-I ncurses", url]).status() {
match err.kind() {
io::ErrorKind::NotFound => {
eprintln!("Couldn't open vlc...aborting");
}
_ => eprintln!("Error: {}", err),
}
}
Ok(())
}
"zsh" => app.gen_completions_to("podcast", Shell::Zsh, &mut io::stdout()),
"bash" => app.gen_completions_to("podcast", Shell::Bash, &mut io::stdout()),
"powershell" => app.gen_completions_to("podcast", Shell::PowerShell, &mut io::stdout()),
"fish" => app.gen_completions_to("podcast", Shell::Fish, &mut io::stdout()),
"elvish" => app.gen_completions_to("podcast", Shell::Elvish, &mut io::stdout()),
other => eprintln!("Completions are not available for {}", other),
"zsh" => {
app.gen_completions_to("podcast", Shell::Zsh, &mut io::stdout());
}
"bash" => {
app.gen_completions_to("podcast", Shell::Bash, &mut io::stdout());
}
"powershell" => {
app.gen_completions_to("podcast", Shell::PowerShell, &mut io::stdout());
}
"fish" => {
app.gen_completions_to("podcast", Shell::Fish, &mut io::stdout());
}
"elvish" => {
app.gen_completions_to("podcast", Shell::Elvish, &mut io::stdout());
}
other => {
println!("Completions are not available for {}", other);
}
pub fn handle_matches(
    version: &str,
    state: &mut State,
    config: &Config,
    app: &mut App,
    matches: &ArgMatches,
) -> Result<()> {
    match matches.subcommand_name() {
        Some("download") => {
            let download_matches = matches
                .subcommand_matches("download")
                .chain_err(|| "unable to find subcommand matches")?;
            let podcast = download_matches
                .value_of("PODCAST")
                .chain_err(|| "unable to find subcommand match")?;
            match download_matches.value_of("EPISODE") {
                Some(ep) => {
                    if String::from(ep).contains(|c| c == '-' || c == ',') {
                        download_range(&state, podcast, ep)?
                    } else if download_matches.occurrences_of("name") > 0 {
                        download_episode_by_name(
                            &state,
                            podcast,
                            ep,
                            download_matches.occurrences_of("all") > 0,
                        )?
                    } else {
                        download_episode_by_num(&state, podcast, ep)?
                    }
                }
                None => download_all(&state, podcast)?,
            }
        }
        Some("ls") | Some("list") => {
            let list_matches = matches
                .subcommand_matches("ls")
                .or_else(|| matches.subcommand_matches("list"))
                .chain_err(|| "unable to find subcommand matches")?;
            match list_matches.value_of("PODCAST") {
                Some(regex) => list_episodes(regex)?,
                None => list_subscriptions(&state)?,
            }
        }
        Some("play") => {
            let play_matches = matches
                .subcommand_matches("play")
                .chain_err(|| "unable to find subcommand matches")?;
            let podcast = play_matches
                .value_of("PODCAST")
                .chain_err(|| "unable to find subcommand match")?;
            match play_matches.value_of("EPISODE") {
                Some(episode) => {
                    if play_matches.occurrences_of("name") > 0 {
                        play_episode_by_name(&state, podcast, episode)?
                    } else {
                        play_episode_by_num(&state, podcast, episode)?
                    }
                }
                None => play_latest(&state, podcast)?,
            }
        }
        Some("sub") | Some("subscribe") => {
            let subscribe_matches = matches
                .subcommand_matches("sub")
                .or_else(|| matches.subcommand_matches("subscribe"))
                .chain_err(|| "unable to find subcommand matches")?;
            let url = subscribe_matches
                .value_of("URL")
                .chain_err(|| "unable to find subcommand match")?;
            state.subscribe(url).chain_err(|| "unable to subscribe")?;
            download_rss(&config, url)?;
        }
        Some("search") => println!("This feature is coming soon..."),
        Some("rm") => {
            let rm_matches = matches
                .subcommand_matches("rm")
                .chain_err(|| "unable to find subcommand matches")?;
            let regex = rm_matches.value_of("PODCAST").chain_err(|| "")?;
            remove_podcast(state, regex)?
        }
        Some("completion") => {
            let matches = matches
                .subcommand_matches("completion")
                .chain_err(|| "unable to find subcommand matches")?;
            match matches.value_of("SHELL") {
                Some(shell) => print_completion(app, shell),
                None => {
                    let shell_path_env = env::var("SHELL");
                    if let Ok(p) = shell_path_env {
                        let shell_path = Path::new(&p);
                        if let Some(shell) = shell_path.file_name() {
                            print_completion(app, shell.to_str().chain_err(|| format!("Unable to convert {:?} to string", shell))?)
                        }
                    }
pub fn download(state: &mut State, matches: &ArgMatches) -> Result<()> {
    let download_matches = matches.subcommand_matches("download").unwrap();
    let podcast = download_matches.value_of("PODCAST").unwrap();
    match download_matches.value_of("EPISODE") {
        Some(ep) => {
            if String::from(ep).contains(|c| c == '-' || c == ',') {
                download::download_range(&state, podcast, ep)?
            } else if download_matches.occurrences_of("name") > 0 {
                download::download_episode_by_name(
                    &state,
                    podcast,
                    ep,
                    download_matches.occurrences_of("all") > 0,
                )?
            } else {
                download::download_episode_by_num(&state, podcast, ep)?
            }
        }
        None => download::download_all(&state, podcast)?,
    }
    Ok(())
}
pub fn list(state: &mut State, matches: &ArgMatches) -> Result<()> {
    let list_matches = matches
        .subcommand_matches("ls")
        .or_else(|| matches.subcommand_matches("list"))
        .unwrap();
    match list_matches.value_of("PODCAST") {
        Some(regex) => list_episodes(regex)?,
        None => list_subscriptions(&state)?,
    }
    Ok(())
}
pub fn play(state: &mut State, matches: &ArgMatches) -> Result<()> {
    let play_matches = matches.subcommand_matches("play").unwrap();
    let podcast = play_matches.value_of("PODCAST").unwrap();
    match play_matches.value_of("EPISODE") {
        Some(episode) => {
            if play_matches.occurrences_of("name") > 0 {
                playback::play_episode_by_name(&state, podcast, episode)?
            } else {
                playback::play_episode_by_num(&state, podcast, episode)?
            }
        }
        None => playback::play_latest(&state, podcast)?,
    }
    Ok(())
}
pub fn subscribe(state: &mut State, config: &Config, matches: &ArgMatches) -> Result<()> {
    let subscribe_matches = matches
        .subcommand_matches("sub")
        .or_else(|| matches.subcommand_matches("subscribe"))
        .unwrap();
    let url = subscribe_matches.value_of("URL").unwrap();
    state.subscribe(url)?;
    download::download_rss(&config, url)?;
    Ok(())
}
pub fn remove(state: &mut State, matches: &ArgMatches) -> Result<()> {
    let rm_matches = matches.subcommand_matches("rm").unwrap();
    let regex = rm_matches.value_of("PODCAST").unwrap();
    remove_podcast(state, regex)?;
    Ok(())
}
pub fn complete(app: &mut App, matches: &ArgMatches) -> Result<()> {
    let matches = matches.subcommand_matches("completion").unwrap();
    match matches.value_of("SHELL") {
        Some(shell) => print_completion(app, shell),
        None => {
            let shell_path_env = env::var("SHELL");
            if let Ok(p) = shell_path_env {
                let shell_path = Path::new(&p);
                if let Some(shell) = shell_path.file_name() {
                    print_completion(app, shell.to_str().unwrap())
                }
            }
        }
    }
    Ok(())
}
use clap::{App, ArgMatches};
use crate::actions::*;
use crate::arg_parser;
use crate::commands;
use crate::errors::*;
use crate::structs::*;
pub fn parse_sub_command(matches: &ArgMatches) -> commands::Command {
match matches.subcommand_name() {
Some("download") => commands::Command::Download,
Some("ls") | Some("list") => commands::Command::List,
Some("play") => commands::Command::Play,
Some("sub") | Some("subscribe") => commands::Command::Subscribe,
Some("search") => commands::Command::Search,
Some("rm") => commands::Command::Remove,
Some("completion") => commands::Command::Complete,
Some("refresh") => commands::Command::Refresh,
Some("update") => commands::Command::Update,
_ => commands::Command::NoMatch,
}
}
pub fn handle_matches(
version: &str,
state: &mut State,
config: &Config,
app: &mut App,
matches: &ArgMatches,
) -> Result<()> {
let command = parse_sub_command(matches);
match command {
commands::Command::Download => {
arg_parser::download(state, matches)?;
}
commands::Command::List => {
arg_parser::list(state, matches)?;
}
commands::Command::Play => {
arg_parser::play(state, matches)?;
}
commands::Command::Subscribe => {
arg_parser::subscribe(state, config, matches)?;
}
commands::Command::Search => {
println!("This feature is coming soon...");
}
commands::Command::Remove => {
arg_parser::remove(state, matches)?;
}
commands::Command::Complete => {
arg_parser::complete(app, matches)?;
}
commands::Command::Refresh => {
update_rss(state);
}
commands::Command::Update => {
check_for_update(version)?;
}
_ => (),
};
Ok(())
}
pub enum Command {
Download,
List,
Play,
Subscribe,
Search,
Remove,
Complete,
Refresh,
Update,
NoMatch,
}
use crate::structs::*;
use crate::utils::*;
use std::collections::HashSet;
use std::fs::File;
use std::io::{self, BufReader, BufWriter, Write};
use failure::Error;
use rayon::prelude::*;
use regex::Regex;
use reqwest;
pub fn download_range(state: &State, p_search: &str, e_search: &str) -> Result<(), Error> {
let re_pod = Regex::new(&format!("(?i){}", &p_search))?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)?;
let downloaded = already_downloaded(podcast.title())?;
let episodes = podcast.episodes();
let episodes_to_download = parse_download_episodes(e_search)?;
episodes_to_download
.par_iter()
            // ep_num is 1-based and counts from the oldest episode; feeds list
            // newest first, so convert it to an index from the end of the list.
            .map(|ep_num| &episodes[episodes.len() - ep_num])
.filter(|e| e.title().is_some())
.filter(|e| !downloaded.contains(&e.title().unwrap()))
.map(|ep| download(podcast.title(), ep))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error: {}", err));
}
}
Ok(())
}
pub fn download_episode_by_num(state: &State, p_search: &str, e_search: &str) -> Result<(), Error> {
let re_pod = Regex::new(&format!("(?i){}", &p_search))?;
if let Ok(ep_num) = e_search.parse::<usize>() {
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)?;
let episodes = podcast.episodes();
download(podcast.title(), &episodes[episodes.len() - ep_num])?;
}
}
} else {
eprintln!("Failed to parse episode number...\nAttempting to find episode by name...");
download_episode_by_name(state, p_search, e_search, false)?;
}
Ok(())
}
pub fn download(podcast_name: &str, episode: &Episode) -> Result<(), Error> {
let mut path = get_podcast_dir()?;
path.push(podcast_name);
create_dir_if_not_exist(&path)?;
if let (Some(title), Some(url)) = (episode.title(), episode.url()) {
path.push(title);
episode.extension().map(|ext| path.set_extension(ext));
if !path.exists() {
println!("Downloading: {:?}", &path);
let resp = reqwest::get(url)?;
let file = File::create(&path)?;
let mut reader = BufReader::new(resp);
let mut writer = BufWriter::new(file);
io::copy(&mut reader, &mut writer)?;
} else {
eprintln!("File already exists: {:?}", &path);
}
}
Ok(())
}
pub fn download_episode_by_name(
state: &State,
p_search: &str,
e_search: &str,
download_all: bool,
) -> Result<(), Error> {
let re_pod = Regex::new(&format!("(?i){}", &p_search))?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)?;
let episodes = podcast.episodes();
let filtered_episodes =
episodes
.iter()
.filter(|ep| ep.title().is_some())
.filter(|ep| {
ep.title()
.unwrap()
.to_lowercase()
.contains(&e_search.to_lowercase())
});
if download_all {
filtered_episodes
.map(|ep| download(podcast.title(), ep))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error: {}", err));
} else {
filtered_episodes
.take(1)
.map(|ep| download(podcast.title(), ep))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error: {}", err));
}
}
}
Ok(())
}
pub fn download_all(state: &State, p_search: &str) -> Result<(), Error> {
let re_pod = Regex::new(&format!("(?i){}", &p_search))?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let podcast = Podcast::from_title(&subscription.title)?;
print!(
"You are about to download all episodes of {} (y/n): ",
podcast.title()
);
io::stdout().flush().ok();
let mut input = String::new();
io::stdin().read_line(&mut input)?;
if input.to_lowercase().trim() != "y" {
return Ok(());
}
let mut path = get_podcast_dir()?;
path.push(podcast.title());
already_downloaded(podcast.title()).map(|downloaded| {
podcast
.episodes()
.par_iter()
.filter(|e| e.title().is_some())
.filter(|e| !downloaded.contains(&e.title().unwrap()))
.map(|e| download(podcast.title(), e))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error: {}", err))
})?;
}
}
Ok(())
}
pub fn download_rss(config: &Config, url: &str) -> Result<(), Error> {
let channel = download_rss_feed(url)?;
let mut download_limit = config.auto_download_limit as usize;
if 0 < download_limit {
println!(
"Subscribe auto-download limit set to: {}\nDownloading episode(s)...",
download_limit
);
let podcast = Podcast::from(channel);
let episodes = podcast.episodes();
if episodes.len() < download_limit {
download_limit = episodes.len()
}
episodes[..download_limit]
.par_iter()
.map(|ep| download(podcast.title(), ep))
.flat_map(|e| e.err())
.for_each(|err| eprintln!("Error downloading {}: {}", podcast.title(), err));
}
Ok(())
}
fn parse_download_episodes(e_search: &str) -> Result<HashSet<usize>, Error> {
let input = String::from(e_search);
let mut ranges = Vec::<(usize, usize)>::new();
let mut elements = HashSet::<usize>::new();
let comma_separated: Vec<&str> = input.split(',').collect();
for elem in comma_separated {
if elem.contains('-') {
let range: Vec<usize> = elem
.split('-')
.map(|i| i.parse::<usize>())
.collect::<Result<Vec<usize>, std::num::ParseIntError>>()?;
ranges.push((range[0], range[1]));
} else {
elements.insert(elem.parse::<usize>()?);
}
}
for range in ranges {
// Include given episode in the download
for num in range.0..=range.1 {
elements.insert(num);
}
}
Ok(elements)
}
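// A possible unit test for parse_download_episodes (illustrative, not taken
// from the crate; assumes the HashSet-returning version shown above).
#[cfg(test)]
mod parse_download_episodes_tests {
    use super::*;

    #[test]
    fn expands_ranges_and_keeps_single_episodes() {
        // "1-3,5" should expand the inclusive range and keep the lone value.
        let episodes = parse_download_episodes("1-3,5").unwrap();
        let expected: HashSet<usize> = [1, 2, 3, 5].iter().cloned().collect();
        assert_eq!(episodes, expected);
    }
}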
pub mod actions;
pub mod match_handler;
pub mod migration_handler;
pub mod parser;
pub mod structs;
pub mod utils;
pub mod errors {
    error_chain! {}
}
mod actions;
mod arg_parser;
mod command_handler;
mod commands;
mod download;
mod migration_handler;
mod parser;
mod playback;
mod structs;
mod utils;
mod errors {
use failure::Error;
use std::result;
    pub type Result<T> = result::Result<T, Error>;
}
utils::create_directories().chain_err(|| "unable to create directories")?;
migration_handler::migrate_old_subscriptions()?;
let mut state = State::new(VERSION).chain_err(|| "unable to load state")?;
utils::create_directories()?;
migration_handler::migrate()?;
let mut state = State::new(VERSION)?;
match_handler::handle_matches(&VERSION, &mut state, &config, &mut app, &matches)?;
state.save().chain_err(|| "unable to save state")
command_handler::handle_matches(&VERSION, &mut state, &config, &mut app, &matches)?;
state.save()
pub fn migrate() -> Result<()> {
migrate_old_subscriptions()
}
use crate::errors::*;
use crate::structs::*;
use crate::utils::*;
use std::fs::{DirBuilder, File};
use std::io::{self, BufReader, Read, Write};
use std::process::Command;
use regex::Regex;
use rss::Channel;
use std::path::PathBuf;
fn launch_player(url: &str) -> Result<()> {
if launch_mpv(url).is_err() {
return launch_vlc(url);
}
Ok(())
}
fn launch_mpv(url: &str) -> Result<()> {
if let Err(err) = Command::new("mpv")
.args(&["--audio-display=no", "--ytdl=no", url])
.status()
{
let stderr = io::stderr();
let mut handle = stderr.lock();
match err.kind() {
io::ErrorKind::NotFound => {
writeln!(&mut handle, "Couldn't open mpv\nTrying vlc...").ok()
}
_ => writeln!(&mut handle, "Error: {}", err).ok(),
};
}
Ok(())
}
fn launch_vlc(url: &str) -> Result<()> {
if let Err(err) = Command::new("vlc").args(&["-I ncurses", url]).status() {
let stderr = io::stderr();
let mut handle = stderr.lock();
match err.kind() {
io::ErrorKind::NotFound => writeln!(&mut handle, "Couldn't open vlc...aborting").ok(),
_ => writeln!(&mut handle, "Error: {}", err).ok(),
};
}
Ok(())
}
pub fn play_latest(state: &State, p_search: &str) -> Result<()> {
let re_pod: Regex = Regex::new(&format!("(?i){}", &p_search))?;
let mut path: PathBuf = get_xml_dir()?;
DirBuilder::new().recursive(true).create(&path)?;
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let file: File = File::open(&path)?;
let podcast: Podcast = Podcast::from(Channel::read_from(BufReader::new(file))?);
let episodes = podcast.episodes();
let episode = episodes[0].clone();
filename = episode.title().unwrap();
filename.push_str(&episode.extension().unwrap());
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(path.to_str().unwrap())?;
} else {
launch_player(episode.url().unwrap())?;
}
return Ok(());
}
}
Ok(())
}
pub fn play_episode_by_num(state: &State, p_search: &str, ep_num_string: &str) -> Result<()> {
let re_pod: Regex = Regex::new(&format!("(?i){}", &p_search))?;
if let Ok(ep_num) = ep_num_string.parse::<usize>() {
let mut path: PathBuf = get_xml_dir()?;
let stderr = io::stderr();
let mut handle = stderr.lock();
if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
writeln!(
&mut handle,
"Couldn't create directory: {}\nReason: {}",
path.to_str().unwrap(),
err
)
.ok();
return Ok(());
}
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let file: File = File::open(&path).unwrap();
let podcast = Podcast::from(Channel::read_from(BufReader::new(file)).unwrap());
let episodes = podcast.episodes();
let episode = episodes[episodes.len() - ep_num].clone();
filename = episode.title().unwrap();
filename.push_str(&episode.extension().unwrap());
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(path.to_str().unwrap())?;
} else {
launch_player(episode.url().unwrap())?;
}
return Ok(());
}
}
} else {
{
let stdout = io::stdout();
let mut handle = stdout.lock();
writeln!(&mut handle, "Failed to parse episode index number...").ok();
writeln!(&mut handle, "Attempting to find episode by name...").ok();
}
play_episode_by_name(state, p_search, ep_num_string)?;
}
Ok(())
}
pub fn play_episode_by_name(state: &State, p_search: &str, ep_string: &str) -> Result<()> {
let re_pod: Regex = Regex::new(&format!("(?i){}", &p_search))?;
let mut path: PathBuf = get_xml_dir()?;
if let Err(err) = DirBuilder::new().recursive(true).create(&path) {
let stderr = io::stderr();
let mut handle = stderr.lock();
writeln!(
&mut handle,
"Couldn't create directory: {:?}\nReason: {}",
path, err
)
.ok();
return Ok(());
}
for subscription in &state.subscriptions {
if re_pod.is_match(&subscription.title) {
let mut filename: String = subscription.title.clone();
filename.push_str(".xml");
path.push(filename);
let mut file: File = File::open(&path).unwrap();
let mut content: Vec<u8> = Vec::new();
file.read_to_end(&mut content).unwrap();
let podcast = Podcast::from(Channel::read_from(content.as_slice()).unwrap());
let episodes = podcast.episodes();
let filtered_episodes: Vec<&Episode> = episodes
.iter()
.filter(|ep| {
ep.title()
.unwrap_or_else(|| "".to_string())
.to_lowercase()
.contains(&ep_string.to_lowercase())
})
.collect();
if let Some(episode) = filtered_episodes.first() {
filename = episode.title().unwrap();
filename.push_str(&episode.extension().unwrap());
path = get_podcast_dir()?;
path.push(podcast.title());
path.push(filename);
if path.exists() {
launch_player(path.to_str().unwrap())?;
} else {
launch_player(episode.url().unwrap())?;
}
}
return Ok(());
}
}
Ok(())
}
#[derive(Debug, PartialEq, Serialize, Deserialize)]
fn create_new_config_file(path: &PathBuf) -> Result<Config> {
writeln!(
io::stdout().lock(),
"Creating new config file at {:?}",
&path
)
.ok();
let download_limit = 1;
let file = File::create(&path)?;
let config = Config {
auto_download_limit: download_limit,
};
serde_yaml::to_writer(file, &config)?;
Ok(config)
}
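// Hypothetical helper (not part of the crate) showing the serde_yaml round
// trip for the config written above; assumes Config also derives Deserialize.
fn read_config_file(path: &PathBuf) -> Result<Config> {
    let file = File::open(path)?;
    // serde_yaml::Error converts into the failure-based Result alias via `?`.
    Ok(serde_yaml::from_reader(file)?)
}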
#[derive(Copy, Clone, Debug, PartialEq, Serialize, Deserialize)]
eprintln!("{}", err);
eprintln!("Failed to open config file, moving to {:?}", &new_path);
fs::rename(&path, new_path)
.chain_err(|| "Failed to move old config file...")?;
let stderr = io::stderr();
let mut handle = stderr.lock();
writeln!(
&mut handle,
"{}\nFailed to open config file, moving to {:?}",
err, &new_path
)
.ok();
fs::rename(&path, new_path)?;
fn create_new_config_file(path: &PathBuf) -> Result<Config> {
println!("Creating new config file at {:?}", &path);
let download_limit = 1;
let file = File::create(&path).chain_err(|| UNABLE_TO_CREATE_FILE)?;
let config = Config {
auto_download_limit: download_limit,
};
serde_yaml::to_writer(file, &config).chain_err(|| UNABLE_TO_WRITE_FILE)?;
Ok(config)
}
let file = File::open(&path).chain_err(|| UNABLE_TO_OPEN_FILE)?;
let mut state: State = match serde_json::from_reader(&file) {
Ok(val) => val,
// This will happen if the struct has changed between versions
Err(_) => {
let v: serde_json::Value = serde_json::from_reader(&file)
.chain_err(|| "unable to read json from string")?;
State {
version: String::from(version),
last_run_time: Utc::now(),
subscriptions: match serde_json::from_value(v["subscriptions"].clone()) {
Ok(val) => val,
Err(_) => serde_json::from_value(v["subs"].clone())
.chain_err(|| "unable to parse value from json")?,
},
}
}
};
let file = File::open(&path)?;
let mut state: State = serde_json::from_reader(BufReader::new(&file))?;
let serialized = serde_json::to_string(self).chain_err(|| "unable to serialize state")?;
{
let mut file = File::create(&path).chain_err(|| UNABLE_TO_CREATE_FILE)?;
file.write_all(serialized.as_bytes())
.chain_err(|| UNABLE_TO_WRITE_FILE)?;
}
let sub_file_path = get_sub_file()?;
fs::rename(&path, &sub_file_path)
.chain_err(|| format!("unable to rename file {:?} to {:?}", &path, &sub_file_path))?;
let file = File::create(&path)?;
serde_json::to_writer(BufWriter::new(file), self)?;
fs::rename(&path, get_sub_file()?)?;
let file = File::open(&path).chain_err(|| UNABLE_TO_OPEN_FILE)?;
Ok(Podcast::from(
Channel::read_from(BufReader::new(file))
.chain_err(|| UNABLE_TO_CREATE_CHANNEL_FROM_FILE)?,
))
let file = File::open(&path)?;
Ok(Podcast::from(Channel::read_from(BufReader::new(file))?))
pub fn download(&self) -> Result<()> {
print!(
"You are about to download all episodes of {} (y/n): ",
self.title()
);
io::stdout().flush().ok();
let mut input = String::new();
io::stdin()
.read_line(&mut input)
.chain_err(|| "unable to read stdin")?;
if input.to_lowercase().trim() != "y" {
return Ok(());
}
    let mut path = get_podcast_dir()?;
    path.push(self.title());
    match already_downloaded(self.title()) {
        Ok(downloaded) => {
            self.episodes().par_iter().for_each(|i| {
                if let Some(ep_title) = i.title() {
                    if !downloaded.contains(&ep_title) {
                        if let Err(err) = download(self.title(), i) {
                            eprintln!("{}", err);
                        }
                    }
                }
            });
        }
        Err(_) => {
            self.episodes().par_iter().for_each(|i| {
                if let Err(err) = download(self.title(), i) {
                    eprintln!("{}", err);
                }
            });
        }
    }
    Ok(())
}
pub fn download_specific(&self, episode_numbers: &[usize]) -> Result<()> {
    let mut path = get_podcast_dir()?;
    path.push(self.title());
    let downloaded = already_downloaded(self.title())?;
    let episodes = self.episodes();
    episode_numbers.par_iter().for_each(|ep_num| {
        if let Some(ep_title) = episodes[episodes.len() - ep_num].title() {
            if !downloaded.contains(&ep_title) {
                if let Err(err) = download(self.title(), &episodes[episodes.len() - ep_num]) {
                    eprintln!("{}", err);
                }
            }
        }
    });
    Ok(())
}
#[derive(Clone, Debug, PartialEq)]
pub struct Episode(Item);
impl From<Item> for Episode {
    fn from(item: Item) -> Episode {
        Episode(item)
    }
}
"audio/mpeg" => Some(".mp3"),
"audio/mp4" => Some(".m4a"),
"audio/ogg" => Some(".ogg"),
"audio/mpeg" => Some("mp3".into()),
"audio/mp4" => Some("m4a".into()),
"audio/aac" => Some("m4a".into()),
"audio/ogg" => Some("ogg".into()),
"audio/vorbis" => Some("ogg".into()),
"audio/opus" => Some("opus".into()),
pub const UNABLE_TO_PARSE_REGEX: &str = "unable to parse regex";
pub const UNABLE_TO_OPEN_FILE: &str = "unable to open file";
pub const UNABLE_TO_CREATE_FILE: &str = "unable to create file";
pub const UNABLE_TO_READ_FILE: &str = "unable to read file";
pub const UNABLE_TO_WRITE_FILE: &str = "unable to write file";
pub const UNABLE_TO_READ_FILE_TO_STRING: &str = "unable to read file to string";
pub const UNABLE_TO_READ_DIRECTORY: &str = "unable to read directory";
pub const UNABLE_TO_READ_ENTRY: &str = "unable to read entry";
pub const UNABLE_TO_CREATE_DIRECTORY: &str = "unable to create directory";
pub const UNABLE_TO_READ_RESPONSE_TO_END: &str = "unable to read response to end";
pub const UNABLE_TO_GET_HTTP_RESPONSE: &str = "unable to get http response";
pub const UNABLE_TO_CONVERT_TO_STR: &str = "unable to convert to &str";
pub const UNABLE_TO_REMOVE_FILE: &str = "unable to remove file";
pub const UNABLE_TO_CREATE_CHANNEL_FROM_RESPONSE: &str =
"unable to create channel from http response";
pub const UNABLE_TO_CREATE_CHANNEL_FROM_FILE: &str = "unable to create channel from xml file";
pub const UNABLE_TO_RETRIEVE_PODCAST_BY_TITLE: &str = "unable to retrieve podcast by title";
pub fn find_extension(input: &str) -> Option<&str> {
let tmp = String::from(input);
if tmp.ends_with(".mp3") {
Some(".mp3")
} else if tmp.ends_with(".m4a") {
Some(".m4a")
} else if tmp.ends_with(".wav") {
Some(".wav")
} else if tmp.ends_with(".ogg") {
Some(".ogg")
} else if tmp.ends_with(".opus") {
Some(".opus")
} else {
        None
    }
}
pub fn find_extension(input: &str) -> Option<String> {
let s: Vec<String> = input.split(".").map(|s| s.to_string()).collect();
if s.len() > 1 {
        return s.last().cloned();
    }
    None
}
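// Quick illustrative check of the String-based find_extension above (assumes
// the completed version that falls back to None).
#[cfg(test)]
mod find_extension_tests {
    use super::*;

    #[test]
    fn returns_text_after_the_last_dot() {
        assert_eq!(find_extension("episode.42.mp3"), Some("mp3".to_string()));
        assert_eq!(find_extension("no_extension"), None);
    }
}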
resp.read_to_end(&mut content)
.chain_err(|| "unable to read http response to end")?;
let channel = Channel::read_from(BufReader::new(&content[..]))
.chain_err(|| "unable to create channel from xml http response")?;
resp.read_to_end(&mut content)?;
let channel = Channel::read_from(BufReader::new(&content[..]))?;
}
pub fn parse_download_episodes(e_search: &str) -> Result<Vec<usize>> {
let input = String::from(e_search);
let mut ranges = Vec::<(usize, usize)>::new();
let mut elements = Vec::<usize>::new();
let comma_separated: Vec<&str> = input.split(',').collect();
for elem in comma_separated {
let temp = String::from(elem);
if temp.contains('-') {
let range: Vec<usize> = elem
.split('-')
.map(|i| i.parse::<usize>().chain_err(|| "unable to parse number"))
.collect::<Result<Vec<usize>>>()
.chain_err(|| "unable to collect ranges")?;
ranges.push((range[0], range[1]));
} else {
elements.push(
elem.parse::<usize>()
.chain_err(|| "unable to parse number")?,
);
}
}
for range in ranges {
        // Inclusive range (`..=`), so the given upper episode is downloaded too
for num in range.0..=range.1 {
elements.push(num);
}
}
    // Note: Vec::dedup only removes *consecutive* duplicates.
    elements.dedup();
Ok(elements)