rustfmt
mcdallas committed Nov 3, 2022
1 parent 7ca87aa commit ce2de42
Showing 6 changed files with 70 additions and 119 deletions.
7 changes: 4 additions & 3 deletions src/auth.rs
@@ -16,7 +16,7 @@ pub struct Client<'a> {
/// Login password
password: &'a str,
/// Reqwest client
- session: &'a reqwest::Client
+ session: &'a reqwest::Client,
}

#[derive(Serialize, Deserialize, Debug)]
@@ -44,7 +44,7 @@ impl<'a> Client<'a> {
client_secret: &secret,
username: &username,
password: &password,
- session: &session
+ session: &session,
}
}

@@ -57,7 +57,8 @@ impl<'a> Client<'a> {
body.insert("password", self.password);
body.insert("grant_type", &grant_type);

- let auth = self.session
+ let auth = self
+ .session
.post("https://www.reddit.com/api/v1/access_token")
// base64 encoded <clientID>:<clientSecret> should be sent as a basic token
// along with the body of the message
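
Note: the hunk above only reformats the token request in src/auth.rs; the request itself sends the client ID and secret as an HTTP Basic credential alongside the form body, as the comment says. Below is a minimal standalone sketch of that pattern with reqwest. The function and variable names are illustrative placeholders, not items from this repository.

use std::collections::HashMap;

// Sketch of a password-grant token request against Reddit's API, assuming
// the same reqwest-based flow as the code above.
async fn fetch_token(
    session: &reqwest::Client,
    client_id: &str,
    client_secret: &str,
    username: &str,
    password: &str,
) -> Result<String, reqwest::Error> {
    let mut body = HashMap::new();
    body.insert("username", username);
    body.insert("password", password);
    body.insert("grant_type", "password");

    let response = session
        .post("https://www.reddit.com/api/v1/access_token")
        // basic_auth builds the base64-encoded "<clientID>:<clientSecret>"
        // Authorization header that the comment above refers to
        .basic_auth(client_id, Some(client_secret))
        .form(&body)
        .send()
        .await?;

    // The real client deserializes this into a token struct; returning the
    // raw body keeps the sketch dependency-free.
    response.text().await
}
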
46 changes: 18 additions & 28 deletions src/download.rs
@@ -1,5 +1,4 @@
use std::borrow::Borrow;
- use std::fmt::format;
use std::fs::File;
use std::path::Path;
use std::process::Command;
@@ -15,7 +14,7 @@ use url::{Position, Url};

use crate::errors::GertError;
use crate::structures::{GfyData, PostData};
- use crate::structures::{Summary, Post};
+ use crate::structures::{Post, Summary};
use crate::utils::{check_path_present, check_url_is_mp4};

static JPG_EXTENSION: &str = "jpg";
@@ -96,7 +95,6 @@ impl<'a> Downloader<'a> {
use_human_readable: bool,
ffmpeg_available: bool,
session: &'a reqwest::Client,
-
) -> Downloader<'a> {
Downloader {
posts,
@@ -123,10 +121,7 @@ impl<'a> Downloader<'a> {
}

/// Download and save medias from Reddit in parallel
- async fn download_collection(
- &self,
- collection: &Vec<Post>,
- ) -> Result<Summary, GertError> {
+ async fn download_collection(&self, collection: &Vec<Post>) -> Result<Summary, GertError> {
let summary = Arc::new(Mutex::new(Summary {
media_supported: 0,
media_downloaded: 0,
@@ -323,10 +318,7 @@ impl<'a> Downloader<'a> {
// name irrespective of how many times it's run. If run more than once, the
// media is overwritten by this method
let hash = md5::compute(url);
- format!(
- "{}/{}/{:x}.{}",
- self.data_directory, subreddit, hash, extension
- )
+ format!("{}/{}/{:x}.{}", self.data_directory, subreddit, hash, extension)
} else {
let canonical_title: String = title
.to_lowercase()
@@ -362,8 +354,7 @@ impl<'a> Downloader<'a> {
};
}

-
- /// Helper function that downloads and saves a single media from Reddit or Imgur
+ /// Helper function that downloads and saves a single media from Reddit or Imgur
async fn save_or_skip(&self, url: &str, file_name: &str) -> Result<MediaStatus, GertError> {
if check_path_present(&file_name) {
debug!("Media from url {} already downloaded. Skipping...", url);
@@ -378,7 +369,7 @@ impl<'a> Downloader<'a> {
}
}

- /// Download media from the given url and save to data directory. Also create data directory if not present already
+ /// Download media from the given url and save to data directory. Also create data directory if not present already
async fn download_media(&self, file_name: &str, url: &str) -> Result<bool, GertError> {
// create directory if it does not already exist
// the directory is created relative to the current working directory
@@ -388,11 +379,8 @@ impl<'a> Downloader<'a> {
Ok(_) => (),
Err(_e) => return Err(GertError::CouldNotCreateDirectory),
}
-
- let maybe_response = self.session
- .get(url)
- .send()
- .await;
+
+ let maybe_response = self.session.get(url).send().await;
if let Ok(response) = maybe_response {
debug!("URL Response: {:#?}", response);
let maybe_data = response.bytes().await;
@@ -422,7 +410,7 @@ impl<'a> Downloader<'a> {
Ok(status)
}

- /// Convert Gfycat/Redgifs GIFs into mp4 URLs for download
+ /// Convert Gfycat/Redgifs GIFs into mp4 URLs for download
async fn gfy_to_mp4(&self, url: &str) -> Result<Option<SupportedMedia>, GertError> {
let api_prefix =
if url.contains(GFYCAT_DOMAIN) { GFYCAT_API_PREFIX } else { REDGIFS_API_PREFIX };
@@ -433,9 +421,7 @@ impl<'a> Downloader<'a> {
debug!("GFY API URL: {}", api_url);

// talk to gfycat API and get GIF information
- let response = self.session.get(&api_url)
- .send()
- .await?;
+ let response = self.session.get(&api_url).send().await?;
// if the gif is not available anymore, Gfycat might send
// a 404 response. Proceed to get the mp4 URL only if the
// response was HTTP 200
@@ -454,7 +440,7 @@ impl<'a> Downloader<'a> {
}
}

- // Get reddit video information and optionally the audio track if it exists
+ // Get reddit video information and optionally the audio track if it exists
async fn get_reddit_video(&self, url: &str) -> Result<Option<SupportedMedia>, GertError> {
let maybe_dash_video = url.split("/").last();
if let Some(dash_video) = maybe_dash_video {
@@ -568,7 +554,9 @@ impl<'a> Downloader<'a> {
if let Some(v) = &m.reddit_video {
let fallback_url =
String::from(&v.fallback_url).replace("?source=fallback", "");
- if let Some(supported_media) = self.get_reddit_video(&fallback_url).await? {
+ if let Some(supported_media) =
+ self.get_reddit_video(&fallback_url).await?
+ {
media.push(supported_media);
}
}
@@ -589,8 +577,10 @@ impl<'a> Downloader<'a> {
);
image_urls.push(image_url);
}
- let supported_media =
- SupportedMedia { components: image_urls, media_type: MediaType::RedditImage };
+ let supported_media = SupportedMedia {
+ components: image_urls,
+ media_type: MediaType::RedditImage,
+ };
media.push(supported_media);
}
}
@@ -679,4 +669,4 @@ impl<'a> Downloader<'a> {

Ok(media)
}
- }
+ }
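
Aside: one of the hunks above reformats the call that builds a media file name from an md5 hash of its URL, which is what makes repeated runs overwrite a file rather than download it twice. The sketch below isolates that naming scheme; the directory, subreddit, and URL values are illustrative, and only the md5 crate already used by this file is assumed.

use std::path::Path;

// The md5 digest of the media URL becomes the file stem, so the same URL
// always maps to the same path on disk.
fn generate_file_name(data_directory: &str, subreddit: &str, url: &str, extension: &str) -> String {
    let hash = md5::compute(url);
    // {:x} renders the 128-bit digest as lowercase hex
    format!("{}/{}/{:x}.{}", data_directory, subreddit, hash, extension)
}

fn main() {
    let file_name = generate_file_name("data", "rust", "https://i.redd.it/example.jpg", "jpg");
    // Mirrors the save_or_skip check: if the file is already present, skip it.
    if Path::new(&file_name).exists() {
        println!("{} already downloaded, skipping", file_name);
    } else {
        println!("would download to {}", file_name);
    }
}
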
67 changes: 32 additions & 35 deletions src/main.rs
@@ -9,22 +9,21 @@ use auth::Client;
use crate::download::Downloader;
use crate::errors::GertError;
use crate::errors::GertError::DataDirNotFound;
+ use crate::structures::Post;
+ use crate::subreddit::Subreddit;
use crate::user::User;
use crate::utils::*;
- use crate::subreddit::Subreddit;
- use crate::structures::Post;

mod auth;
mod download;
mod errors;
mod structures;
+ mod subreddit;
mod user;
mod utils;
- mod subreddit;

#[tokio::main]
async fn main() -> Result<(), GertError> {
-
let matches = App::new("Gert")
.version(crate_version!())
.author("Mike Dallas")
@@ -35,7 +34,7 @@ async fn main() -> Result<(), GertError> {
.long("from-env")
.value_name("ENV_FILE")
.help("Set a custom .env style file with secrets")
- .takes_value(true)
+ .takes_value(true),
)
.arg(
Arg::with_name("match")
@@ -92,7 +91,7 @@ async fn main() -> Result<(), GertError> {
.value_delimiter(",")
.help("Download media from these subreddit")
.takes_value(true)
- .required(true)
+ .required(true),
)
.arg(
Arg::with_name("period")
@@ -102,7 +101,7 @@ async fn main() -> Result<(), GertError> {
.help("Time period to download from")
.takes_value(true)
.possible_values(&["now", "hour", "day", "week", "month", "year", "all"])
.default_value("day")
.default_value("day"),
)
.arg(
Arg::with_name("feed")
@@ -112,7 +111,7 @@ async fn main() -> Result<(), GertError> {
.help("Feed to download from")
.takes_value(true)
.possible_values(&["hot", "new", "top", "rising"])
.default_value("hot")
.default_value("hot"),
)
.get_matches();

@@ -133,14 +132,13 @@ async fn main() -> Result<(), GertError> {
Some(pattern) => regex::Regex::new(pattern).expect("Invalid regex pattern"),
None => regex::Regex::new(".*").unwrap(),
};

// initialize logger for the app and set logging level to info if no environment variable present
let env = Env::default().filter("RS_LOG").default_filter_or("info");
env_logger::Builder::from_env(env).init();

- // if the option is --debug, show the configuration and return immediately
+ // if the option is --debug, show the configuration and return immediately
if matches.is_present("debug") {

info!("Current configuration:");
info!("ENVIRONMENT_FILE = {}", &env_file.unwrap_or("None"));
info!("DATA_DIRECTORY = {}", &data_directory);
@@ -172,48 +170,49 @@ async fn main() -> Result<(), GertError> {
}

let session = match env_file {
-
Some(envfile) => {
let user_env = parse_env_file(envfile)?;

let client_sess = reqwest::Client::builder()
- .cookie_store(true)
- .user_agent(get_user_agent_string(&user_env.username))
- .build()?;
-
- let client = Client::new(&user_env.client_id, &user_env.client_secret, &user_env.username, &user_env.password, &client_sess);
+ .cookie_store(true)
+ .user_agent(get_user_agent_string(&user_env.username))
+ .build()?;
+
+ let client = Client::new(
+ &user_env.client_id,
+ &user_env.client_secret,
+ &user_env.username,
+ &user_env.password,
+ &client_sess,
+ );
// login to reddit using the credentials provided and get API bearer token
let auth = client.login().await?;

info!("Successfully logged in to Reddit as {}", user_env.username);
debug!("Authentication details: {:#?}", auth);

// get information about the user to display
let user = User::new(&auth, &user_env.username, &client_sess);

let user_info = user.about().await?;

info!("The user details are: ");
info!("Account name: {:#?}", user_info.data.name);
info!("Account ID: {:#?}", user_info.data.id);
info!("Comment Karma: {:#?}", user_info.data.comment_karma);
info!("Link Karma: {:#?}", user_info.data.link_karma);

client_sess
-
- },
+ }
None => {
info!("No environment file provided, using default values");
reqwest::Client::builder()
- .cookie_store(true)
- .user_agent(get_user_agent_string("anon"))
- .build()?
+ .cookie_store(true)
+ .user_agent(get_user_agent_string("anon"))
+ .build()?
}
};

-
-
-
if !check_path_present(&data_directory) {
return Err(DataDirNotFound);
}
@@ -224,19 +223,17 @@ async fn main() -> Result<(), GertError> {
Videos hosted by Reddit use separate video and audio streams. \
Ffmpeg needs be installed to combine the audio and video into a single mp4."
);
- }
- ;
+ };

info!("Starting data gathering from Reddit. This might take some time. Hold on....");

let mut posts: Vec<Post> = Vec::with_capacity(limit as usize * subreddits.len());
for subreddit in &subreddits {
let listing = Subreddit::new(subreddit).get_feed(feed, limit, period).await?;
posts.extend(
- listing.data.children
- .into_iter()
- .filter(|post| post.data.url.is_some())
- .filter(|post| pattern.is_match(post.data.title.as_ref().unwrap_or(&"".to_string())))
+ listing.data.children.into_iter().filter(|post| post.data.url.is_some()).filter(
+ |post| pattern.is_match(post.data.title.as_ref().unwrap_or(&"".to_string())),
+ ),
);
}

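
Aside: the last hunk shown reformats the filter chain that keeps only posts with a media URL and a title matching the user-supplied regex before anything is downloaded. The sketch below isolates that logic; PostData here is a simplified stand-in for the crate's real structure, and the sample values are made up.

use regex::Regex;

struct PostData {
    url: Option<String>,
    title: Option<String>,
}

// Keep only posts that have a media URL and whose title matches the pattern.
fn filter_posts(children: Vec<PostData>, pattern: &Regex) -> Vec<PostData> {
    children
        .into_iter()
        .filter(|post| post.url.is_some())
        .filter(|post| pattern.is_match(post.title.as_deref().unwrap_or("")))
        .collect()
}

fn main() {
    let pattern = Regex::new("(?i)rust").unwrap();
    let posts = vec![
        PostData { url: Some("https://i.redd.it/a.jpg".into()), title: Some("rustfmt run".into()) },
        PostData { url: None, title: Some("no media here".into()) },
    ];
    println!("kept {} post(s)", filter_posts(posts, &pattern).len());
}
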
