Mirror of https://github.com/spikecodes/libreddit.git (synced 2026-01-24 21:44:05 +01:00)
src/main.rs (94 changed lines)
@@ -18,7 +18,7 @@ async fn style() -> HttpResponse {
 async fn robots() -> HttpResponse {
 	HttpResponse::Ok()
 		.header("Cache-Control", "public, max-age=1209600, s-maxage=86400")
-		.body(include_str!("../static/robots.txt"))
+		.body("User-agent: *\nAllow: /")
 }
 
 async fn favicon() -> HttpResponse {
@@ -42,7 +42,7 @@ async fn main() -> std::io::Result<()> {
 		match arg.split('=').collect::<Vec<&str>>()[0] {
 			"--address" | "-a" => address = arg.split('=').collect::<Vec<&str>>()[1].to_string(),
 			// "--redirect-https" | "-r" => https = true,
-			_ => {}
+			_ => (),
 		}
 	}
 
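Note: the `--flag=value` convention above splits each argument on `=` once and matches on the flag name. A minimal, self-contained sketch of the same pattern (the flag names and default address are illustrative, not taken from the commit):

	fn parse_address(args: &[String]) -> String {
		// Assumed default; the real binary may differ
		let mut address = "localhost:8080".to_string();
		for arg in args {
			// "--address=0.0.0.0:80" -> ["--address", "0.0.0.0:80"]
			let parts = arg.split('=').collect::<Vec<&str>>();
			match parts[0] {
				// Guard against a bare "--address" with no value
				"--address" | "-a" if parts.len() > 1 => address = parts[1].to_string(),
				_ => (),
			}
		}
		address
	}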
@@ -51,55 +51,61 @@ async fn main() -> std::io::Result<()> {
 
 	HttpServer::new(|| {
 		App::new()
-			// REDIRECT TO HTTPS
-			// .wrap(middleware::DefaultHeaders::new().header("Strict-Transport-Security", "max-age=31536000"))
-			// .wrap_fn(|req, srv| {
-			// 	let fut = srv.call(req);
-			// 	async {
-			// 		let mut res = fut.await?;
-			// 		if https {
-			// 			res.headers_mut().insert(
-			// 				actix_web::http::header::STRICT_TRANSPORT_SECURITY, actix_web::http::HeaderValue::from_static("max-age=31536000;"),
-			// 			);
-			// 		}
-			// 		Ok(res)
-			// 	}
-			// })
-			// TRAILING SLASH MIDDLEWARE
+			// Redirect to HTTPS
+			// .wrap_fn(|req, srv| { let fut = srv.call(req); async { let mut res = fut.await?; if https {} Ok(res) } })
+			// Append trailing slash and remove double slashes
 			.wrap(middleware::NormalizePath::default())
-			// DEFAULT SERVICE
+			// Default service in case no routes match
 			.default_service(web::get().to(|| utils::error("Nothing here".to_string())))
-			// GENERAL SERVICES
+			// Read static files
 			.route("/style.css/", web::get().to(style))
 			.route("/favicon.ico/", web::get().to(favicon))
 			.route("/thumbnail.svg/", web::get().to(thumbnail))
 			.route("/robots.txt/", web::get().to(robots))
-			// SETTINGS SERVICE
-			.route("/settings/", web::get().to(settings::get))
-			.route("/settings/", web::post().to(settings::set))
-			// PROXY SERVICE
+			// Proxy media through Libreddit
 			.route("/proxy/{url:.*}/", web::get().to(proxy::handler))
-			// SEARCH SERVICES
-			.route("/search/", web::get().to(search::find))
-			.route("r/{sub}/search/", web::get().to(search::find))
-			// USER SERVICES
-			.route("/u/{username}/", web::get().to(user::profile))
-			.route("/user/{username}/", web::get().to(user::profile))
-			// WIKI SERVICES
-			.route("/wiki/", web::get().to(subreddit::wiki))
-			.route("/wiki/{page}/", web::get().to(subreddit::wiki))
-			.route("/r/{sub}/wiki/", web::get().to(subreddit::wiki))
-			.route("/r/{sub}/wiki/{page}/", web::get().to(subreddit::wiki))
-			// SUBREDDIT SERVICES
-			.route("/r/{sub}/", web::get().to(subreddit::page))
-			.route("/r/{sub}/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
-			// POPULAR SERVICES
-			.route("/", web::get().to(subreddit::page))
-			.route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
-			// POST SERVICES
-			.route("/{id:.{5,6}}/", web::get().to(post::item))
-			.route("/r/{sub}/comments/{id}/{title}/", web::get().to(post::item))
-			.route("/r/{sub}/comments/{id}/{title}/{comment_id}/", web::get().to(post::item))
+			// Browse user profile
+			.route("/{scope:u|user}/{username}/", web::get().to(user::profile))
+			// Configure settings
+			.service(web::resource("/settings/").route(web::get().to(settings::get)).route(web::post().to(settings::set)))
+			// Subreddit services
+			.service(
+				web::scope("/r/{sub}")
+					// See posts and info about subreddit
+					.route("/", web::get().to(subreddit::page))
+					.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
+					// View post on subreddit
+					.service(
+						web::scope("/comments/{id}/{title}")
+							.route("/", web::get().to(post::item))
+							.route("/{comment_id}/", web::get().to(post::item)),
+					)
+					// Search inside subreddit
+					.route("/search/", web::get().to(search::find))
+					// View wiki of subreddit
+					.service(
+						web::scope("/wiki")
+							.route("/", web::get().to(subreddit::wiki))
+							.route("/{page}/", web::get().to(subreddit::wiki)),
+					),
+			)
+			// Universal services
+			.service(
+				web::scope("")
+					// Front page
+					.route("/", web::get().to(subreddit::page))
+					.route("/{sort:best|hot|new|top|rising|controversial}/", web::get().to(subreddit::page))
+					// View Reddit wiki
+					.service(
+						web::scope("/wiki")
+							.route("/", web::get().to(subreddit::wiki))
+							.route("/{page}/", web::get().to(subreddit::wiki)),
+					)
+					// Search all of Reddit
+					.route("/search/", web::get().to(search::find))
+					// Short link for post
+					.route("/{id:.{5,6}}/", web::get().to(post::item)),
+			)
 	})
 	.bind(&address)
 	.unwrap_or_else(|e| panic!("Cannot bind to the address {}: {}", address, e))
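Note: with the nested scopes introduced above, a request path is matched by prefix first, and the remainder is matched against the scope's inner routes. A minimal sketch of the pattern, built against the actix-web 3 API the commit uses (the handler and bind address are placeholders):

	use actix_web::{web, App, HttpResponse, HttpServer};

	// Placeholder handler standing in for subreddit::page, post::item, etc.
	async fn page() -> HttpResponse {
		HttpResponse::Ok().body("page")
	}

	#[actix_web::main]
	async fn main() -> std::io::Result<()> {
		HttpServer::new(|| {
			App::new().service(
				// The "/r/{sub}" prefix is matched first; the rest of the path
				// is then matched against the routes inside the scope, so
				// "/r/rust/hot/" resolves to the {sort} route below.
				web::scope("/r/{sub}")
					.route("/", web::get().to(page))
					.route("/{sort:hot|new|top|rising|controversial}/", web::get().to(page)),
			)
		})
		.bind("127.0.0.1:8080")?
		.run()
		.await
	}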
src/post.rs (28 changed lines)
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{cookie, error, format_num, format_url, media, parse_rich_flair, param, prefs, request, rewrite_url, val, Comment, Flags, Flair, Post, Preferences};
+use crate::utils::{cookie, error, format_num, format_url, media, param, parse_rich_flair, prefs, request, rewrite_url, val, Comment, Flags, Flair, Post, Preferences};
 use actix_web::{HttpRequest, HttpResponse};
 
 use async_recursion::async_recursion;
@@ -57,7 +57,7 @@ pub async fn item(req: HttpRequest) -> HttpResponse {
 			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		// If the Reddit API returns an error, exit and send error page to user
-		Err(msg) => error(msg.to_string()).await,
+		Err(msg) => error(msg).await,
 	}
 }
 
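Note: this is one instance of a commit-wide change of error types from `Result<_, &'static str>` to `Result<_, String>`, which lets messages pass straight into `error()` without a `.to_string()` at every call site. A small sketch of the difference (the function names are stand-ins):

	async fn error(msg: String) -> String {
		format!("error page: {}", msg)
	}

	async fn old_style(result: Result<String, &'static str>) -> String {
		match result {
			Ok(body) => body,
			// &'static str must be converted before the call
			Err(msg) => error(msg.to_string()).await,
		}
	}

	async fn new_style(result: Result<String, String>) -> String {
		match result {
			Ok(body) => body,
			// String moves through unchanged
			Err(msg) => error(msg).await,
		}
	}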
@@ -82,8 +82,12 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 			community: val(post, "subreddit"),
 			body: rewrite_url(&val(post, "selftext_html")),
 			author: val(post, "author"),
-			author_flair: Flair{
-				flair_parts: parse_rich_flair(val(post, "author_flair_type"), post["data"]["author_flair_richtext"].as_array(), post["data"]["author_flair_text"].as_str()),
+			author_flair: Flair {
+				flair_parts: parse_rich_flair(
+					val(post, "author_flair_type"),
+					post["data"]["author_flair_richtext"].as_array(),
+					post["data"]["author_flair_text"].as_str(),
+				),
 				background_color: val(post, "author_flair_background_color"),
 				foreground_color: val(post, "author_flair_text_color"),
 			},
@@ -92,8 +96,12 @@ async fn parse_post(json: &serde_json::Value) -> Post {
 			upvote_ratio: ratio as i64,
 			post_type,
 			thumbnail: format_url(val(post, "thumbnail").as_str()),
-			flair: Flair{
-				flair_parts: parse_rich_flair(val(post, "link_flair_type"), post["data"]["link_flair_richtext"].as_array(), post["data"]["link_flair_text"].as_str()),
+			flair: Flair {
+				flair_parts: parse_rich_flair(
+					val(post, "link_flair_type"),
+					post["data"]["link_flair_richtext"].as_array(),
+					post["data"]["link_flair_text"].as_str(),
+				),
 				background_color: val(post, "link_flair_background_color"),
 				foreground_color: if val(post, "link_flair_text_color") == "dark" {
 					"black".to_string()
@@ -145,8 +153,12 @@ async fn parse_comments(json: &serde_json::Value) -> Vec<Comment> {
 			score: format_num(score),
 			time: OffsetDateTime::from_unix_timestamp(unix_time).format("%b %d %Y %H:%M UTC"),
 			replies,
-			flair: Flair{
-				flair_parts: parse_rich_flair(val(&comment, "author_flair_type"), comment["data"]["author_flair_richtext"].as_array(), comment["data"]["author_flair_text"].as_str()),
+			flair: Flair {
+				flair_parts: parse_rich_flair(
+					val(&comment, "author_flair_type"),
+					comment["data"]["author_flair_richtext"].as_array(),
+					comment["data"]["author_flair_text"].as_str(),
+				),
 				background_color: val(&comment, "author_flair_background_color"),
 				foreground_color: val(&comment, "author_flair_text_color"),
 			},
src/proxy.rs
@@ -41,9 +41,9 @@ pub async fn handler(web::Path(b64): web::Path<String>) -> Result<HttpResponse>
 						Err(error::ErrorForbidden("Resource must be from Reddit"))
 					}
 				}
-				Err(_) => Err(error::ErrorBadRequest("Can't parse base64 into URL")),
+				_ => Err(error::ErrorBadRequest("Can't parse base64 into URL")),
 			}
 		}
-		Err(_) => Err(error::ErrorBadRequest("Can't decode base64")),
+		_ => Err(error::ErrorBadRequest("Can't decode base64")),
 	}
 }
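Note: the handler above decodes a base64 path segment into a URL and rejects anything not hosted by Reddit. A minimal sketch of that decode-then-validate chain (the domain checks here are an illustrative assumption, not the project's real allow-list):

	use url::Url;

	fn check_media_url(b64: &str) -> Result<String, &'static str> {
		match base64::decode(b64) {
			Ok(bytes) => {
				let decoded = String::from_utf8(bytes).map_err(|_| "Can't parse base64 into URL")?;
				match Url::parse(&decoded) {
					Ok(url) => {
						let domain = url.domain().unwrap_or_default();
						// Assumed Reddit media domains, for illustration only
						if domain.ends_with("redd.it") || domain.ends_with("redditstatic.com") {
							Ok(decoded)
						} else {
							Err("Resource must be from Reddit")
						}
					}
					_ => Err("Can't parse base64 into URL"),
				}
			}
			_ => Err("Can't decode base64"),
		}
	}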
src/search.rs
@@ -90,6 +90,6 @@ pub async fn find(req: HttpRequest) -> HttpResponse {
 				.render()
 				.unwrap(),
 		),
-		Err(msg) => error(msg.to_string()).await,
+		Err(msg) => error(msg).await,
 	}
 }
src/subreddit.rs
@@ -58,33 +58,33 @@ pub async fn page(req: HttpRequest) -> HttpResponse {
 			.unwrap();
 			HttpResponse::Ok().content_type("text/html").body(s)
 		}
-		Err(msg) => error(msg.to_string()).await,
+		Err(msg) => error(msg).await,
 	}
 }
 
 pub async fn wiki(req: HttpRequest) -> HttpResponse {
-	let sub = req.match_info().get("sub").unwrap_or("reddit.com");
-	let page = req.match_info().get("page").unwrap_or("index");
+	let sub = req.match_info().get("sub").unwrap_or("reddit.com").to_string();
+	let page = req.match_info().get("page").unwrap_or("index").to_string();
 	let path: String = format!("/r/{}/wiki/{}.json?raw_json=1", sub, page);
 
 	match request(&path).await {
 		Ok(res) => {
 			let s = WikiTemplate {
-				sub: sub.to_string(),
+				sub,
 				wiki: rewrite_url(res["data"]["content_html"].as_str().unwrap_or_default()),
-				page: page.to_string(),
+				page,
 				prefs: prefs(req),
 			}
 			.render()
 			.unwrap();
 			HttpResponse::Ok().content_type("text/html").body(s)
 		}
-		Err(msg) => error(msg.to_string()).await,
+		Err(msg) => error(msg).await,
 	}
 }
 
 // SUBREDDIT
-async fn subreddit(sub: &str) -> Result<Subreddit, &'static str> {
+async fn subreddit(sub: &str) -> Result<Subreddit, String> {
 	// Build the Reddit JSON API url
 	let path: String = format!("/r/{}/about.json?raw_json=1", sub);
 
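Note: moving the `.to_string()` conversions up to the binding site is what lets `WikiTemplate` use field-init shorthand (`sub,` and `page,`). A minimal sketch of the idiom (the struct and parameters are simplified stand-ins):

	struct WikiTemplate {
		sub: String,
		page: String,
	}

	fn build(sub_param: Option<&str>, page_param: Option<&str>) -> WikiTemplate {
		// Convert to owned Strings once, where the bindings are created
		let sub = sub_param.unwrap_or("reddit.com").to_string();
		let page = page_param.unwrap_or("index").to_string();
		// Field-init shorthand: local names fill the matching fields
		WikiTemplate { sub, page }
	}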
src/user.rs (17 changed lines)
@@ -1,5 +1,5 @@
 // CRATES
-use crate::utils::{error, fetch_posts, format_url, nested_val, param, prefs, request, Post, Preferences, User};
+use crate::utils::{error, fetch_posts, format_url, param, prefs, request, Post, Preferences, User};
 use actix_web::{HttpRequest, HttpResponse, Result};
 use askama::Template;
 use time::OffsetDateTime;
@@ -42,12 +42,12 @@ pub async fn profile(req: HttpRequest) -> HttpResponse {
 			HttpResponse::Ok().content_type("text/html").body(s)
 		}
 		// If there is an error show error page
-		Err(msg) => error(msg.to_string()).await,
+		Err(msg) => error(msg).await,
 	}
 }
 
 // USER
-async fn user(name: &str) -> Result<User, &'static str> {
+async fn user(name: &str) -> Result<User, String> {
 	// Build the Reddit JSON API path
 	let path: String = format!("/user/{}/about.json", name);
 
@@ -58,15 +58,18 @@ async fn user(name: &str) -> Result<User, &'static str> {
 	// Grab creation date as unix timestamp
 	let created: i64 = res["data"]["created"].as_f64().unwrap_or(0.0).round() as i64;
 
+	// nested_val function used to parse JSON from Reddit APIs
+	let about = |item| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();
+
 	// Parse the JSON output into a User struct
 	Ok(User {
 		name: name.to_string(),
-		title: nested_val(&res, "subreddit", "title"),
-		icon: format_url(nested_val(&res, "subreddit", "icon_img").as_str()),
+		title: about("title"),
+		icon: format_url(about("icon_img").as_str()),
 		karma: res["data"]["total_karma"].as_i64().unwrap_or(0),
 		created: OffsetDateTime::from_unix_timestamp(created).format("%b %d '%y"),
-		banner: nested_val(&res, "subreddit", "banner_img"),
-		description: nested_val(&res, "subreddit", "public_description"),
+		banner: about("banner_img"),
+		description: about("public_description"),
 	})
 }
 // If the Reddit API returns an error, exit this function
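Note: the `about` closure replaces the removed `nested_val` helper by capturing `res` and fixing the "subreddit" level, so each field lookup stays a one-liner. A minimal sketch with a made-up JSON value:

	use serde_json::json;

	fn main() {
		let res = json!({ "data": { "subreddit": { "title": "Rust" } } });

		// Capture `res`; absent keys fall back to an empty String
		let about = |item: &str| res["data"]["subreddit"][item].as_str().unwrap_or_default().to_string();

		assert_eq!(about("title"), "Rust");
		assert_eq!(about("missing"), "");
	}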
src/utils.rs (164 changed lines)
@@ -7,21 +7,21 @@ use base64::encode;
 use regex::Regex;
 use serde_json::{from_str, Value};
 use std::collections::HashMap;
-use time::{OffsetDateTime, Duration};
+use time::{Duration, OffsetDateTime};
 use url::Url;
 
 //
 // STRUCTS
 //
 // Post flair with content, background color and foreground color
-pub struct Flair{
-	pub flair_parts: Vec<FlairPart>,
+pub struct Flair {
+	pub flair_parts: Vec<FlairPart>,
 	pub background_color: String,
 	pub foreground_color: String,
 }
 
-pub struct FlairPart{
-	pub flair_part_type: String,
+pub struct FlairPart {
+	pub flair_part_type: String,
 	pub value: String,
 }
 
@@ -101,7 +101,7 @@ pub struct Params {
 #[derive(Template)]
 #[template(path = "error.html", escape = "none")]
 pub struct ErrorTemplate {
-	pub message: String,
+	pub msg: String,
 	pub prefs: Preferences,
 }
 
@@ -169,7 +169,7 @@ pub fn format_num(num: i64) -> String {
 	}
 }
 
-pub async fn media(data: &serde_json::Value) -> (String, String) {
+pub async fn media(data: &Value) -> (String, String) {
 	let post_type: &str;
 	let url = if !data["preview"]["reddit_video_preview"]["fallback_url"].is_null() {
 		post_type = "video";
@@ -183,11 +183,11 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
 				Some(gif) => {
 					post_type = "gif";
 					format_url(gif["source"]["url"].as_str().unwrap_or_default())
-				},
+				}
 				None => {
 					post_type = "image";
 					format_url(preview["source"]["url"].as_str().unwrap_or_default())
-				},
+				}
 			}
 		}
 	} else if data["is_self"].as_bool().unwrap_or_default() {
 		post_type = "self";
@@ -201,37 +201,42 @@ pub async fn media(data: &serde_json::Value) -> (String, String) {
 }
 
 pub fn parse_rich_flair(flair_type: String, rich_flair: Option<&Vec<Value>>, text_flair: Option<&str>) -> Vec<FlairPart> {
-	let mut result: Vec<FlairPart> = Vec::new();
-	if flair_type == "richtext" && !rich_flair.is_none() {
-		for part in rich_flair.unwrap() {
-			let flair_part_type = part["e"].as_str().unwrap_or_default().to_string();
-			let value = if flair_part_type == "text" {
-				part["t"].as_str().unwrap_or_default().to_string()
-			} else if flair_part_type == "emoji" {
-				format_url(part["u"].as_str().unwrap_or_default())
-			} else {
-				"".to_string()
-			};
-			result.push(FlairPart {
-				flair_part_type,
-				value,
-			});
-		}
-	} else if flair_type == "text" && !text_flair.is_none() {
-		result.push(FlairPart {
-			flair_part_type: "text".to_string(),
-			value: text_flair.unwrap().to_string(),
-		});
+	match flair_type.as_str() {
+		"richtext" => match rich_flair {
+			Some(rich) => rich
+				.iter()
+				.map(|part| {
+					let value = |name: &str| part[name].as_str().unwrap_or_default();
+					FlairPart {
+						flair_part_type: value("e").to_string(),
+						value: match value("e") {
+							"text" => value("t").to_string(),
+							"emoji" => format_url(value("u")),
+							_ => String::new(),
+						},
+					}
+				})
+				.collect::<Vec<FlairPart>>(),
+			None => Vec::new(),
+		},
+		"text" => match text_flair {
+			Some(text) => vec![FlairPart {
+				flair_part_type: "text".to_string(),
+				value: text.to_string(),
+			}],
+			None => Vec::new(),
+		},
+		_ => Vec::new(),
 	}
-	result
 }
 
 pub fn time(unix_time: i64) -> String {
 	let time = OffsetDateTime::from_unix_timestamp(unix_time);
 	let time_delta = OffsetDateTime::now_utc() - time;
 	if time_delta > Duration::days(1) {
 		if time_delta > Duration::days(30) {
 			time.format("%b %d '%y") // %b %e '%y
 		} else if time_delta.whole_days() > 0 {
 			format!("{}d ago", time_delta.whole_days())
 		} else if time_delta.whole_hours() > 0 {
 			format!("{}h ago", time_delta.whole_hours())
 		} else {
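Note: a quick sketch of how the rewritten parse_rich_flair behaves, assuming it and FlairPart (above) are in scope; the flair JSON is made up but shaped like Reddit's *_flair_richtext arrays:

	use serde_json::json;

	fn demo() {
		let rich = json!([
			{ "e": "text", "t": "Rustacean" },
			{ "e": "emoji", "u": "https://emoji.redditmedia.com/abc.png" }
		]);

		// Richtext flair: one FlairPart per array element
		let parts = parse_rich_flair("richtext".to_string(), rich.as_array(), None);
		assert_eq!(parts[0].flair_part_type, "text");
		assert_eq!(parts[0].value, "Rustacean");

		// Plain text flair: a single "text" part
		let plain = parse_rich_flair("text".to_string(), None, Some("Moderator"));
		assert_eq!(plain.len(), 1);
	}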
@@ -244,17 +249,12 @@ pub fn time(unix_time: i64) -> String {
 //
 
 // val() function used to parse JSON from Reddit APIs
-pub fn val(j: &serde_json::Value, k: &str) -> String {
+pub fn val(j: &Value, k: &str) -> String {
 	String::from(j["data"][k].as_str().unwrap_or_default())
 }
 
-// nested_val() function used to parse JSON from Reddit APIs
-pub fn nested_val(j: &serde_json::Value, n: &str, k: &str) -> String {
-	String::from(j["data"][n][k].as_str().unwrap_or_default())
-}
-
 // Fetch posts of a user or subreddit and return a vector of posts and the "after" value
-pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), &'static str> {
+pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post>, String), String> {
 	let res;
 	let post_list;
 
@@ -271,7 +271,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 	// Fetch the list of posts from the JSON response
 	match res["data"]["children"].as_array() {
 		Some(list) => post_list = list,
-		None => return Err("No posts found"),
+		None => return Err("No posts found".to_string()),
 	}
 
 	let mut posts: Vec<Post> = Vec::new();
@@ -292,8 +292,12 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 			community: val(post, "subreddit"),
 			body: rewrite_url(&val(post, "body_html")),
 			author: val(post, "author"),
-			author_flair: Flair{
-				flair_parts: parse_rich_flair(val(post, "author_flair_type"), post["data"]["author_flair_richtext"].as_array(), post["data"]["author_flair_text"].as_str()),
+			author_flair: Flair {
+				flair_parts: parse_rich_flair(
+					val(post, "author_flair_type"),
+					post["data"]["author_flair_richtext"].as_array(),
+					post["data"]["author_flair_text"].as_str(),
+				),
 				background_color: val(post, "author_flair_background_color"),
 				foreground_color: val(post, "author_flair_text_color"),
 			},
@@ -303,8 +307,12 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 			thumbnail: format_url(val(post, "thumbnail").as_str()),
 			media,
 			domain: val(post, "domain"),
-			flair: Flair{
-				flair_parts: parse_rich_flair(val(post, "link_flair_type"), post["data"]["link_flair_richtext"].as_array(), post["data"]["link_flair_text"].as_str()),
+			flair: Flair {
+				flair_parts: parse_rich_flair(
+					val(post, "link_flair_type"),
+					post["data"]["link_flair_richtext"].as_array(),
+					post["data"]["link_flair_text"].as_str(),
+				),
 				background_color: val(post, "link_flair_background_color"),
 				foreground_color: if val(post, "link_flair_text_color") == "dark" {
 					"black".to_string()
@@ -330,7 +338,7 @@ pub async fn fetch_posts(path: &str, fallback_title: String) -> Result<(Vec<Post
 
 pub async fn error(msg: String) -> HttpResponse {
 	let body = ErrorTemplate {
-		message: msg,
+		msg,
 		prefs: Preferences::default(),
 	}
 	.render()
@@ -339,34 +347,50 @@ pub async fn error(msg: String) -> HttpResponse {
 }
 
 // Make a request to a Reddit API and parse the JSON response
-pub async fn request(path: &str) -> Result<serde_json::Value, &'static str> {
+pub async fn request(path: &str) -> Result<Value, String> {
 	let url = format!("https://www.reddit.com{}", path);
 
-	// Send request using ureq
-	match ureq::get(&url).call() {
-		// If response is success
-		Ok(response) => {
-			// Parse the response from Reddit as JSON
-			match from_str(&response.into_string().unwrap()) {
-				Ok(json) => Ok(json),
-				Err(_) => {
-					#[cfg(debug_assertions)]
-					dbg!(format!("{} - Failed to parse page JSON data", url));
-					Err("Failed to parse page JSON data")
-				}
-			}
-		}
-		// If response is error
-		Err(ureq::Error::Status(_, _)) => {
-			#[cfg(debug_assertions)]
-			dbg!(format!("{} - Page not found", url));
-			Err("Page not found")
-		}
-		// If failed to send request
-		Err(e) => {
-			#[cfg(debug_assertions)]
-			dbg!(e);
-			Err("Couldn't send request to Reddit")
-		}
-	}
-}
+	// Send request using awc
+	async fn send(url: &str) -> Result<String, (bool, String)> {
+		let client = actix_web::client::Client::default();
+		let response = client.get(url).send().await;
+
+		match response {
+			Ok(mut payload) => {
+				// Get first number of response HTTP status code
+				match payload.status().to_string().chars().next() {
+					// If success
+					Some('2') => Ok(String::from_utf8(payload.body().limit(20_000_000).await.unwrap().to_vec()).unwrap()),
+					// If redirection
+					Some('3') => Err((true, payload.headers().get("location").unwrap().to_str().unwrap().to_string())),
+					// Otherwise
+					_ => Err((false, "Page not found".to_string())),
+				}
+			}
+			Err(_) => Err((false, "Couldn't send request to Reddit".to_string())),
+		}
+	}
+
+	fn err(u: String, m: String) -> Result<Value, String> {
+		#[cfg(debug_assertions)]
+		dbg!(format!("{} - {}", u, m));
+		Err(m)
+	};
+
+	fn json(url: String, body: String) -> Result<Value, String> {
+		match from_str(body.as_str()) {
+			Ok(json) => Ok(json),
+			Err(_) => err(url, "Failed to parse page JSON data".to_string()),
+		}
+	}
+
+	match send(&url).await {
+		Ok(body) => json(url, body),
+		Err((true, location)) => match send(location.as_str()).await {
+			Ok(body) => json(url, body),
+			Err((true, location)) => err(url, location),
+			Err((_, msg)) => err(url, msg),
+		},
+		Err((_, msg)) => err(url, msg),
+	}
+}
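Note: the rewritten request follows at most one redirect; a second 3xx in a row is reported as an error. A minimal sketch of that control flow with the network stubbed out (the stub and its URLs are invented for illustration):

	// Stub transport mirroring send()'s contract: Ok(body) on success,
	// Err((true, location)) on redirect, Err((false, message)) on failure.
	fn send_stub(url: &str) -> Result<String, (bool, String)> {
		match url {
			"/old" => Err((true, "/new".to_string())), // first hop redirects
			"/new" => Ok("{\"data\": 1}".to_string()), // second hop succeeds
			_ => Err((false, "Page not found".to_string())),
		}
	}

	fn fetch(url: &str) -> Result<String, String> {
		match send_stub(url) {
			Ok(body) => Ok(body),
			// Follow a single redirect...
			Err((true, location)) => match send_stub(&location) {
				Ok(body) => Ok(body),
				// ...but give up if it redirects again
				Err((true, _)) => Err("Too many redirects".to_string()),
				Err((_, msg)) => Err(msg),
			},
			Err((_, msg)) => Err(msg),
		}
	}

	fn main() {
		assert_eq!(fetch("/old").unwrap(), "{\"data\": 1}");
	}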