Skip to content

Commit

Permalink
[ws_util] Handle possible panics when fetching index from JSON
Browse files Browse the repository at this point in the history
Using the square-bracket indexing operator could panic if the given index is out of bounds or the key does not exist.

Other changes:
- [net_util] Introduce `JsonTryGet` trait to simplify getting values from JSON
  • Loading branch information
m4heshd committed Apr 17, 2024
1 parent 328eb61 commit 42da300
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 9 deletions.
21 changes: 20 additions & 1 deletion backend/src/net_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use axum::{http::Method, Router};
use axum_embed::{FallbackBehavior::Redirect, ServeEmbed};
use once_cell::sync::Lazy;
use reqwest::{header::HeaderMap, Client, Proxy, Response};
use serde_json::{json, Value};
use serde_json::{json, value::Index, Value};
use tokio::net::TcpListener;
use tower_http::cors::{Any, CorsLayer};

Expand All @@ -35,6 +35,25 @@ pub struct LoginSession {
// Types
pub type JSON = Value;

// Traits
/// Allows getting a value out of a JSON using an index, but returns a `Value`
/// instead of an `Option`.
///
/// This exists to avoid the panic paths of the square-bracket indexing
/// operator: failed lookups yield `Value::Null` (or a caller-supplied
/// fallback for the mutable variant) instead of panicking.
pub trait JsonTryGet {
    /// Returns a reference to the value at `index`, or a reference to
    /// `Value::Null` if the index is out of bounds / the key is missing.
    fn try_get<I: Index>(&self, index: I) -> &JSON;

    /// Returns a mutable reference to the value at `index`, or the
    /// caller-supplied `alt` fallback if the lookup fails. `alt` must live
    /// as long as the borrow of `self`, hence the shared lifetime `'a`.
    fn try_get_mut<'a, I: Index>(&'a mut self, index: I, alt: &'a mut JSON) -> &'a mut JSON;
}

impl JsonTryGet for JSON {
    /// Panic-free lookup: yields the value at `index`, or `Value::Null`
    /// when the index is out of bounds or the key is absent.
    fn try_get<I: Index>(&self, index: I) -> &JSON {
        match self.get(index) {
            Some(value) => value,
            None => &JSON::Null,
        }
    }

    /// Panic-free mutable lookup: yields the value at `index`, or the
    /// caller-supplied `alt` fallback when the lookup fails.
    fn try_get_mut<'a, I: Index>(&'a mut self, index: I, alt: &'a mut JSON) -> &'a mut JSON {
        match index.index_into_mut(self) {
            Some(value) => value,
            None => alt,
        }
    }
}

// Statics
static HTTP_CLIENT: Lazy<Client> = Lazy::new(Client::new);
static HTTP_PROXIED_CLIENT: Lazy<ArcSwap<Client>> = Lazy::new(|| {
Expand Down
18 changes: 10 additions & 8 deletions backend/src/ws_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ use crate::{
config_util::{ConfigUpdate, get_config, is_debug, UFCRConfig, update_config},
fs_util::open_downloads_dir,
net_util::{
download_media_tools, get_vod_meta, get_vod_stream_url, JSON, login_to_fight_pass,
search_vods, update_proxied_client,
download_media_tools, get_vod_meta, get_vod_stream_url, JSON, JsonTryGet,
login_to_fight_pass, search_vods, update_proxied_client,
},
rt_util::QuitUnwrap,
state_util::{clear_inactive_dlq_vods, get_dlq, Vod},
Expand Down Expand Up @@ -220,9 +220,11 @@ fn send_media_tool_download_progress(socket: &SocketRef, tool: &str, progress: f

/// Handles the `login` WS event.
async fn handle_login_event(ack: AckSender, Data(data): Data<JSON>) {
if let (Some(region), Some(email), Some(pass)) =
(data[0].as_str(), data[1].as_str(), data[2].as_str())
{
if let (Some(region), Some(email), Some(pass)) = (
data.try_get(0).as_str(),
data.try_get(1).as_str(),
data.try_get(2).as_str(),
) {
match login_to_fight_pass(region, email, pass).await {
Ok(tokens) => {
update_config(ConfigUpdate::Region(region.to_string())).await;
Expand All @@ -240,7 +242,7 @@ async fn handle_login_event(ack: AckSender, Data(data): Data<JSON>) {

/// Handles the `search-vods` WS event.
async fn handle_search_vods_event(ack: AckSender, Data(data): Data<JSON>) {
if let (Some(query), Some(page)) = (data[0].as_str(), data[1].as_u64()) {
if let (Some(query), Some(page)) = (data.try_get(0).as_str(), data.try_get(1).as_u64()) {
send_result(ack, search_vods(query, page).await);
} else {
send_error(ack, "Invalid search request");
Expand All @@ -266,8 +268,8 @@ async fn handle_verify_url_event(ack: AckSender, Data(data): Data<JSON>) {
/// Handles the `download` WS event.
async fn handle_download_event(ack: AckSender, Data(mut data): Data<JSON>) {
if let (Ok(mut vod), Some(is_restart)) = (
serde_json::from_value::<Vod>(data[0].take()),
data[1].as_bool(),
serde_json::from_value::<Vod>(data.try_get_mut(0, &mut JSON::Null).take()),
data.try_get(1).as_bool(),
) {
match get_vod_stream_url(vod.id).await {
Ok(hls) => vod.hls = hls,
Expand Down

0 comments on commit 42da300

Please sign in to comment.