diff --git a/src/main.rs b/src/main.rs
index 54296fb..8c2a952 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -51,6 +51,7 @@ async fn main() -> std::io::Result<()> {
 
     let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
 
+    providers::init_providers_now();
 
     web::HttpServer::new(move || {
         web::App::new()
diff --git a/src/providers/mod.rs b/src/providers/mod.rs
index a4ac45d..adf84aa 100644
--- a/src/providers/mod.rs
+++ b/src/providers/mod.rs
@@ -47,6 +47,12 @@ pub static ALL_PROVIDERS: Lazy> = Lazy::new(|
     m
 });
 
+pub fn init_providers_now() {
+    // Idempotent & thread-safe: runs the Lazy init exactly once.
+    Lazy::force(&ALL_PROVIDERS);
+}
+
+
 #[async_trait]
 pub trait Provider: Send + Sync {
     async fn get_videos(
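
init_providers_now exists only to force the lazy static eagerly at startup instead of on the first request. A minimal standalone sketch of the same idea, assuming ALL_PROVIDERS is a once_cell::sync::Lazy (which is what the Lazy::force call above matches); the names below are illustrative, not from this repository:

    use once_cell::sync::Lazy;

    static PROVIDERS: Lazy<Vec<String>> = Lazy::new(|| {
        // Stand-in for the expensive provider construction.
        vec!["omgxxx".to_string()]
    });

    fn init_providers_eagerly() {
        // The first call runs the closure above; every later call is a cheap no-op.
        Lazy::force(&PROVIDERS);
    }

Forcing it in main() means the provider constructors, and the background loads they spawn, start before the HTTP server begins accepting requests.
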
diff --git a/src/providers/omgxxx.rs b/src/providers/omgxxx.rs
index f5c1012..b173358 100644
--- a/src/providers/omgxxx.rs
+++ b/src/providers/omgxxx.rs
@@ -1,15 +1,17 @@
-use crate::api::ClientVersion;
-use crate::status::*;
-use crate::util::parse_abbreviated_number;
 use crate::DbPool;
+use crate::api::ClientVersion;
 use crate::providers::Provider;
 use crate::util::cache::VideoCache;
+use crate::util::parse_abbreviated_number;
 use crate::util::time::parse_time_to_seconds;
 use crate::videos::{ServerOptions, VideoItem};
+use crate::{status::*, util};
+use async_trait::async_trait;
 use error_chain::error_chain;
 use htmlentity::entity::{ICodedDataTrait, decode};
+use std::sync::{Arc, RwLock};
+use std::thread;
 use std::vec;
-use async_trait::async_trait;
 
 error_chain! {
     foreign_links {
@@ -21,22 +23,127 @@
 #[derive(Debug, Clone)]
 pub struct OmgxxxProvider {
     url: String,
-    sites: Vec<FilterOption>,
-    networks: Vec<FilterOption>,
+    sites: Arc<RwLock<Vec<FilterOption>>>,
+    networks: Arc<RwLock<Vec<FilterOption>>>,
 }
 
 impl OmgxxxProvider {
     pub fn new() -> Self {
-        OmgxxxProvider {
+        println!("new");
+        let provider = OmgxxxProvider {
             url: "https://www.omg.xxx".to_string(),
-            sites: vec![],
-            networks: vec![],
+            sites: Arc::new(RwLock::new(vec![])),
+            networks: Arc::new(RwLock::new(vec![])),
+        };
+
+        // Kick off the background load but return immediately
+        provider.spawn_initial_load();
+        provider
+    }
+
+    fn spawn_initial_load(&self) {
+        println!("spawn_initial_load");
+        let url = self.url.clone();
+        // let sites = Arc::clone(&self.sites);
+        let networks = Arc::clone(&self.networks);
+
+        thread::spawn(move || {
+            // Create a tiny runtime just for these async tasks
+            let rt = tokio::runtime::Builder::new_current_thread()
+                .enable_all()
+                .build()
+                .expect("build tokio runtime");
+
+            rt.block_on(async move {
+                // If you have a streaming sites loader, call it here too
+                // if let Err(e) = Self::load_sites_into(&url, sites).await {
+                //     eprintln!("load_sites_into failed: {e}");
+                // }
+
+                if let Err(e) = Self::load_networks(&url, networks).await {
+                    eprintln!("load_networks failed: {e}");
+                }
+            });
+        });
+    }
+
+    /// === separate, self-contained logic ===
+    /// Replace the bodies with your real fetching/parsing code.
+    // async fn load_sites(base_url: &str) -> Result<Vec<FilterOption>> {
+    //     // Example stub:
+    //     // let html = reqwest::blocking::get(format!("{}/sites/", base_url))?.text()?;
+    //     // Ok(parse_sites_from_html(&html))
+    //     Ok(vec![
+    //         FilterOption {
+    //             id: "site-a".into(),
+    //             title: "Site A".into(),
+    //         },
+    //         FilterOption {
+    //             id: "site-b".into(),
+    //             title: "Site B".into(),
+    //         },
+    //     ])
+    // }
+
+    async fn load_networks(base_url: &str, networks: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
+        println!("load_networks");
+        let mut requester = util::requester::Requester::new();
+        let text = requester.get(&base_url).await.unwrap();
+        let networks_div = text.split("class=\"sites__list\"").collect::<Vec<&str>>()[1]
+            .split("")
+            .collect::<Vec<&str>>()[0];
+        let mut networks_vec: Vec<FilterOption> = vec![];
+
+        for network_element in networks_div.split("sites__item").collect::<Vec<&str>>()[1..].to_vec() {
+            if network_element.contains("sites__all"){continue;}
+            let network_url = network_element.split("href=\"").collect::<Vec<&str>>()[1]
+                .split("\"")
+                .collect::<Vec<&str>>()[0];
+            let network_id = network_url.split("/").collect::<Vec<&str>>()[4].to_string();
+            let network_name = network_element.split(">").collect::<Vec<&str>>()[1]
+                .split("<")
+                .collect::<Vec<&str>>()[0]
+                .to_string();
+            networks_vec.push(FilterOption {
+                id: network_id.clone(),
+                title: network_name.clone(),
+            });
+            Self::push_unique(
+                &networks,
+                FilterOption {
+                    id: network_id,
+                    title: network_name,
+                },
+            );
+        }
+        return Ok(());
+    }
+
+    // Push one item with minimal lock time and dedup by id
+    fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
+        if let Ok(mut vec) = target.write() {
+            if !vec.iter().any(|x| x.id == item.id) {
+                vec.push(item);
+                // Optional: keep it sorted for nicer UX
+                // vec.sort_by(|a,b| a.title.cmp(&b.title));
+            }
         }
     }
 
     fn build_channel(&self, clientversion: ClientVersion) -> Channel {
         let _ = clientversion;
-
-        let channel: crate::status::Channel = Channel{
+        let sites: Vec<FilterOption> = self
+            .sites
+            .read()
+            .map(|g| g.clone()) // or: .map(|g| g.to_vec())
+            .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
+
+        let networks: Vec<FilterOption> = self
+            .networks
+            .read()
+            .map(|g| g.clone()) // or: .map(|g| g.to_vec())
+            .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
+
+        Channel {
             id: "omgxxx".to_string(),
             name: "OMG XXX".to_string(),
             description: "Free Porn Site".to_string(),
@@ -48,21 +155,21 @@
                 ChannelOption {
                     id: "sort".to_string(),
                     title: "Sort".to_string(),
-                    description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
+                    description: "Sort the Videos".to_string(),
                     systemImage: "list.number".to_string(),
                     colorName: "blue".to_string(),
                     options: vec![
                         FilterOption {
-                            id: "latest-updates".to_string(),
-                            title: "Latest".to_string(),
+                            id: "latest-updates".into(),
+                            title: "Latest".into(),
                         },
                         FilterOption {
-                            id: "most-popular".to_string(),
-                            title: "Most Viewed".to_string(),
+                            id: "most-popular".into(),
+                            title: "Most Viewed".into(),
                         },
                         FilterOption {
-                            id: "top-rated".to_string(),
-                            title: "Top Rated".to_string(),
+                            id: "top-rated".into(),
+                            title: "Top Rated".into(),
                         },
                     ],
                     multiSelect: false,
@@ -70,28 +177,25 @@
                 ChannelOption {
                     id: "sites".to_string(),
                     title: "Sites".to_string(),
-                    description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
+                    description: "Sort the Videos".to_string(),
                     systemImage: "list.bullet.indent".to_string(),
                     colorName: "green".to_string(),
-                    options: self.sites.clone(),
+                    options: sites,
                     multiSelect: false,
                 },
                 ChannelOption {
                     id: "networks".to_string(),
                     title: "Networks".to_string(),
-                    description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
+                    description: "Sort the Videos".to_string(),
                     systemImage: "list.dash".to_string(),
                     colorName: "purple".to_string(),
-                    options: self.networks.clone(),
+                    options: networks,
                     multiSelect: false,
-                }
-
+                },
             ],
             nsfw: true,
             cacheDuration: None,
-        };
-
-        return channel;
+        }
     }
 
     async fn get(
@@ -144,7 +248,17 @@
         if query.starts_with("@models:") {
             search_type = "models";
         }
-        let video_url = format!("{}/{}/{}/{}/", self.url, search_type, query.to_lowercase().trim().replace(" ","-").replace("@models:",""), page);
+        let video_url = format!(
+            "{}/{}/{}/{}/",
+            self.url,
+            search_type,
+            query
+                .to_lowercase()
+                .trim()
+                .replace(" ", "-")
+                .replace("@models:", ""),
+            page
+        );
         // Check our Video Cache. If the result is younger than 1 hour, we return it.
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
@@ -180,9 +294,12 @@
         }
         let mut items: Vec<VideoItem> = Vec::new();
         let raw_videos = html.split("videos_list_pagination").collect::<Vec<&str>>()[0]
-            .split(" class=\"pagination\" ").collect::<Vec<&str>>()[0]
-            .split("class=\"list-videos\"").collect::<Vec<&str>>()[1]
-            .split("class=\"item\"").collect::<Vec<&str>>()[1..]
+            .split(" class=\"pagination\" ")
+            .collect::<Vec<&str>>()[0]
+            .split("class=\"list-videos\"")
+            .collect::<Vec<&str>>()[1]
+            .split("class=\"item\"")
+            .collect::<Vec<&str>>()[1..]
             .to_vec();
         for video_segment in &raw_videos {
             // let vid = video_segment.split("\n").collect::<Vec<&str>>();
@@ -190,48 +307,81 @@ impl OmgxxxProvider {
             // println!("Line {}: {}", index, line);
             // }
             let video_url: String = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
-                .split("\"")
-                .collect::<Vec<&str>>()[0].to_string();
+                .split("\"")
+                .collect::<Vec<&str>>()[0]
+                .to_string();
             let mut title = video_segment.split(" title=\"").collect::<Vec<&str>>()[1]
-                .split("\"").collect::<Vec<&str>>()[0]
+                .split("\"")
+                .collect::<Vec<&str>>()[0]
                 .to_string();
             // html decode
             title = decode(title.as_bytes()).to_string().unwrap_or(title);
             let id = video_url.split("/").collect::<Vec<&str>>()[4].to_string();
-            let thumb = match video_segment.split("img loading").collect::<Vec<&str>>()[1].contains("data-src=\"") {
-                true => video_segment.split("img loading").collect::<Vec<&str>>()[1].split("data-src=\"").collect::<Vec<&str>>()[1]
-                    .split("\"").collect::<Vec<&str>>()[0]
-                    .to_string(),
-                false => video_segment.split("img loading").collect::<Vec<&str>>()[1].split("data-original=\"").collect::<Vec<&str>>()[1]
-                    .split("\"").collect::<Vec<&str>>()[0]
-                    .to_string(),
+            let thumb = match video_segment.split("img loading").collect::<Vec<&str>>()[1]
+                .contains("data-src=\"")
+            {
+                true => video_segment.split("img loading").collect::<Vec<&str>>()[1]
+                    .split("data-src=\"")
+                    .collect::<Vec<&str>>()[1]
+                    .split("\"")
+                    .collect::<Vec<&str>>()[0]
+                    .to_string(),
+                false => video_segment.split("img loading").collect::<Vec<&str>>()[1]
+                    .split("data-original=\"")
+                    .collect::<Vec<&str>>()[1]
+                    .split("\"")
+                    .collect::<Vec<&str>>()[0]
+                    .to_string(),
             };
-            let raw_duration = video_segment.split("").collect::<Vec<&str>>()[1]
-                .split("<").collect::<Vec<&str>>()[0]
-                .split(" ").collect::<Vec<&str>>().last().unwrap_or(&"")
-                .to_string();
-            let duration = parse_time_to_seconds(raw_duration.as_str()).unwrap_or(0) as u32;
-            let views = parse_abbreviated_number(video_segment.split("").collect::<Vec<&str>>()[1]
+            let raw_duration = video_segment
+                .split("")
+                .collect::<Vec<&str>>()[1]
                 .split("<")
                 .collect::<Vec<&str>>()[0]
-                .to_string().as_str()).unwrap_or(0) as u32;
+                .split(" ")
+                .collect::<Vec<&str>>()
+                .last()
+                .unwrap_or(&"")
+                .to_string();
+            let duration = parse_time_to_seconds(raw_duration.as_str()).unwrap_or(0) as u32;
+            let views = parse_abbreviated_number(
+                video_segment
+                    .split("")
+                    .collect::<Vec<&str>>()[1]
+                    .split("<")
+                    .collect::<Vec<&str>>()[0]
+                    .to_string()
+                    .as_str(),
+            )
+            .unwrap_or(0) as u32;
 
-            let preview = video_segment.split("data-preview=\"").collect::<Vec<&str>>()[1]
-                .split("\"").collect::<Vec<&str>>()[0]
+            let preview = video_segment
+                .split("data-preview=\"")
+                .collect::<Vec<&str>>()[1]
+                .split("\"")
+                .collect::<Vec<&str>>()[0]
                 .to_string();
-            let tags = match video_segment.contains("class=\"models\">"){
-                true => video_segment.split("class=\"models\">").collect::<Vec<&str>>()[1]
-                    .split("").collect::<Vec<&str>>()[0]
-                    .split("href=\"").collect::<Vec<&str>>()[1..]
-                    .into_iter().map(
-                        |s| format!("@models:{}", s.split("/").collect::<Vec<&str>>()[4]
-                            .to_string())
-                    ).collect::<Vec<String>>().to_vec(),
-                false => vec![]
-            }
-            ;
+            let tags = match video_segment.contains("class=\"models\">") {
+                true => video_segment
+                    .split("class=\"models\">")
+                    .collect::<Vec<&str>>()[1]
+                    .split("")
+                    .collect::<Vec<&str>>()[0]
+                    .split("href=\"")
+                    .collect::<Vec<&str>>()[1..]
+                    .into_iter()
+                    .map(|s| {
+                        format!(
+                            "@models:{}",
+                            s.split("/").collect::<Vec<&str>>()[4].to_string()
+                        )
+                    })
+                    .collect::<Vec<String>>()
+                    .to_vec(),
+                false => vec![],
+            };
 
             let video_item = VideoItem::new(
                 id,
@@ -243,14 +393,11 @@ impl OmgxxxProvider {
             )
             .views(views)
             .preview(preview)
-            .tags(tags)
-            ;
+            .tags(tags);
             items.push(video_item);
         }
         return items;
     }
-
-
 }
 
 #[async_trait]
@@ -269,7 +416,7 @@ impl Provider for OmgxxxProvider {
         let _ = pool;
         let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
             Some(q) => {
-                self.query(cache, page.parse::().unwrap_or(1), &q,options)
+                self.query(cache, page.parse::().unwrap_or(1), &q, options)
                     .await
             }
             None => {
@@ -286,7 +433,10 @@ impl Provider for OmgxxxProvider {
         }
     }
     fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
-        println!("Getting channel for omgxxx with client version: {:?}", clientversion);
+        println!(
+            "Getting channel for omgxxx with client version: {:?}",
+            clientversion
+        );
         self.build_channel(clientversion)
     }
 }