TnAflix provider upgrade: replace panicking index-based HTML parsing with Option-based extraction, simplify cache/sort/duration handling, and add Discord error reporting for fetch and parse failures

This commit is contained in:
Simon
2026-02-08 14:26:18 +00:00
parent bf622d95a6
commit 7b1bb758e3

View File

@@ -3,14 +3,13 @@ use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
// use std::sync::{Arc, RwLock};
// use std::thread;
use std::vec;
error_chain! {
@@ -23,201 +22,16 @@ error_chain! {
#[derive(Debug, Clone)]
pub struct TnaflixProvider {
url: String,
// sites: Arc<RwLock<Vec<FilterOption>>>,
// categories: Arc<RwLock<Vec<FilterOption>>>,
// stars: Arc<RwLock<Vec<FilterOption>>>,
}
impl TnaflixProvider {
pub fn new() -> Self {
let provider = TnaflixProvider {
TnaflixProvider {
url: "https://www.tnaflix.com".to_string(),
// sites: Arc::new(RwLock::new(vec![FilterOption {
// id: "all".to_string(),
// title: "All".to_string(),
// }])),
// categories: Arc::new(RwLock::new(vec![FilterOption {
// id: "all".to_string(),
// title: "All".to_string(),
// }])),
// stars: Arc::new(RwLock::new(vec![FilterOption {
// id: "all".to_string(),
// title: "All".to_string(),
// }])),
};
// Kick off the background load but return immediately
// provider.spawn_initial_load();
provider
}
}
// fn spawn_initial_load(&self) {
// let url = self.url.clone();
// let sites = Arc::clone(&self.sites);
// let categories = Arc::clone(&self.categories);
// let stars = Arc::clone(&self.stars);
// thread::spawn(move || {
// // Create a tiny runtime just for these async tasks
// let rt = tokio::runtime::Builder::new_current_thread()
// .enable_all()
// .build()
// .expect("build tokio runtime");
// rt.block_on(async move {
// // If you have a streaming sites loader, call it here too
// if let Err(e) = Self::load_sites(&url, sites).await {
// eprintln!("load_sites_into failed: {e}");
// }
// if let Err(e) = Self::load_categories(&url, categories).await {
// eprintln!("load_categories failed: {e}");
// }
// if let Err(e) = Self::load_stars(&url, stars).await {
// eprintln!("load_stars failed: {e}");
// }
// });
// });
// }
// async fn load_stars(base_url: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
// let mut requester = util::requester::Requester::new();
// for page in [1..10].into_iter().flatten() {
// let text = requester
// .get(format!("{}/pornstars?page={}", &base_url, page).as_str())
// .await
// .unwrap();
// if text.contains("404 Not Found") || text.is_empty() {
// break;
// }
// let stars_div = text
// .split("<span>Hall of Fame Pornstars</span>")
// .collect::<Vec<&str>>()[1]
// .split("pagination")
// .collect::<Vec<&str>>()[0];
// for stars_element in stars_div.split("<a ").collect::<Vec<&str>>()[1..].to_vec() {
// let star_url = stars_element.split("href=\"").collect::<Vec<&str>>()[1]
// .split("\"")
// .collect::<Vec<&str>>()[0];
// let star_id = star_url.split("/").collect::<Vec<&str>>()[4].to_string();
// let star_name = stars_element.split("title=\"").collect::<Vec<&str>>()[1]
// .split("\"")
// .collect::<Vec<&str>>()[0]
// .to_string();
// Self::push_unique(
// &stars,
// FilterOption {
// id: star_id,
// title: star_name,
// },
// );
// }
// }
// return Ok(());
// }
// async fn load_sites(base_url: &str, sites: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
// let mut requester = util::requester::Requester::new();
// let mut page = 0;
// loop {
// page += 1;
// let text = requester
// .get(format!("{}/sites/{}/", &base_url, page).as_str())
// .await
// .unwrap();
// if text.contains("404 Not Found") || text.is_empty() {
// break;
// }
// let sites_div = text
// .split("id=\"list_content_sources_sponsors_list_items\"")
// .collect::<Vec<&str>>()[1]
// .split("class=\"pagination\"")
// .collect::<Vec<&str>>()[0];
// for sites_element in
// sites_div.split("class=\"headline\"").collect::<Vec<&str>>()[1..].to_vec()
// {
// let site_url = sites_element.split("href=\"").collect::<Vec<&str>>()[1]
// .split("\"")
// .collect::<Vec<&str>>()[0];
// let site_id = site_url.split("/").collect::<Vec<&str>>()[4].to_string();
// let site_name = sites_element.split("<h2>").collect::<Vec<&str>>()[1]
// .split("<")
// .collect::<Vec<&str>>()[0]
// .to_string();
// Self::push_unique(
// &sites,
// FilterOption {
// id: site_id,
// title: site_name,
// },
// );
// }
// }
// return Ok(());
// }
// async fn load_networks(base_url: &str, networks: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
// let mut requester = util::requester::Requester::new();
// let text = requester.get(&base_url).await.unwrap();
// let networks_div = text.split("class=\"sites__list\"").collect::<Vec<&str>>()[1]
// .split("</div>")
// .collect::<Vec<&str>>()[0];
// for network_element in
// networks_div.split("sites__item").collect::<Vec<&str>>()[1..].to_vec()
// {
// if network_element.contains("sites__all") {
// continue;
// }
// let network_url = network_element.split("href=\"").collect::<Vec<&str>>()[1]
// .split("\"")
// .collect::<Vec<&str>>()[0];
// let network_id = network_url.split("/").collect::<Vec<&str>>()[4].to_string();
// let network_name = network_element.split(">").collect::<Vec<&str>>()[1]
// .split("<")
// .collect::<Vec<&str>>()[0]
// .to_string();
// Self::push_unique(
// &networks,
// FilterOption {
// id: network_id,
// title: network_name,
// },
// );
// }
// return Ok(());
// }
// Push one item with minimal lock time and dedup by id
// fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
// if let Ok(mut vec) = target.write() {
// if !vec.iter().any(|x| x.id == item.id) {
// vec.push(item);
// // Optional: keep it sorted for nicer UX
// // vec.sort_by(|a,b| a.title.cmp(&b.title));
// }
// }
// }
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
let _ = clientversion;
// let sites: Vec<FilterOption> = self
// .sites
// .read()
// .map(|g| g.clone()) // or: .map(|g| g.to_vec())
// .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
// let networks: Vec<FilterOption> = self
// .networks
// .read()
// .map(|g| g.clone()) // or: .map(|g| g.to_vec())
// .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
// let stars: Vec<FilterOption> = self
// .stars
// .read()
// .map(|g| g.clone()) // or: .map(|g| g.to_vec())
// .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
Channel {
id: "tnaflix".to_string(),
name: "TnAflix".to_string(),
@@ -279,33 +93,6 @@ impl TnaflixProvider {
],
multiSelect: false,
},
// ChannelOption {
// id: "sites".to_string(),
// title: "Sites".to_string(),
// description: "Filter for different Sites".to_string(),
// systemImage: "rectangle.stack".to_string(),
// colorName: "green".to_string(),
// options: sites,
// multiSelect: false,
// },
// ChannelOption {
// id: "networks".to_string(),
// title: "Networks".to_string(),
// description: "Filter for different Networks".to_string(),
// systemImage: "list.dash".to_string(),
// colorName: "purple".to_string(),
// options: networks,
// multiSelect: false,
// },
// ChannelOption {
// id: "stars".to_string(),
// title: "Stars".to_string(),
// description: "Filter for different Pornstars".to_string(),
// systemImage: "star.fill".to_string(),
// colorName: "yellow".to_string(),
// options: stars,
// multiSelect: false,
// },
],
nsfw: true,
cacheDuration: None,
@@ -319,73 +106,37 @@ impl TnaflixProvider {
sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let sort_string: String = match sort {
"featured" => "featured".to_string(),
"toprated" => "toprated".to_string(),
_ => "new".to_string(),
let sort_string = match sort {
"featured" => "featured",
"toprated" => "toprated",
_ => "new",
};
let duration_string: String = match options.duration.unwrap_or("all".to_string()).as_str() {
"short" => "short".to_string(),
"medium" => "medium".to_string(),
"long" => "long".to_string(),
"full" => "full".to_string(),
_ => "all".to_string(),
};
// if options.network.is_some()
// && !options.network.as_ref().unwrap().is_empty()
// && options.network.as_ref().unwrap() != "all"
// {
// sort_string = format!(
// "networks/{}{}",
// options.network.as_ref().unwrap(),
// alt_sort_string
// );
// }
// if options.sites.is_some()
// && !options.sites.as_ref().unwrap().is_empty()
// && options.sites.as_ref().unwrap() != "all"
// {
// sort_string = format!(
// "sites/{}{}",
// options.sites.as_ref().unwrap(),
// alt_sort_string
// );
// }
// if options.stars.is_some()
// && !options.stars.as_ref().unwrap().is_empty()
// && options.stars.as_ref().unwrap() != "all"
// {
// sort_string = format!(
// "models/{}{}",
// options.stars.as_ref().unwrap(),
// alt_sort_string
// );
// }
let duration_string = options
.duration
.clone()
.unwrap_or_else(|| "all".to_string());
let video_url = format!(
"{}/{}/{}?d={}",
self.url, sort_string, page, duration_string
);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url, None).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
// Cache Logic
if let Some((time, items)) = cache.get(&video_url) {
if time.elapsed().unwrap_or_default().as_secs() < 300 {
return Ok(items.clone());
}
}
let mut requester = options.requester.clone().ok_or("Requester missing")?;
let text = requester
.get(&video_url, None)
.await
.map_err(|e| format!("{}", e))?;
let video_items = self.get_video_items_from_html(text);
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
cache.insert(video_url, video_items.clone());
}
Ok(video_items)
}
@@ -397,128 +148,138 @@ impl TnaflixProvider {
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.to_string().to_lowercase().trim().replace(" ", "+");
let duration_string: String = match options.duration.unwrap_or("all".to_string()).as_str() {
"short" => "short".to_string(),
"medium" => "medium".to_string(),
"long" => "long".to_string(),
"full" => "full".to_string(),
_ => "all".to_string(),
};
let search_string = query.to_lowercase().trim().replace(" ", "+");
let duration_string = options
.duration
.clone()
.unwrap_or_else(|| "all".to_string());
let video_url = format!(
"{}/search?what={}&d={}&page={}",
self.url, search_string, duration_string, page
);
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
if let Some((time, items)) = cache.get(&video_url) {
if time.elapsed().unwrap_or_default().as_secs() < 300 {
return Ok(items.clone());
}
}
let text = requester.get(&video_url, None).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
let mut requester = options.requester.clone().ok_or("Requester missing")?;
let text = requester
.get(&video_url, None)
.await
.map_err(|e| format!("{}", e))?;
let video_items = self.get_video_items_from_html(text);
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
cache.insert(video_url, video_items.clone());
}
Ok(video_items)
}
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<VideoItem> = Vec::new();
let raw_videos = html.split("pagination ").collect::<Vec<&str>>()[0]
.split("row video-list")
.collect::<Vec<&str>>()[1]
let mut items = Vec::new();
// Safe helper for splitting
let get_part = |input: &str, sep: &str, idx: usize| -> Option<String> {
input.split(sep).nth(idx).map(|s| s.to_string())
};
// Navigate to the video list container safely
let list_part = match html.split("row video-list").nth(1) {
Some(p) => match p.split("pagination ").next() {
Some(inner) => inner,
None => return vec![],
},
None => return vec![],
};
let raw_videos: Vec<&str> = list_part
.split("col-xs-6 col-md-4 col-xl-3 mb-3")
.collect::<Vec<&str>>()[1..]
.to_vec();
for video_segment in &raw_videos {
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
// for (index, line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let video_url: String = video_segment.split(" href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let mut title = video_segment
.split("class=\"video-title text-break\">")
.collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.trim()
.to_string();
// html decode
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let id = video_url.split("/").collect::<Vec<&str>>()[5].to_string();
.skip(1)
.collect();
let thumb = match video_segment.contains("data-src=\""){
true => video_segment.split("data-src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string(),
false => video_segment.split("<img src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string(),
};
let raw_duration = video_segment
.split("thumb-icon video-duration\">")
.collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
let duration = parse_time_to_seconds(raw_duration.as_str()).unwrap_or(0) as u32;
let views = match video_segment.contains("icon-eye\"></i>") {
true => parse_abbreviated_number(
video_segment
.split("icon-eye\"></i>")
.collect::<Vec<&str>>()[1]
for (idx, segment) in raw_videos.iter().enumerate() {
let item: Option<VideoItem> = (|| {
let video_url = get_part(segment, " href=\"", 1)?
.split("\"")
.next()?
.to_string();
let mut title = get_part(segment, "class=\"video-title text-break\">", 1)?
.split("<")
.next()?
.trim()
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let id = video_url.split("/").nth(5)?.to_string();
let thumb = if segment.contains("data-src=\"") {
get_part(segment, "data-src=\"", 1)?
.split("\"")
.next()?
.to_string()
} else {
get_part(segment, "<img src=\"", 1)?
.split("\"")
.next()?
.to_string()
};
let raw_duration = get_part(segment, "thumb-icon video-duration\">", 1)?
.split("<")
.next()?
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let views = if segment.contains("icon-eye\"></i>") {
let v_str = get_part(segment, "icon-eye\"></i>", 1)?
.split("<")
.collect::<Vec<&str>>()[0]
.trim(),
.next()?
.trim()
.to_string();
parse_abbreviated_number(&v_str).unwrap_or(0) as u32
} else {
0
};
let preview = get_part(segment, "data-trailer=\"", 1)?
.split("\"")
.next()?
.to_string();
Some(
VideoItem::new(id, title, video_url, "tnaflix".to_string(), thumb, duration)
.views(views)
.preview(preview),
)
})();
if let Some(v) = item {
items.push(v);
} else {
eprintln!("Tnaflix: Failed to parse item index {}", idx);
tokio::spawn(async move {
let _ = send_discord_error_report(
format!("Tnaflix Parse Error at index {}", idx),
None,
Some("Tnaflix Provider"),
None,
file!(),
line!(),
module_path!(),
)
.unwrap_or(0) as u32,
false => 0,
};
let preview = video_segment
.split("data-trailer=\"")
.collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let video_item = VideoItem::new(
id,
title,
video_url.to_string(),
"tnaflix".to_string(),
thumb,
duration,
)
.views(views)
.preview(preview);
items.push(video_item);
.await;
});
}
}
return items;
items
}
}
@@ -527,34 +288,59 @@ impl Provider for TnaflixProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = per_page;
let _ = pool;
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => {
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
.await
}
None => {
self.get(cache, page.parse::<u8>().unwrap_or(1), &sort, options)
.await
}
let page_num = page.parse::<u8>().unwrap_or(1);
let result = match query {
Some(q) => self.query(cache, page_num, &q, options).await,
None => self.get(cache, page_num, &sort, options).await,
};
match videos {
match result {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
eprintln!("Tnaflix Error: {}", e);
// 1. Create a collection of owned data so we don't hold references to `e`
let mut error_reports = Vec::new();
// Iterating through the error chain to collect data into owned Strings
for cause in e.iter().skip(1) {
error_reports.push((
cause.to_string(), // Title
format_error_chain(cause), // Description/Chain
format!("caused by: {}", cause), // Message
));
}
// 2. Now that we aren't holding any `&dyn StdError`, we can safely .await
for (title, chain_str, msg) in error_reports {
let _ = send_discord_error_report(
title,
Some(chain_str),
Some("Pornzog Provider"),
Some(&msg),
file!(),
line!(),
module_path!(),
)
.await;
}
// In a real app, you'd extract owned strings here
// and await your discord reporter as we did for Pornzog
vec![]
}
}
}
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
Some(self.build_channel(clientversion))
}
}