// Source: hottub/src/providers/pornhd3x.rs
// Snapshot: 2026-03-30 02:34:26 +00:00 (1369 lines, 44 KiB, Rust)
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{
    Provider, build_proxy_url, report_provider_error, report_provider_error_background,
    requester_or_default, strip_url_scheme,
};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::videos::{ServerOptions, VideoEmbed, VideoFormat, VideoItem};
use async_trait::async_trait;
use chrono::{NaiveDate, TimeZone, Utc};
use error_chain::error_chain;
use futures::stream::{self, StreamExt};
use htmlentity::entity::{ICodedDataTrait, decode};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use serde_json::Value;
use std::sync::atomic::{AtomicU32, Ordering};
use std::sync::{Arc, LazyLock, RwLock};
use std::{thread, vec};
use url::Url;
use wreq::Version;
/// Static grouping/tag metadata consumed by the provider registry.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "studio-network",
        tags: &["premium", "studio", "aggregator"],
    };
// Module-local error type: wraps I/O and JSON errors via `foreign_links`
// and adds a `Parse` variant for scraping/selector failures.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        Json(serde_json::Error);
    }
    errors {
        Parse(msg: String) {
            description("parse error")
            display("parse error: {}", msg)
        }
    }
}
/// Canonical site origin; relative and mirror-host URLs are rebased onto it.
const BASE_URL: &str = "https://www.pornhd3x.tv";
const CHANNEL_ID: &str = "pornhd3x";
/// Path of the "latest premium" archive used as the home listing.
const HOME_ARCHIVE_PATH: &str = "/premium-porn-hd";
// The three constants below mirror the site's client-side get_sources
// handshake (hash secret plus the cookie-name prefix/suffix around the
// episode id) — values copied from the site's player JS.
const SOURCE_SECRET: &str = "98126avrbi6m49vd7shxkn985";
const SOURCE_COOKIE_PREFIX: &str = "826avrbi6m49vd7shxkn985m";
const SOURCE_COOKIE_SUFFIX: &str = "k06twz87wwxtp3dqiicks2df";
/// How many fresh detail pages are mined for tag/star/studio filters at startup.
const RECENT_TAG_DETAIL_LIMIT: usize = 8;
/// Maximum detail pages enriched in parallel per listing request.
const DETAIL_CONCURRENCY: usize = 4;
/// Scraper-backed provider for pornhd3x.tv.
#[derive(Debug, Clone)]
pub struct Pornhd3xProvider {
    // Base site URL (normally BASE_URL); kept as a field rather than the const.
    url: String,
    // Shared, lazily-populated filter catalogs; each starts with an "All"
    // sentinel and grows as menu/sitemap/detail pages are scraped.
    categories: Arc<RwLock<Vec<FilterOption>>>,
    tags: Arc<RwLock<Vec<FilterOption>>>,
    stars: Arc<RwLock<Vec<FilterOption>>>,
    uploaders: Arc<RwLock<Vec<FilterOption>>>,
    // Monotonic counter feeding the get_sources nonce handshake.
    source_counter: Arc<AtomicU32>,
}
/// What a listing request should browse.
#[derive(Debug, Clone)]
enum Target {
    /// The home "latest" archive.
    Latest,
    /// Free-text site search.
    Search(String),
    /// A concrete archive URL (studio/category/tag/pornstar listing).
    Archive(String),
}
/// Minimal listing-card data gathered before detail-page enrichment.
#[derive(Debug, Clone)]
struct ListStub {
    detail_url: String,
    title: String,
    thumb: String,
    // Optional hover-preview clip URL.
    preview: Option<String>,
}
/// Everything scraped from one movie detail page. All link collections are
/// `(absolute_url, display_title)` pairs.
#[derive(Debug, Clone, Default)]
struct DetailMetadata {
    title: Option<String>,
    thumb: Option<String>,
    studio: Option<(String, String)>,
    categories: Vec<(String, String)>,
    actors: Vec<(String, String)>,
    tags: Vec<(String, String)>,
    // Unix seconds parsed from the date embedded in the title, if any.
    uploaded_at: Option<u64>,
    // Player episode id required by the get_sources endpoint.
    episode_id: Option<String>,
}
impl Pornhd3xProvider {
pub fn new() -> Self {
let provider = Self {
url: BASE_URL.to_string(),
categories: Arc::new(RwLock::new(vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}])),
tags: Arc::new(RwLock::new(vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}])),
stars: Arc::new(RwLock::new(vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}])),
uploaders: Arc::new(RwLock::new(vec![FilterOption {
id: "all".to_string(),
title: "All".to_string(),
}])),
source_counter: Arc::new(AtomicU32::new(0)),
};
provider.spawn_initial_load();
provider
}
/// Kick off the first filter-catalog load on a dedicated OS thread running
/// its own single-threaded Tokio runtime, so construction never blocks and
/// does not require an ambient async runtime.
fn spawn_initial_load(&self) {
    let provider = self.clone();
    thread::spawn(move || {
        // Build a private current-thread runtime; report (not panic) on failure.
        let runtime = match tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
        {
            Ok(runtime) => runtime,
            Err(error) => {
                report_provider_error_background(
                    CHANNEL_ID,
                    "spawn_initial_load.runtime_build",
                    &error.to_string(),
                );
                return;
            }
        };
        runtime.block_on(async move {
            provider.refresh_filter_catalogs().await;
        });
    });
}
/// Assemble the `Channel` descriptor, snapshotting the current (possibly
/// still-loading) filter catalogs into the option lists.
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
    // Poisoned locks degrade to empty lists rather than panicking.
    let categories = self
        .categories
        .read()
        .map(|value| value.clone())
        .unwrap_or_default();
    let tags = self
        .tags
        .read()
        .map(|value| value.clone())
        .unwrap_or_default();
    let stars = self
        .stars
        .read()
        .map(|value| value.clone())
        .unwrap_or_default();
    let uploaders = self
        .uploaders
        .read()
        .map(|value| value.clone())
        .unwrap_or_default();
    Channel {
        id: CHANNEL_ID.to_string(),
        name: "PornHD3X".to_string(),
        description:
            "Premium studio archive pages with studio, category, pornstar, and keyword filters."
                .to_string(),
        premium: false,
        favicon: "https://www.google.com/s2/favicons?sz=64&domain=www.pornhd3x.tv"
            .to_string(),
        status: "active".to_string(),
        categories: categories.iter().map(|value| value.title.clone()).collect(),
        options: vec![
            // Only one sort is offered; see description for why.
            ChannelOption {
                id: "sort".to_string(),
                title: "Sort".to_string(),
                description:
                    "Latest archive only. Extra top lists on the site currently error server-side."
                        .to_string(),
                systemImage: "list.number".to_string(),
                colorName: "blue".to_string(),
                options: vec![FilterOption {
                    id: "new".to_string(),
                    title: "Latest".to_string(),
                }],
                multiSelect: false,
            },
            // "sites" carries studios; option ids are absolute archive URLs.
            ChannelOption {
                id: "sites".to_string(),
                title: "Uploaders".to_string(),
                description: "Browse studio archives directly.".to_string(),
                systemImage: "person.crop.square".to_string(),
                colorName: "purple".to_string(),
                options: uploaders,
                multiSelect: false,
            },
            ChannelOption {
                id: "categories".to_string(),
                title: "Categories".to_string(),
                description: "Browse a category archive.".to_string(),
                systemImage: "square.grid.2x2".to_string(),
                colorName: "orange".to_string(),
                options: categories,
                multiSelect: false,
            },
            ChannelOption {
                id: "stars".to_string(),
                title: "Pornstars".to_string(),
                description: "Browse a pornstar archive.".to_string(),
                systemImage: "star.fill".to_string(),
                colorName: "yellow".to_string(),
                options: stars,
                multiSelect: false,
            },
            // "filter" carries discovered keyword/tag archives.
            ChannelOption {
                id: "filter".to_string(),
                title: "Tags".to_string(),
                description: "Browse discovered keyword archives.".to_string(),
                systemImage: "tag.fill".to_string(),
                colorName: "green".to_string(),
                options: tags,
                multiSelect: false,
            },
        ],
        nsfw: true,
        cacheDuration: Some(1800),
    }
}
fn selector(value: &str) -> Result<Selector> {
Selector::parse(value)
.map_err(|error| Error::from(format!("selector `{value}` parse failed: {error}")))
}
fn regex(value: &str) -> Result<Regex> {
Regex::new(value).map_err(|error| Error::from(format!("regex `{value}` failed: {error}")))
}
/// Collapse every run of whitespace to a single space and trim both ends.
fn collapse_whitespace(text: &str) -> String {
    let mut result = String::with_capacity(text.len());
    for word in text.split_whitespace() {
        if !result.is_empty() {
            result.push(' ');
        }
        result.push_str(word);
    }
    result
}
/// Decode HTML entities; on decode failure return the input unchanged.
fn decode_html(text: &str) -> String {
    match decode(text.as_bytes()).to_string() {
        Ok(decoded) => decoded,
        Err(_) => text.to_string(),
    }
}
/// Extract an element's visible text, whitespace-collapsed and entity-decoded.
fn text_of(element: &ElementRef<'_>) -> String {
    let joined = element.text().collect::<Vec<_>>().join(" ");
    let collapsed = Self::collapse_whitespace(&joined);
    Self::decode_html(&collapsed)
}
/// Turn a URL slug (`big-tits-4k`) into a display title (`Big Tits 4k`):
/// split on `-`/`_`, drop empty parts, uppercase each word's first character.
fn titleize_slug(slug: &str) -> String {
    let mut words: Vec<String> = Vec::new();
    for part in slug.split(['-', '_']) {
        if part.is_empty() {
            continue;
        }
        let mut chars = part.chars();
        if let Some(first) = chars.next() {
            // to_uppercase may yield multiple chars for some scripts.
            let mut word: String = first.to_uppercase().collect();
            word.push_str(chars.as_str());
            words.push(word);
        }
    }
    words.join(" ")
}
/// Canonicalize a title for fuzzy matching: ASCII-lowercase, `&` becomes
/// " and ", every non-alphanumeric becomes a space, whitespace collapsed.
fn normalize_title(value: &str) -> String {
    let lowered = value.to_ascii_lowercase().replace('&', " and ");
    let mut cleaned = String::with_capacity(lowered.len());
    for ch in lowered.chars() {
        cleaned.push(if ch.is_ascii_alphanumeric() { ch } else { ' ' });
    }
    cleaned.split_whitespace().collect::<Vec<_>>().join(" ")
}
/// Append a filter option to a shared list unless it is blank or an option
/// with the same id is already present. Poisoned locks are silently skipped.
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
    let blank = item.id.is_empty() || item.title.is_empty();
    if blank {
        return;
    }
    let Ok(mut values) = target.write() else {
        return;
    };
    let already_present = values.iter().any(|value| value.id == item.id);
    if !already_present {
        values.push(item);
    }
}
/// Canonicalize a scraped URL. Branch order matters:
/// 1. empty -> empty;
/// 2. protocol-relative (`//host/...`) -> https;
/// 3. absolute URLs on the site's own hosts (or its brazzers3x mirror) are
///    rebased onto `self.url`, keeping path and query;
/// 4. other absolute http URLs are upgraded to https, https kept as-is;
/// 5. site-relative paths are joined onto the configured base.
fn normalize_url(&self, raw: &str) -> String {
    let value = raw.trim();
    if value.is_empty() {
        return String::new();
    }
    if value.starts_with("//") {
        return format!("https:{value}");
    }
    if let Ok(url) = Url::parse(value) {
        if let Some(host) = url.host_str() {
            if host.contains("pornhd3x.tv") || host.contains("brazzers3x.") {
                // Rebase mirror/self links onto the canonical base.
                return format!(
                    "{}{}{}",
                    self.url,
                    url.path(),
                    url.query()
                        .map(|query| format!("?{query}"))
                        .unwrap_or_default()
                );
            }
        }
        if value.starts_with("http://") {
            return value.replacen("http://", "https://", 1);
        }
        return value.to_string();
    }
    if value.starts_with('/') {
        return format!("{}{}", self.url, value);
    }
    format!("{}/{}", self.url.trim_end_matches('/'), value)
}
/// Keep only ASCII alphanumerics, spaces, and hyphens; everything else turns
/// into a space, then whitespace runs are collapsed to single spaces.
fn sanitize_search_query(query: &str) -> String {
    let keep = |ch: char| ch.is_ascii_alphanumeric() || ch == ' ' || ch == '-';
    let replaced: String = query
        .chars()
        .map(|ch| if keep(ch) { ch } else { ' ' })
        .collect();
    replaced.split_whitespace().collect::<Vec<_>>().join(" ")
}
/// Home archive URL; pages after the first get a `/page-N` suffix.
fn build_home_url(&self, page: u32) -> String {
    let mut url = format!("{}{}", self.url, HOME_ARCHIVE_PATH);
    if page > 1 {
        url.push_str(&format!("/page-{page}"));
    }
    url
}
/// Search URL: the query is sanitized, spaces percent-encoded as `%20`, and
/// `/page-N` appended for pages past the first.
fn build_search_url(&self, query: &str, page: u32) -> String {
    let encoded = Self::sanitize_search_query(query).replace(' ', "%20");
    let mut url = format!("{}/search/{}", self.url, encoded);
    if page > 1 {
        url.push_str(&format!("/page-{page}"));
    }
    url
}
/// Append `/page-N` to an archive URL for pages past the first; a trailing
/// slash on the base is always dropped.
fn build_archive_page_url(base: &str, page: u32) -> String {
    let trimmed = base.trim_end_matches('/');
    match page {
        0 | 1 => trimmed.to_string(),
        _ => format!("{trimmed}/page-{page}"),
    }
}
/// True when a URL normalizes onto this site's base and looks like a
/// browsable archive (studio/category/tag/pornstar listing or the home
/// archive path).
// NOTE(review): `contains` matches the prefix anywhere in the normalized
// URL, not only at the start of the path — presumably fine for normalized
// site URLs, but confirm it cannot be satisfied via a crafted query string.
fn is_allowed_archive_url(&self, value: &str) -> bool {
    let normalized = self.normalize_url(value);
    normalized.starts_with(&self.url)
        && [
            "/studio/",
            "/category/",
            "/tag/",
            "/pornstar/",
            HOME_ARCHIVE_PATH,
        ]
        .iter()
        .any(|prefix| normalized.contains(prefix))
}
/// Find a non-"all" option whose normalized title equals the normalized
/// query; returns that option's id (the archive URL).
fn match_filter(options: &[FilterOption], query: &str) -> Option<String> {
    let wanted = Self::normalize_title(query);
    for option in options {
        if option.id == "all" {
            continue;
        }
        if Self::normalize_title(&option.title) == wanted {
            return Some(option.id.clone());
        }
    }
    None
}
/// True only for https URLs on the canonical hosts whose path sits under
/// `/movies/` — the shape of a playable detail page.
fn is_allowed_detail_url(&self, value: &str) -> bool {
    let normalized = self.normalize_url(value);
    match Url::parse(&normalized) {
        Ok(url) => {
            let scheme_ok = url.scheme() == "https";
            let host_ok = matches!(url.host_str(), Some("www.pornhd3x.tv" | "pornhd3x.tv"));
            scheme_ok && host_ok && url.path().starts_with("/movies/")
        }
        Err(_) => false,
    }
}
/// Build the server-side proxy URL for a detail page, optionally appending a
/// `__quality__` segment. Disallowed or empty URLs come back untouched.
fn proxied_video(
    &self,
    options: &ServerOptions,
    detail_url: &str,
    quality: Option<&str>,
) -> String {
    let allowed = !detail_url.is_empty() && self.is_allowed_detail_url(detail_url);
    if !allowed {
        return detail_url.to_string();
    }
    let mut target = strip_url_scheme(detail_url);
    let quality = quality.map(str::trim).filter(|value| !value.is_empty());
    if let Some(quality) = quality {
        target.push_str("/__quality__/");
        target.push_str(&quality.replace(' ', "%20"));
    }
    build_proxy_url(options, CHANNEL_ID, &target)
}
fn filters_need_refresh(&self) -> bool {
let categories_len = self
.categories
.read()
.map(|values| values.len())
.unwrap_or_default();
let tags_len = self
.tags
.read()
.map(|values| values.len())
.unwrap_or_default();
let stars_len = self
.stars
.read()
.map(|values| values.len())
.unwrap_or_default();
let uploaders_len = self
.uploaders
.read()
.map(|values| values.len())
.unwrap_or_default();
categories_len <= 1 || tags_len <= 1 || stars_len <= 1 || uploaders_len <= 1
}
/// Run all three catalog loaders in sequence, reporting (not propagating)
/// each failure so one source going down does not block the others.
async fn refresh_filter_catalogs(&self) {
    if let Err(error) = self.load_home_catalogs().await {
        report_provider_error_background(
            CHANNEL_ID,
            "refresh_filter_catalogs.home",
            &error.to_string(),
        );
    }
    if let Err(error) = self.load_sitemap_catalogs().await {
        report_provider_error_background(
            CHANNEL_ID,
            "refresh_filter_catalogs.sitemap",
            &error.to_string(),
        );
    }
    if let Err(error) = self.load_recent_tags().await {
        report_provider_error_background(
            CHANNEL_ID,
            "refresh_filter_catalogs.recent_tags",
            &error.to_string(),
        );
    }
}
/// Lazily (re)load the filter catalogs before serving a request that relies
/// on them (e.g. query-to-archive matching).
async fn ensure_catalogs(&self) {
    if !self.filters_need_refresh() {
        return;
    }
    self.refresh_filter_catalogs().await;
}
/// Scrape the site's main menu for category and studio archive links to
/// seed the corresponding filter catalogs.
async fn load_home_catalogs(&self) -> Result<()> {
    // No request-specific options apply here, so pass an all-None ServerOptions.
    let mut requester = requester_or_default(
        &ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: None,
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        },
        CHANNEL_ID,
        "load_home_catalogs.requester",
    );
    let html = requester
        .get(BASE_URL, Some(Version::HTTP_11))
        .await
        .map_err(|error| Error::from(error.to_string()))?;
    let document = Html::parse_document(&html);
    let category_selector = Self::selector("#menu a.ml-item[href^=\"/category/\"]")?;
    let studio_selector = Self::selector("#menu a.ml-item[href^=\"/studio/\"]")?;
    for element in document.select(&category_selector) {
        let Some(href) = element.value().attr("href") else {
            continue;
        };
        let title = Self::text_of(&element);
        if title.is_empty() {
            continue;
        }
        // The option id doubles as the absolute archive URL.
        Self::push_unique(
            &self.categories,
            FilterOption {
                id: self.normalize_url(href),
                title,
            },
        );
    }
    for element in document.select(&studio_selector) {
        let Some(href) = element.value().attr("href") else {
            continue;
        };
        let title = Self::text_of(&element);
        if title.is_empty() {
            continue;
        }
        Self::push_unique(
            &self.uploaders,
            FilterOption {
                id: self.normalize_url(href),
                title,
            },
        );
    }
    Ok(())
}
/// Mine `/sitemap.xml` `<loc>` entries for studio and pornstar archive URLs,
/// deriving display titles from the URL slugs.
async fn load_sitemap_catalogs(&self) -> Result<()> {
    let mut requester = requester_or_default(
        &ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: None,
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        },
        CHANNEL_ID,
        "load_sitemap_catalogs.requester",
    );
    let xml = requester
        .get(&format!("{}/sitemap.xml", self.url), Some(Version::HTTP_11))
        .await
        .map_err(|error| Error::from(error.to_string()))?;
    // Lightweight regex scan instead of a full XML parse.
    let loc_regex = Self::regex(r"<loc>([^<]+)</loc>")?;
    for captures in loc_regex.captures_iter(&xml) {
        let Some(raw_url) = captures.get(1).map(|value| value.as_str()) else {
            continue;
        };
        let normalized = self.normalize_url(raw_url);
        let Some(parsed) = Url::parse(&normalized).ok() else {
            continue;
        };
        let path = parsed.path().trim_end_matches('/');
        if let Some(slug) = path.strip_prefix("/studio/") {
            Self::push_unique(
                &self.uploaders,
                FilterOption {
                    id: normalized.clone(),
                    title: Self::titleize_slug(slug),
                },
            );
        } else if let Some(slug) = path.strip_prefix("/pornstar/") {
            Self::push_unique(
                &self.stars,
                FilterOption {
                    id: normalized.clone(),
                    title: Self::titleize_slug(slug),
                },
            );
        }
    }
    Ok(())
}
/// Fetch the first home-archive page and mine the first few detail pages for
/// tag/category/actor/studio links to grow the filter catalogs.
async fn load_recent_tags(&self) -> Result<()> {
    let mut requester = requester_or_default(
        &ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: None,
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        },
        CHANNEL_ID,
        "load_recent_tags.requester",
    );
    let html = requester
        .get(&self.build_home_url(1), Some(Version::HTTP_11))
        .await
        .map_err(|error| Error::from(error.to_string()))?;
    let stubs = self.parse_list_items(&html)?;
    // Bounded to RECENT_TAG_DETAIL_LIMIT detail fetches to keep startup cheap.
    for stub in stubs.into_iter().take(RECENT_TAG_DETAIL_LIMIT) {
        let detail_html = requester
            .get(&stub.detail_url, Some(Version::HTTP_11))
            .await
            .map_err(|error| Error::from(error.to_string()))?;
        // Individual parse failures are ignored; this pass is best-effort.
        if let Ok(metadata) = self.parse_detail_metadata(&detail_html) {
            self.store_detail_filters(&metadata);
        }
    }
    Ok(())
}
/// Map request options onto a browse target. Precedence mirrors the order
/// the channel exposes its options: sites (studios), then stars, then filter
/// (tags), then categories. Anything unset, set to "all", or not an allowed
/// archive URL is skipped; with no match the home "latest" archive is used.
fn resolve_option_target(&self, options: &ServerOptions) -> Target {
    let candidates = [
        options.sites.as_deref(),
        options.stars.as_deref(),
        options.filter.as_deref(),
        options.categories.as_deref(),
    ];
    for candidate in candidates.into_iter().flatten() {
        if candidate != "all" && self.is_allowed_archive_url(candidate) {
            return Target::Archive(self.normalize_url(candidate));
        }
    }
    Target::Latest
}
/// Redirect a free-text query to a known archive when its title matches a
/// catalog entry — uploaders first, then stars, tags, and categories —
/// falling back to a plain site search otherwise.
fn resolve_query_target(&self, query: &str) -> Target {
    let catalogs = [&self.uploaders, &self.stars, &self.tags, &self.categories];
    for catalog in catalogs {
        // A poisoned lock just skips that catalog.
        let Ok(values) = catalog.read() else {
            continue;
        };
        if let Some(target) = Self::match_filter(&values, query) {
            return Target::Archive(target);
        }
    }
    Target::Search(query.to_string())
}
/// Resolve the concrete request URL for a browse target and page number.
fn target_url(&self, target: &Target, page: u32) -> String {
    if let Target::Search(query) = target {
        return self.build_search_url(query, page);
    }
    if let Target::Archive(url) = target {
        return Self::build_archive_page_url(url, page);
    }
    self.build_home_url(page)
}
/// Parse an archive/search listing page into lightweight stubs (detail URL,
/// title, thumb, optional hover preview) for later per-item enrichment.
fn parse_list_items(&self, html: &str) -> Result<Vec<ListStub>> {
    let document = Html::parse_document(html);
    let item_selector = Self::selector(".movies-list .ml-item")?;
    let anchor_selector = Self::selector("a.ml-mask[href]")?;
    let title_selector = Self::selector(".mli-info h2")?;
    let img_selector = Self::selector("img[data-original], img[src]")?;
    let preview_holder_selector = Self::selector(".thumb__img[data-preview]")?;
    let preview_icon_selector =
        Self::selector("span.player_icon[str], span.player_icon1[str]")?;
    let mut items = Vec::new();
    for element in document.select(&item_selector) {
        let Some(anchor) = element.select(&anchor_selector).next() else {
            continue;
        };
        let Some(href) = anchor.value().attr("href") else {
            continue;
        };
        let detail_url = self.normalize_url(href);
        // Only keep links that resolve to on-site movie detail pages.
        if detail_url.is_empty() || !detail_url.contains("/movies/") {
            continue;
        }
        // Title from the card body, falling back to the anchor's title attr.
        let title = element
            .select(&title_selector)
            .next()
            .map(|value| Self::text_of(&value))
            .filter(|value| !value.is_empty())
            .or_else(|| anchor.value().attr("title").map(Self::decode_html))
            .unwrap_or_default();
        if title.is_empty() {
            continue;
        }
        // Lazy-loaded images keep the real URL in data-original.
        let thumb = element
            .select(&img_selector)
            .next()
            .and_then(|value| {
                value
                    .value()
                    .attr("data-original")
                    .or_else(|| value.value().attr("src"))
            })
            .map(|value| self.normalize_url(value))
            .unwrap_or_default();
        // Hover preview: data-preview holder first, then the player icon's
        // non-standard `str` attribute.
        let preview = element
            .select(&preview_holder_selector)
            .next()
            .and_then(|value| value.value().attr("data-preview"))
            .or_else(|| {
                element
                    .select(&preview_icon_selector)
                    .next()
                    .and_then(|value| value.value().attr("str"))
            })
            .map(|value| self.normalize_url(value))
            .filter(|value| !value.is_empty());
        // Drop URLs whose final path segment is empty (no usable slug).
        if detail_url
            .trim_end_matches('/')
            .rsplit('/')
            .next()
            .unwrap_or_default()
            .is_empty()
        {
            continue;
        }
        items.push(ListStub {
            detail_url,
            title,
            thumb,
            preview,
        });
    }
    Ok(items)
}
fn parse_uploaded_at(title: &str) -> Option<u64> {
let patterns = [
r"(?P<day>\d{1,2})\.(?P<month>\d{1,2})\.(?P<year>\d{4})",
r"(?P<day>\d{1,2})-(?P<month>\d{1,2})-(?P<year>\d{4})",
r"\((?P<day>\d{1,2})-(?P<month>\d{1,2})-(?P<year>\d{4})\)",
];
for pattern in patterns {
let Ok(regex) = Regex::new(pattern) else {
continue;
};
let Some(captures) = regex.captures(title) else {
continue;
};
let day = captures.name("day")?.as_str().parse::<u32>().ok()?;
let month = captures.name("month")?.as_str().parse::<u32>().ok()?;
let year = captures.name("year")?.as_str().parse::<i32>().ok()?;
let date = NaiveDate::from_ymd_opt(year, month, day)?;
let datetime = Utc.from_utc_datetime(&date.and_hms_opt(0, 0, 0)?);
return Some(datetime.timestamp() as u64);
}
None
}
/// Scrape a movie detail page for title, poster, studio/category/actor/tag
/// links, the upload date embedded in the title, and the player episode id.
fn parse_detail_metadata(&self, html: &str) -> Result<DetailMetadata> {
    let document = Html::parse_document(html);
    let title_selector = Self::selector(".mvic-desc h3")?;
    let og_image_selector = Self::selector("meta[property=\"og:image\"]")?;
    let studio_selector = Self::selector("#bread a[href*=\"/studio/\"]")?;
    let category_selector = Self::selector(".mvici-left a[href*=\"/category/\"]")?;
    let actor_selector = Self::selector(".mvici-left a[href*=\"/pornstar/\"]")?;
    let tag_selector = Self::selector("#mv-keywords a[href*=\"/tag/\"]")?;
    // Episode id lives either in a hidden #uuid element or on an episode button.
    let episode_selector = Self::selector("#uuid, a.btn-eps[episode-id]")?;
    let title = document
        .select(&title_selector)
        .next()
        .map(|value| Self::text_of(&value))
        .filter(|value| !value.is_empty());
    let thumb = document
        .select(&og_image_selector)
        .next()
        .and_then(|value| value.value().attr("content"))
        .map(|value| self.normalize_url(value))
        .filter(|value| !value.is_empty());
    // Studio comes from the breadcrumb; only the first match is used.
    let studio = document.select(&studio_selector).next().and_then(|value| {
        let href = value.value().attr("href")?;
        let title = Self::text_of(&value);
        (!title.is_empty()).then_some((self.normalize_url(href), title))
    });
    let categories = document
        .select(&category_selector)
        .filter_map(|value| {
            let href = value.value().attr("href")?;
            let title = Self::text_of(&value);
            (!title.is_empty()).then_some((self.normalize_url(href), title))
        })
        .collect::<Vec<_>>();
    let actors = document
        .select(&actor_selector)
        .filter_map(|value| {
            let href = value.value().attr("href")?;
            let title = Self::text_of(&value);
            (!title.is_empty()).then_some((self.normalize_url(href), title))
        })
        .collect::<Vec<_>>();
    let tags = document
        .select(&tag_selector)
        .filter_map(|value| {
            let href = value.value().attr("href")?;
            let title = Self::text_of(&value);
            (!title.is_empty()).then_some((self.normalize_url(href), title))
        })
        .collect::<Vec<_>>();
    let episode_id = document
        .select(&episode_selector)
        .find_map(|value| {
            value
                .value()
                .attr("value")
                .or_else(|| value.value().attr("episode-id"))
        })
        .map(ToOwned::to_owned);
    Ok(DetailMetadata {
        // The upload date is only ever encoded in the visible title text.
        uploaded_at: title.as_deref().and_then(Self::parse_uploaded_at),
        title,
        thumb,
        studio,
        categories,
        actors,
        tags,
        episode_id,
    })
}
/// Fold one detail page's studio/category/actor/tag links into the shared
/// filter catalogs (duplicates are ignored by `push_unique`).
fn store_detail_filters(&self, metadata: &DetailMetadata) {
    if let Some((url, title)) = &metadata.studio {
        Self::push_unique(
            &self.uploaders,
            FilterOption {
                id: url.clone(),
                title: title.clone(),
            },
        );
    }
    // Each metadata collection feeds exactly one catalog.
    let groups = [
        (&self.categories, &metadata.categories),
        (&self.stars, &metadata.actors),
        (&self.tags, &metadata.tags),
    ];
    for (catalog, entries) in groups {
        for (url, title) in entries.iter() {
            Self::push_unique(
                catalog,
                FilterOption {
                    id: url.clone(),
                    title: title.clone(),
                },
            );
        }
    }
}
/// Cookie name tied to an episode: fixed prefix + episode id + fixed suffix.
fn build_source_cookie_name(episode_id: &str) -> String {
    let mut name = String::with_capacity(
        SOURCE_COOKIE_PREFIX.len() + episode_id.len() + SOURCE_COOKIE_SUFFIX.len(),
    );
    name.push_str(SOURCE_COOKIE_PREFIX);
    name.push_str(episode_id);
    name.push_str(SOURCE_COOKIE_SUFFIX);
    name
}
/// md5(episode_id + nonce + shared secret) as lowercase hex — mirrors the
/// site's client-side handshake.
fn build_source_hash(episode_id: &str, nonce: &str) -> String {
    let digest = md5::compute(format!("{episode_id}{nonce}{SOURCE_SECRET}"));
    format!("{digest:x}")
}
/// Produce the next request counter and a 6-hex-digit nonce derived from it
/// (wrapped to 24 bits so it always fits the fixed width).
fn next_source_request(&self) -> (u32, String) {
    let previous = self.source_counter.fetch_add(1, Ordering::Relaxed);
    let count = previous + 1;
    let nonce = format!("{:06x}", count % 0xFF_FFFF);
    (count, nonce)
}
/// Call the site's `get_sources` AJAX endpoint for an episode, reproducing
/// the browser's handshake: an md5 hash in the path, a counter query param,
/// and a per-request nonce cookie named after the episode id.
async fn fetch_sources(
    &self,
    requester: &mut crate::util::requester::Requester,
    referer: &str,
    episode_id: &str,
) -> Result<Value> {
    let (count, nonce) = self.next_source_request();
    let source_url = format!(
        "{}/ajax/get_sources/{}/{hash}?count={count}&mobile=true",
        self.url,
        episode_id,
        hash = Self::build_source_hash(episode_id, &nonce),
        count = count,
    );
    // Preserve any cookies the requester already holds for this origin and
    // append the nonce cookie the endpoint checks against the path hash.
    let existing_cookie = requester.cookie_header_for_url(&source_url);
    let cookie_name = Self::build_source_cookie_name(episode_id);
    let cookie_value = format!("{cookie_name}={nonce}");
    let combined_cookie = match existing_cookie {
        Some(existing) if !existing.trim().is_empty() => format!("{existing}; {cookie_value}"),
        _ => cookie_value,
    };
    let response = requester
        .get_with_headers(
            &source_url,
            vec![
                ("Cookie".to_string(), combined_cookie),
                ("Referer".to_string(), referer.to_string()),
                ("X-Requested-With".to_string(), "XMLHttpRequest".to_string()),
                (
                    "Accept".to_string(),
                    "application/json, text/javascript, */*; q=0.01".to_string(),
                ),
            ],
            Some(Version::HTTP_11),
        )
        .await
        .map_err(|error| Error::from(error.to_string()))?;
    if response.trim().is_empty() {
        return Err(Error::from("source payload empty"));
    }
    Ok(serde_json::from_str::<Value>(&response)?)
}
/// Convert the `playlist[].sources[]` entries of a get_sources payload into
/// proxied `VideoFormat`s, one per labelled quality.
fn build_formats(
    &self,
    value: &Value,
    options: &ServerOptions,
    detail_url: &str,
) -> Vec<VideoFormat> {
    let mut formats = Vec::new();
    for playlist in value
        .get("playlist")
        .and_then(|playlist| playlist.as_array())
        .into_iter()
        .flatten()
    {
        for source in playlist
            .get("sources")
            .and_then(|sources| sources.as_array())
            .into_iter()
            .flatten()
        {
            let Some(file) = source.get("file").and_then(|file| file.as_str()) else {
                continue;
            };
            let url = self.normalize_url(file);
            if url.is_empty() {
                continue;
            }
            let quality = source
                .get("label")
                .and_then(|label| label.as_str())
                .unwrap_or("HLS")
                .to_string();
            // Container guess comes from the upstream URL, but the format URL
            // itself is always routed through the proxy, keyed by quality.
            let format_name = if url.contains(".m3u8") { "hls" } else { "mp4" };
            let format_url = self.proxied_video(options, detail_url, Some(&quality));
            let format = VideoFormat::new(format_url, quality.clone(), format_name.to_string())
                .format_id(quality.to_ascii_lowercase())
                .format_note(quality);
            formats.push(format);
        }
    }
    formats
}
/// Turn a listing stub into a full `VideoItem`: fetch the detail page,
/// resolve sources via the AJAX endpoint, and assemble formats, tags,
/// uploader, upload date, and optional embed. Returns `Ok(None)` when the
/// item cannot be played (no episode id, sources fetch failed, no URL).
async fn enrich_stub(
    &self,
    stub: ListStub,
    options: &ServerOptions,
) -> Result<Option<VideoItem>> {
    let mut requester = requester_or_default(options, CHANNEL_ID, "enrich_stub.requester");
    let detail_html = requester
        .get(&stub.detail_url, Some(Version::HTTP_11))
        .await
        .map_err(|error| Error::from(error.to_string()))?;
    let metadata = self.parse_detail_metadata(&detail_html)?;
    // Opportunistically grow the filter catalogs from every detail page seen.
    self.store_detail_filters(&metadata);
    let Some(episode_id) = metadata.episode_id.clone() else {
        return Ok(None);
    };
    let source_payload = match self
        .fetch_sources(&mut requester, &stub.detail_url, &episode_id)
        .await
    {
        Ok(payload) => payload,
        Err(error) => {
            // One item's sources failing should not fail the whole page.
            report_provider_error_background(
                CHANNEL_ID,
                "enrich_stub.fetch_sources",
                &format!("detail_url={}; error={}", stub.detail_url, error),
            );
            return Ok(None);
        }
    };
    let mut formats = self.build_formats(&source_payload, options, &stub.detail_url);
    let proxied_url = self.proxied_video(options, &stub.detail_url, None);
    // Prefer the proxied detail URL; fall back to the first format's URL,
    // then to any embed URL in the payload.
    let direct_url = if !proxied_url.is_empty() {
        Some(proxied_url)
    } else {
        formats
            .first()
            .map(|format| format.url.clone())
            .or_else(|| {
                source_payload
                    .get("embed_url")
                    .or_else(|| source_payload.get("embedUrl"))
                    .and_then(|value| value.as_str())
                    .map(|value| self.normalize_url(value))
            })
    };
    let Some(url) = direct_url else {
        return Ok(None);
    };
    // Detail-page metadata wins over listing-card fallbacks.
    let title = metadata
        .title
        .clone()
        .filter(|value| !value.is_empty())
        .unwrap_or(stub.title.clone());
    let thumb = metadata
        .thumb
        .clone()
        .filter(|value| !value.is_empty())
        .unwrap_or(stub.thumb.clone());
    let mut item = VideoItem::new(
        episode_id.clone(),
        title.clone(),
        url.clone(),
        CHANNEL_ID.to_string(),
        thumb,
        0,
    );
    item.preview = stub.preview.clone();
    if let Some((uploader_url, uploader)) = &metadata.studio {
        item.uploader = Some(uploader.clone());
        item.uploaderUrl = Some(uploader_url.clone());
    }
    // Merge tags, categories, and actors into one sorted, deduplicated list.
    let mut item_tags = metadata
        .tags
        .iter()
        .map(|(_, title)| title.clone())
        .collect::<Vec<_>>();
    item_tags.extend(metadata.categories.iter().map(|(_, title)| title.clone()));
    item_tags.extend(metadata.actors.iter().map(|(_, title)| title.clone()));
    item_tags.sort();
    item_tags.dedup();
    if !item_tags.is_empty() {
        item.tags = Some(item_tags);
    }
    item.uploadedAt = metadata.uploaded_at;
    if !formats.is_empty() {
        item.formats = Some(std::mem::take(&mut formats));
    }
    // Only attach an embed when the payload explicitly says it is embeddable.
    if source_payload
        .get("embed")
        .and_then(|value| value.as_bool())
        .unwrap_or(false)
    {
        if let Some(embed_url) = source_payload
            .get("embed_url")
            .or_else(|| source_payload.get("embedUrl"))
            .and_then(|value| value.as_str())
        {
            item.embed = Some(VideoEmbed {
                html: format!(
                    "<iframe src=\"{}\" width=\"100%\" height=\"500\" frameborder=\"0\" allowfullscreen></iframe>",
                    self.normalize_url(embed_url)
                ),
                source: self.normalize_url(embed_url),
            });
        }
    }
    Ok(Some(item))
}
/// Fetch one listing page for a target, enrich each stub concurrently, and
/// cache the result. A cache entry younger than 30 minutes is served as-is;
/// on network/parse failure or an empty enrichment result, the previously
/// cached items (possibly stale) are returned instead of an error.
async fn fetch_items_for_target(
    &self,
    cache: VideoCache,
    target: Target,
    page: u32,
    options: ServerOptions,
) -> Result<Vec<VideoItem>> {
    let request_url = self.target_url(&target, page);
    let old_items = match cache.get(&request_url) {
        Some((time, items)) => {
            if time.elapsed().unwrap_or_default().as_secs() < 60 * 30 {
                return Ok(items.clone());
            }
            // Stale entry: keep a copy as the fallback while refreshing.
            items.clone()
        }
        None => vec![],
    };
    let mut requester =
        requester_or_default(&options, CHANNEL_ID, "fetch_items_for_target.list");
    let html = match requester.get(&request_url, Some(Version::HTTP_11)).await {
        Ok(html) => html,
        Err(error) => {
            report_provider_error(
                CHANNEL_ID,
                "fetch_items_for_target.list",
                &format!("url={request_url}; error={error}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    let stubs = match self.parse_list_items(&html) {
        Ok(stubs) => stubs,
        Err(error) => {
            report_provider_error(
                CHANNEL_ID,
                "fetch_items_for_target.parse_list",
                &format!("url={request_url}; error={error}"),
            )
            .await;
            return Ok(old_items);
        }
    };
    // Enrich with bounded concurrency; individual failures are dropped.
    let videos = stream::iter(stubs.into_iter().map(|stub| {
        let options = options.clone();
        async move { self.enrich_stub(stub, &options).await.ok().flatten() }
    }))
    .buffer_unordered(DETAIL_CONCURRENCY)
    .filter_map(async move |value| value)
    .collect::<Vec<_>>()
    .await;
    if !videos.is_empty() {
        cache.remove(&request_url);
        cache.insert(request_url, videos.clone());
        return Ok(videos);
    }
    // Nothing enriched: keep serving whatever was cached before.
    Ok(old_items)
}
}
#[async_trait]
impl Provider for Pornhd3xProvider {
/// Provider entry point: resolve the page number and browse target (a
/// non-empty query beats option filters), then fetch and enrich that page.
/// Errors are reported and surface to the caller as an empty list.
async fn get_videos(
    &self,
    cache: VideoCache,
    _pool: DbPool,
    _sort: String,
    query: Option<String>,
    page: String,
    _per_page: String,
    options: ServerOptions,
) -> Vec<VideoItem> {
    // Catalogs must exist for query-to-archive matching to have any effect.
    self.ensure_catalogs().await;
    // Non-numeric or zero page strings fall back to page 1.
    let page = page
        .parse::<u32>()
        .ok()
        .filter(|value| *value > 0)
        .unwrap_or(1);
    let target = match query
        .as_deref()
        .map(str::trim)
        .filter(|value| !value.is_empty())
    {
        Some(query) => self.resolve_query_target(query),
        None => self.resolve_option_target(&options),
    };
    match self
        .fetch_items_for_target(cache, target, page, options)
        .await
    {
        Ok(items) => items,
        Err(error) => {
            report_provider_error_background(CHANNEL_ID, "get_videos", &error.to_string());
            vec![]
        }
    }
}
/// Channel descriptor; option lists reflect whatever catalogs loaded so far.
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
    Some(self.build_channel(clientversion))
}
}
#[cfg(test)]
mod tests {
    use super::*;
    impl Pornhd3xProvider {
        /// Like `new()` but without spawning the background catalog load,
        /// so tests stay offline and deterministic.
        fn new_for_tests() -> Self {
            Self {
                url: BASE_URL.to_string(),
                categories: Arc::new(RwLock::new(vec![FilterOption {
                    id: "all".to_string(),
                    title: "All".to_string(),
                }])),
                tags: Arc::new(RwLock::new(vec![FilterOption {
                    id: "all".to_string(),
                    title: "All".to_string(),
                }])),
                stars: Arc::new(RwLock::new(vec![FilterOption {
                    id: "all".to_string(),
                    title: "All".to_string(),
                }])),
                uploaders: Arc::new(RwLock::new(vec![FilterOption {
                    id: "all".to_string(),
                    title: "All".to_string(),
                }])),
                source_counter: Arc::new(AtomicU32::new(0)),
            }
        }
    }
    #[test]
    fn builds_search_urls_like_live_site() {
        let provider = Pornhd3xProvider::new_for_tests();
        assert_eq!(
            provider.build_search_url("Brazzers", 1),
            "https://www.pornhd3x.tv/search/Brazzers"
        );
        assert_eq!(
            provider.build_search_url("big tits", 3),
            "https://www.pornhd3x.tv/search/big%20tits/page-3"
        );
    }
    // Golden values pin the handshake against the site's client-side JS.
    #[test]
    fn builds_source_cookie_name_and_hash() {
        assert_eq!(
            Pornhd3xProvider::build_source_cookie_name("49Q27JL3HCPVNJQN"),
            "826avrbi6m49vd7shxkn985m49Q27JL3HCPVNJQNk06twz87wwxtp3dqiicks2df"
        );
        assert_eq!(
            Pornhd3xProvider::build_source_hash("49Q27JL3HCPVNJQN", "abcdef"),
            "8846c87b6e67760c42094713ec6f278a"
        );
    }
    // Covers both the dotted and the (parenthesized) dashed title-date forms.
    #[test]
    fn parses_known_dates() {
        assert!(Pornhd3xProvider::parse_uploaded_at(
            "Brazzers / - Ryan Reid, Kayley Gunner, Mick Blue Surprise Dick For Their Anniversary / 22.3.2026"
        )
        .is_some());
        assert!(
            Pornhd3xProvider::parse_uploaded_at(
                "New Dana Vespoli Kimmy Kimm A Brand Nude Incentive (22-03-2026)"
            )
            .is_some()
        );
    }
    #[test]
    fn builds_proxied_video_urls() {
        let provider = Pornhd3xProvider::new_for_tests();
        let options = ServerOptions {
            featured: None,
            category: None,
            sites: None,
            filter: None,
            language: None,
            public_url_base: Some("https://example.com".to_string()),
            requester: None,
            network: None,
            stars: None,
            categories: None,
            duration: None,
            sort: None,
            sexuality: None,
        };
        assert_eq!(
            provider.proxied_video(
                &options,
                "https://www.pornhd3x.tv/movies/example-video",
                None,
            ),
            "https://example.com/proxy/pornhd3x/www.pornhd3x.tv/movies/example-video"
        );
        assert_eq!(
            provider.proxied_video(
                &options,
                "https://www.pornhd3x.tv/movies/example-video",
                Some("720p"),
            ),
            "https://example.com/proxy/pornhd3x/www.pornhd3x.tv/movies/example-video/__quality__/720p"
        );
    }
}