This commit is contained in:
Simon
2026-04-05 21:27:47 +00:00
parent 004399ecbe
commit 8d39b3a36f
6 changed files with 360 additions and 2 deletions

171
src/proxies/heavyfetish.rs Normal file
View File

@@ -0,0 +1,171 @@
use std::collections::HashMap;
use ntex::web;
use regex::Regex;
use scraper::{Html, Selector};
use url::Url;
use crate::util::requester::Requester;
/// Proxy that resolves heavyfetish.com video detail pages to direct
/// video-file URLs. Stateless; safe to construct per request.
#[derive(Debug, Clone)]
pub struct HeavyfetishProxy {}
impl HeavyfetishProxy {
    pub fn new() -> Self {
        Self {}
    }

    /// Turns a proxied endpoint into a canonical `https://` detail-page URL.
    ///
    /// Returns `None` for empty input or for anything that is not an allowed
    /// heavyfetish.com `/videos/` URL.
    fn normalize_detail_url(endpoint: &str) -> Option<String> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        let detail_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
            endpoint.to_string()
        } else {
            format!("https://{}", endpoint.trim_start_matches('/'))
        };
        Self::is_allowed_detail_url(&detail_url).then_some(detail_url)
    }

    /// Accepts only `https` URLs on heavyfetish.com (optionally `www.`)
    /// whose path starts with `/videos/`.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(parsed) = Url::parse(url).ok() else {
            return false;
        };
        if parsed.scheme() != "https" {
            return false;
        }
        let Some(host) = parsed.host_str() else {
            return false;
        };
        (host == "heavyfetish.com" || host == "www.heavyfetish.com")
            && parsed.path().starts_with("/videos/")
    }

    /// Cleans a scraped URL: un-escapes JS `\/` sequences, resolves
    /// protocol-relative (`//…`) and site-relative (`/…`) forms, and
    /// upgrades `http://` to `https://`. Empty input yields an empty string.
    fn normalize_url(raw: &str) -> String {
        let value = raw.trim().replace("\\/", "/");
        if value.is_empty() {
            return String::new();
        }
        if value.starts_with("//") {
            return format!("https:{value}");
        }
        if value.starts_with('/') {
            return format!("https://heavyfetish.com{value}");
        }
        if value.starts_with("http://") {
            return value.replacen("http://", "https://", 1);
        }
        value
    }

    /// Infers a quality label from the URL text, defaulting to "480p"
    /// when no known label is present.
    fn quality_from_url(url: &str) -> String {
        for quality in ["2160p", "1440p", "1080p", "720p", "480p", "360p", "240p"] {
            if url.contains(quality) {
                return quality.to_string();
            }
        }
        "480p".to_string()
    }

    /// Rank for a quality label: its concatenated digits parsed as a number
    /// (e.g. "1080p" -> 1080); 0 when the label carries no digits.
    fn quality_score(label: &str) -> u32 {
        label
            .chars()
            .filter(|value| value.is_ascii_digit())
            .collect::<String>()
            .parse::<u32>()
            .unwrap_or(0)
    }

    /// Compiles a regex, swallowing compile errors into `None`.
    fn regex(value: &str) -> Option<Regex> {
        Regex::new(value).ok()
    }

    /// Extracts capture group 1 from `block` and un-escapes JS `\/` and `\'`.
    fn extract_js_value(block: &str, regex: &Regex) -> Option<String> {
        regex
            .captures(block)
            .and_then(|captures| captures.get(1))
            .map(|value| value.as_str().replace("\\/", "/").replace("\\'", "'"))
    }

    /// Parses a CSS selector, swallowing parse errors into `None`.
    fn selector(value: &str) -> Option<Selector> {
        Selector::parse(value).ok()
    }

    /// Picks the URL with the highest `quality_score` out of the collected
    /// quality -> URL map. NOTE(review): ties are broken by HashMap
    /// iteration order (arbitrary); distinct labels normally have distinct
    /// scores, so this is not expected to matter in practice.
    fn best_source(seen: &HashMap<String, String>) -> Option<String> {
        seen.iter()
            .max_by_key(|(quality, _)| Self::quality_score(quality))
            .map(|(_, url)| url.clone())
    }

    /// Collects candidate video URLs from the page's `flashvars` player
    /// config and from the download popup, then returns the best-quality one.
    fn extract_source_url(html: &str) -> Option<String> {
        let flashvars_regex = Self::regex(r#"(?s)var\s+flashvars\s*=\s*\{(.*?)\};"#)?;
        let value_regex = |key: &str| Self::regex(&format!(r#"{key}:\s*'((?:\\'|[^'])*)'"#));
        let mut seen = HashMap::<String, String>::new();
        if let Some(flashvars) = flashvars_regex
            .captures(html)
            .and_then(|value| value.get(1))
            .map(|value| value.as_str().to_string())
        {
            for key in ["video_alt_url2", "video_alt_url", "video_url"] {
                let Some(url_regex) = value_regex(key) else {
                    continue;
                };
                let Some(text_regex) = value_regex(&format!("{key}_text")) else {
                    continue;
                };
                let Some(url) = Self::extract_js_value(&flashvars, &url_regex) else {
                    continue;
                };
                let normalized = Self::normalize_url(&url);
                if normalized.is_empty() {
                    continue;
                }
                // Prefer the player's own quality label; fall back to
                // sniffing the quality out of the URL itself.
                let quality = Self::extract_js_value(&flashvars, &text_regex)
                    .filter(|value| !value.is_empty())
                    .unwrap_or_else(|| Self::quality_from_url(&normalized));
                seen.entry(quality).or_insert(normalized);
            }
        }
        let document = Html::parse_document(html);
        let Some(download_selector) = Self::selector("#download_popup a[href*='/get_file/']")
        else {
            // Selector failed to parse; fall back to whatever flashvars gave us.
            return Self::best_source(&seen);
        };
        for element in document.select(&download_selector) {
            let href = element.value().attr("href").unwrap_or_default();
            let normalized = Self::normalize_url(href);
            if normalized.is_empty() {
                continue;
            }
            let quality = Self::quality_from_url(&normalized);
            seen.entry(quality).or_insert(normalized);
        }
        Self::best_source(&seen)
    }
}
impl crate::proxies::Proxy for HeavyfetishProxy {
    /// Fetches the detail page behind `url` and resolves the best direct
    /// video URL; returns an empty string on any failure (disallowed URL,
    /// failed fetch, or no extractable source).
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        let detail_url = match Self::normalize_detail_url(&url) {
            Some(value) => value,
            None => return String::new(),
        };
        let mut client = requester.get_ref().clone();
        let page = client.get(&detail_url, None).await.unwrap_or_default();
        if page.is_empty() {
            String::new()
        } else {
            Self::extract_source_url(&page).unwrap_or_default()
        }
    }
}

133
src/proxies/hqporner.rs Normal file
View File

@@ -0,0 +1,133 @@
use ntex::web;
use regex::Regex;
use url::Url;
use crate::util::requester::Requester;
/// Proxy that resolves hqporner.com `/hdporn/` detail pages to direct
/// video-file URLs via the site's alt-player endpoint. Stateless.
#[derive(Debug, Clone)]
pub struct HqpornerProxy {}
impl HqpornerProxy {
    pub fn new() -> Self {
        Self {}
    }

    /// Turns a proxied endpoint into a canonical `https://` detail-page URL.
    ///
    /// Returns `None` for empty input or for anything that is not an allowed
    /// hqporner.com `/hdporn/` URL.
    fn normalize_detail_url(endpoint: &str) -> Option<String> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        let detail_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
            endpoint.to_string()
        } else {
            format!("https://{}", endpoint.trim_start_matches('/'))
        };
        Self::is_allowed_detail_url(&detail_url).then_some(detail_url)
    }

    /// Accepts only `https` URLs on hqporner.com (optionally `www.`)
    /// whose path starts with `/hdporn/`.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(url) = Url::parse(url).ok() else {
            return false;
        };
        if url.scheme() != "https" {
            return false;
        }
        let Some(host) = url.host_str() else {
            return false;
        };
        (host == "hqporner.com" || host == "www.hqporner.com") && url.path().starts_with("/hdporn/")
    }

    /// Cleans a scraped URL: un-escapes JS `\/` sequences (the player markup
    /// can arrive JS-escaped — `extract_source_url` already handles escaped
    /// `src=\"` attributes, and the sibling heavyfetish proxy does the same
    /// un-escaping), resolves protocol-relative and site-relative forms, and
    /// upgrades `http://` to `https://`. Empty input yields an empty string.
    fn normalize_url(raw: &str) -> String {
        let value = raw.trim().replace("\\/", "/");
        if value.is_empty() {
            return String::new();
        }
        if value.starts_with("//") {
            return format!("https:{value}");
        }
        if value.starts_with('/') {
            return format!("https://www.hqporner.com{value}");
        }
        if value.starts_with("http://") {
            return value.replacen("http://", "https://", 1);
        }
        value
    }

    /// Compiles a regex, swallowing compile errors into `None`.
    fn regex(value: &str) -> Option<Regex> {
        Regex::new(value).ok()
    }

    /// Finds the inline-player AJAX path embedded in the detail page
    /// (`url: '/blocks/altplayer.php?i=…'`) and returns it as an absolute URL.
    fn extract_player_url(detail_html: &str) -> Option<String> {
        let path = detail_html
            .split("url: '/blocks/altplayer.php?i=")
            .nth(1)
            .and_then(|s| s.split('\'').next())?;
        Some(Self::normalize_url(&format!(
            "/blocks/altplayer.php?i={path}"
        )))
    }

    /// Pulls the first usable `<source>` URL out of the alt-player response,
    /// handling both JS-escaped (`src=\"…\"`) and plain (`src="…"`)
    /// attributes; falls back to a regex sweep over the whole payload.
    fn extract_source_url(player_html: &str) -> Option<String> {
        for source in player_html.split("<source ").skip(1) {
            let src = source
                .split("src=\\\"")
                .nth(1)
                .and_then(|s| s.split("\\\"").next())
                .or_else(|| {
                    source
                        .split("src=\"")
                        .nth(1)
                        .and_then(|s| s.split('"').next())
                })
                .unwrap_or_default();
            let url = Self::normalize_url(src);
            if !url.is_empty() {
                return Some(url);
            }
        }
        let source_regex = Self::regex(r#"src=\\\"([^\\"]+)\\\""#)?;
        source_regex
            .captures(player_html)
            .and_then(|caps| caps.get(1))
            .map(|m| Self::normalize_url(m.as_str()))
            .filter(|value| !value.is_empty())
    }
}
impl crate::proxies::Proxy for HqpornerProxy {
    /// Resolves a direct video URL in two hops: fetch the detail page,
    /// locate the alt-player endpoint inside it, fetch that, and scrape its
    /// `<source>` URL. Returns an empty string on any failure.
    async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
        let detail_url = match Self::normalize_detail_url(&url) {
            Some(value) => value,
            None => return String::new(),
        };
        let mut client = requester.get_ref().clone();
        // Same referer is sent on both hops — presumably expected by the
        // site; TODO confirm it is actually required.
        let headers = vec![("Referer".to_string(), "https://hqporner.com/".to_string())];
        let detail_html = client
            .get_with_headers(&detail_url, headers.clone(), None)
            .await
            .unwrap_or_default();
        if detail_html.is_empty() {
            return String::new();
        }
        let player_url = match Self::extract_player_url(&detail_html) {
            Some(value) => value,
            None => return String::new(),
        };
        let player_html = client
            .get_with_headers(&player_url, headers, None)
            .await
            .unwrap_or_default();
        if player_html.is_empty() {
            return String::new();
        }
        Self::extract_source_url(&player_html).unwrap_or_default()
    }
}

View File

@@ -1,14 +1,19 @@
use crate::proxies::doodstream::DoodstreamProxy;
use crate::proxies::heavyfetish::HeavyfetishProxy;
use crate::proxies::hqporner::HqpornerProxy;
use crate::proxies::pornhd3x::Pornhd3xProxy;
use ntex::web;
use crate::proxies::pimpbunny::PimpbunnyProxy;
use crate::proxies::porndish::PorndishProxy;
use crate::proxies::shooshtime::ShooshtimeProxy;
use crate::proxies::spankbang::SpankbangProxy;
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
pub mod doodstream;
pub mod hanimecdn;
pub mod heavyfetish;
pub mod hqporner;
pub mod hqpornerthumb;
pub mod javtiful;
pub mod noodlemagazine;
@@ -31,6 +36,9 @@ pub enum AnyProxy {
Pimpbunny(PimpbunnyProxy),
Porndish(PorndishProxy),
Spankbang(SpankbangProxy),
Shooshtime(ShooshtimeProxy),
Hqporner(HqpornerProxy),
Heavyfetish(HeavyfetishProxy),
}
pub trait Proxy {
@@ -47,6 +55,9 @@ impl Proxy for AnyProxy {
AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await,
AnyProxy::Porndish(p) => p.get_video_url(url, requester).await,
AnyProxy::Spankbang(p) => p.get_video_url(url, requester).await,
AnyProxy::Shooshtime(p) => p.get_video_url(url, requester).await,
AnyProxy::Hqporner(p) => p.get_video_url(url, requester).await,
AnyProxy::Heavyfetish(p) => p.get_video_url(url, requester).await,
}
}
}

View File

@@ -18,6 +18,10 @@ struct SourceCandidate {
pub struct ShooshtimeProxy {}
impl ShooshtimeProxy {
pub fn new() -> Self {
Self {}
}
fn normalize_detail_request(endpoint: &str) -> Option<(String, Option<String>)> {
let endpoint = endpoint.trim().trim_start_matches('/');
if endpoint.is_empty() {
@@ -191,6 +195,22 @@ impl ShooshtimeProxy {
}
}
impl crate::proxies::Proxy for ShooshtimeProxy {
/// Resolves a direct video URL for a shooshtime endpoint. The endpoint is
/// split by `normalize_detail_request` into a detail URL plus an optional
/// second value — presumably a quality hint, given `select_source_url`
/// consumes it (TODO confirm against that helper). Returns an empty string
/// on any failure (bad endpoint, failed fetch, or no matching source).
async fn get_video_url(&self, url: String, requester: web::types::State<Requester>) -> String {
let Some((detail_url, quality)) = Self::normalize_detail_request(&url) else {
return String::new();
};
// Clone out of the shared state so this handler owns a mutable client.
let mut requester = requester.get_ref().clone();
let html = requester.get(&detail_url, None).await.unwrap_or_default();
if html.is_empty() {
return String::new();
}
Self::select_source_url(&html, quality.as_deref()).unwrap_or_default()
}
}
pub async fn serve_media(
req: HttpRequest,
requester: web::types::State<Requester>,

View File

@@ -1,10 +1,13 @@
use ntex::web::{self, HttpRequest};
use crate::proxies::doodstream::DoodstreamProxy;
use crate::proxies::heavyfetish::HeavyfetishProxy;
use crate::proxies::hqporner::HqpornerProxy;
use crate::proxies::javtiful::JavtifulProxy;
use crate::proxies::pimpbunny::PimpbunnyProxy;
use crate::proxies::porndish::PorndishProxy;
use crate::proxies::pornhd3x::Pornhd3xProxy;
use crate::proxies::shooshtime::ShooshtimeProxy;
use crate::proxies::spankbang::SpankbangProxy;
use crate::proxies::sxyprn::SxyprnProxy;
use crate::proxies::*;
@@ -36,6 +39,16 @@ pub fn config(cfg: &mut web::ServiceConfig) {
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/hqporner/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/heavyfetish/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/pornhd3x/{endpoint}*")
.route(web::post().to(proxy2redirect))
@@ -43,6 +56,11 @@ pub fn config(cfg: &mut web::ServiceConfig) {
)
.service(
web::resource("/shooshtime/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service(
web::resource("/shooshtime-media/{endpoint}*")
.route(web::post().to(crate::proxies::shooshtime::serve_media))
.route(web::get().to(crate::proxies::shooshtime::serve_media)),
)
@@ -108,7 +126,10 @@ fn get_proxy(proxy: &str) -> Option<AnyProxy> {
"doodstream" => Some(AnyProxy::Doodstream(DoodstreamProxy::new())),
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
"javtiful" => Some(AnyProxy::Javtiful(JavtifulProxy::new())),
"hqporner" => Some(AnyProxy::Hqporner(HqpornerProxy::new())),
"heavyfetish" => Some(AnyProxy::Heavyfetish(HeavyfetishProxy::new())),
"pornhd3x" => Some(AnyProxy::Pornhd3x(Pornhd3xProxy::new())),
"shooshtime" => Some(AnyProxy::Shooshtime(ShooshtimeProxy::new())),
"pimpbunny" => Some(AnyProxy::Pimpbunny(PimpbunnyProxy::new())),
"porndish" => Some(AnyProxy::Porndish(PorndishProxy::new())),
"spankbang" => Some(AnyProxy::Spankbang(SpankbangProxy::new())),

View File

@@ -1,7 +1,7 @@
use std::{collections::HashMap, env, sync::Arc};
use serde_json::json;
use serde_json::Value;
use serde_json::json;
use tokio::sync::Mutex;
use wreq::{Client, Proxy};
use wreq_util::Emulation;
@@ -118,7 +118,9 @@ impl Flaresolverr {
}
async fn create_session(&self) -> Result<String, Box<dyn std::error::Error + Send + Sync>> {
let body = self.post_payload(json!({ "cmd": "sessions.create" })).await?;
let body = self
.post_payload(json!({ "cmd": "sessions.create" }))
.await?;
let session = body
.get("session")
.and_then(Value::as_str)