Compare commits

..

9 Commits

Author SHA1 Message Date
Simon
e3d62524be removed debug print 2026-05-06 13:50:47 +00:00
Simon
5e5786010a doodstream and lulustream in sxyprn integrated
Co-authored-by: Copilot <copilot@github.com>
2026-05-06 11:17:26 +00:00
Simon
9e8f326518 sxyprn format changes vidara 2026-05-05 19:11:26 +00:00
Simon
c4292c2ffc blowjobspro launch 2026-05-05 18:41:05 +00:00
Simon
3da09dc561 erome launch 2026-05-05 14:20:47 +00:00
Simon
d6fa364b28 youporn 2026-05-05 13:53:01 +00:00
Simon
01306c508a vidara and cargo updates 2026-05-05 12:25:43 +00:00
Simon
8ae0fcb544 vidara proxy for sxyprn 2026-05-05 08:58:27 +00:00
Simon
60d29ca905 sxyprn fix 2026-05-04 16:18:54 +00:00
20 changed files with 2122 additions and 312 deletions

View File

@@ -15,25 +15,25 @@ env_logger = "0.11.8"
error-chain = "0.12.4" error-chain = "0.12.4"
futures = "0.3.31" futures = "0.3.31"
htmlentity = "1.3.2" htmlentity = "1.3.2"
ntex = { version = "2.15.1", features = ["tokio"] } ntex = { version = "3", features = ["tokio"] }
ntex-files = "2.0.0" ntex-files = "3"
serde = "1.0.228" serde = "1.0.228"
serde_json = "1.0.145" serde_json = "1.0.145"
tokio = { version = "1.49", features = ["full"] } tokio = { version = "1.49", features = ["full"] }
wreq = { version = "6.0.0-rc.26", features = ["cookies", "multipart", "json"] } wreq = { version = "5", features = ["cookies", "multipart", "json"] }
wreq-util = "3.0.0-rc.10" wreq-util = "2"
percent-encoding = "2.3.2" percent-encoding = "2.3.2"
capitalize = "0.3.4" capitalize = "0.3.4"
url = "2.5.7" url = "2.5.7"
base64 = "0.22.1" base64 = "0.22.1"
scraper = "0.24.0" scraper = "0.26.0"
once_cell = "1.21.3" once_cell = "1.21.3"
rustc-hash = "2.1.1" rustc-hash = "2.1.1"
async-trait = "0.1" async-trait = "0.1"
regex = "1.12.2" regex = "1.12.2"
titlecase = "3.6.0" titlecase = "3.6.0"
dashmap = "6.1.0" dashmap = "6.1.0"
lru = "0.16.3" lru = "0.18.0"
rand = "0.10.0" rand = "0.10.0"
chrono = "0.4.44" chrono = "0.4.44"
md5 = "0.8.0" md5 = "0.8.0"

View File

@@ -36,6 +36,11 @@ const PROVIDERS: &[ProviderDef] = &[
module: "pornhub", module: "pornhub",
ty: "PornhubProvider", ty: "PornhubProvider",
}, },
ProviderDef {
id: "youporn",
module: "youporn",
ty: "YoupornProvider",
},
ProviderDef { ProviderDef {
id: "pornhd3x", id: "pornhd3x",
module: "pornhd3x", module: "pornhd3x",
@@ -251,6 +256,16 @@ const PROVIDERS: &[ProviderDef] = &[
module: "hsex", module: "hsex",
ty: "HsexProvider", ty: "HsexProvider",
}, },
ProviderDef {
id: "blowjobspro",
module: "blowjobspro",
ty: "BlowjobsproProvider",
},
ProviderDef {
id: "erome",
module: "erome",
ty: "EromeProvider",
},
ProviderDef { ProviderDef {
id: "sextb", id: "sextb",
module: "sextb", module: "sextb",

View File

@@ -9,7 +9,9 @@ This is the current implementation inventory as of this snapshot of the repo. Us
| `all` | `meta-search` | no | no | Aggregates all compiled providers. | | `all` | `meta-search` | no | no | Aggregates all compiled providers. |
| `archivebate` | `live-cams` | no | no | Livewire-backed cam archive listings with platform/gender/profile shortcuts. | | `archivebate` | `live-cams` | no | no | Livewire-backed cam archive listings with platform/gender/profile shortcuts. |
| `beeg` | `mainstream-tube` | no | no | Basic mainstream tube pattern. | | `beeg` | `mainstream-tube` | no | no | Basic mainstream tube pattern. |
| `blowjobspro` | `mainstream-tube` | no | no | KVS-style HTML provider with async search pagination and category shortcut routing. |
| `chaturbate` | `live-cams` | no | no | Live cam channel. | | `chaturbate` | `live-cams` | no | no | Live cam channel. |
| `erome` | `amateur-homemade` | no | no | HTML album scraper with hot/new feeds, keyword search, and uploader-slug shortcuts (`uploader:<name>`). |
| `freepornvideosxxx` | `studio-network` | no | no | Studio-style scraper. | | `freepornvideosxxx` | `studio-network` | no | no | Studio-style scraper. |
| `freeuseporn` | `fetish-kink` | no | no | Fetish archive pattern. | | `freeuseporn` | `fetish-kink` | no | no | Fetish archive pattern. |
| `hanime` | `hentai-animation` | no | yes | Uses proxied CDN/thumb handling. | | `hanime` | `hentai-animation` | no | yes | Uses proxied CDN/thumb handling. |
@@ -57,6 +59,7 @@ This is the current implementation inventory as of this snapshot of the repo. Us
| `xxthots` | `onlyfans` | no | no | OnlyFans-like metadata example. | | `xxthots` | `onlyfans` | no | no | OnlyFans-like metadata example. |
| `yesporn` | `mainstream-tube` | no | no | Preview format examples. | | `yesporn` | `mainstream-tube` | no | no | Preview format examples. |
| `youjizz` | `mainstream-tube` | no | no | Mainstream tube provider. | | `youjizz` | `mainstream-tube` | no | no | Mainstream tube provider. |
| `youporn` | `mainstream-tube` | no | no | Pornhub-network HTML provider with watch-page playback URLs and tag/channel/pornstar shortcuts. |
## Proxy Routes ## Proxy Routes

View File

@@ -599,7 +599,7 @@ async fn videos_post(
} }
for video in video_items.iter_mut() { for video in video_items.iter_mut() {
if video.duration <= 120 { if video.duration <= 120 && video.duration > 0{
let mut preview_url = video.url.clone(); let mut preview_url = video.url.clone();
if let Some(formats) = &video.formats { if let Some(formats) = &video.formats {
if let Some(first) = formats.first() { if let Some(first) = formats.first() {
@@ -657,7 +657,6 @@ async fn videos_post(
for video in video_items.iter_mut() { for video in video_items.iter_mut() {
video.id = format!("{}:{}", channel, video.id); video.id = format!("{}:{}", channel, video.id);
println!("Video ID set to: {}", video.id);
} }
// There is a bug in Hottub38 that makes the client error for a 403-url even though formats work fine // There is a bug in Hottub38 that makes the client error for a 403-url even though formats work fine

View File

@@ -85,28 +85,32 @@ async fn main() -> std::io::Result<()> {
crate::flow_debug!("http server binding addr=0.0.0.0:18080 workers=8"); crate::flow_debug!("http server binding addr=0.0.0.0:18080 workers=8");
web::HttpServer::new(move || { web::HttpServer::new(move || {
web::App::new() let pool = pool.clone();
.state(pool.clone()) let cache = cache.clone();
.state(cache.clone()) let requester = requester.clone();
.state(requester.clone()) async move {
.wrap(web::middleware::Logger::default()) web::App::new()
.service(web::scope("/api").configure(api::config)) .state(pool)
.service(web::scope("/proxy").configure(proxy::config)) .state(cache)
.service( .state(requester)
web::resource("/").route(web::get().to(|req: web::HttpRequest| async move { .middleware(web::middleware::Logger::default())
let host = match std::env::var("DOMAIN") { .service(web::scope("/api").configure(api::config))
Ok(d) => d, .service(web::scope("/proxy").configure(proxy::config))
Err(_) => req.connection_info().host().to_string(), .service(
}; web::resource("/").route(web::get().to(|req: web::HttpRequest| async move {
let source_forward_header = format!("hottub://source?url={}", host); let host = match std::env::var("DOMAIN") {
web::HttpResponse::Found() Ok(d) => d,
.header("Location", source_forward_header) Err(_) => req.connection_info().host().to_string(),
.finish() };
})), let source_forward_header = format!("hottub://source?url={}", host);
) web::HttpResponse::Found()
.service(fs::Files::new("/", "static").index_file("index.html")) .header("Location", source_forward_header)
.finish()
})),
)
.service(fs::Files::new("/", "static").index_file("index.html"))
}
}) })
.workers(8)
// .bind_openssl(("0.0.0.0", 18080), builder)? // .bind_openssl(("0.0.0.0", 18080), builder)?
.bind(("0.0.0.0", 18080))? .bind(("0.0.0.0", 18080))?
.run() .run()

View File

@@ -0,0 +1,531 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{
Provider, report_provider_error, report_provider_error_background, requester_or_default,
};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use percent_encoding::{NON_ALPHANUMERIC, utf8_percent_encode};
use regex::Regex;
use scraper::{Html, Selector};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::thread;
use wreq::Version;
/// Registry metadata consumed by the provider loader (group + tag taxonomy).
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["tube", "kvs", "blowjob"],
    };
// Base site URL (no trailing slash) all relative links are resolved against.
const BASE_URL: &str = "https://blowjobs.pro";
// Provider identifier used for channel ids, error reports, and video ids.
const CHANNEL_ID: &str = "blowjobspro";
// Browser-like User-Agent sent with every HTML request.
const FIREFOX_UA: &str =
    "Mozilla/5.0 (X11; Linux x86_64; rv:147.0) Gecko/20100101 Firefox/147.0";
// Accept header mirroring a real browser's HTML navigation request.
const HTML_ACCEPT: &str =
    "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8";
// Module-local error type: wraps I/O errors and adds a `Parse` variant for
// selector/regex/extraction failures while scraping.
error_chain! {
    foreign_links {
        Io(std::io::Error);
    }
    errors {
        Parse(msg: String) {
            description("parse error")
            display("parse error: {}", msg)
        }
    }
}
/// Provider for blowjobs.pro, a KVS-style tube site scraped over HTML.
#[derive(Debug, Clone)]
pub struct BlowjobsproProvider {
    // Base site URL; normally `BASE_URL`.
    url: String,
    // Category filter options shown in the channel; seeded with "All" and
    // replaced by a background load after construction.
    categories: Arc<RwLock<Vec<FilterOption>>>,
    // Normalized category title -> category archive URL, used to resolve
    // category shortcuts in queries and options.
    category_map: Arc<RwLock<HashMap<String, String>>>,
}
/// Listing feed resolved from the request's sort/query/category inputs.
#[derive(Debug, Clone)]
enum Target {
    // `/latest-updates/` archive.
    Latest,
    // `/most-popular/` archive.
    MostViewed,
    // `/top-rated/` archive.
    TopRated,
    // Free-text search via `/search/<slug>/`.
    Search { query: String },
    // Direct category archive URL resolved from the category map.
    Category { url: String },
}
impl BlowjobsproProvider {
    /// Builds the provider and starts a background load of the site's
    /// category archive so that category shortcuts resolve once it finishes.
    pub fn new() -> Self {
        let provider = Self {
            url: BASE_URL.to_string(),
            // Seeded with "All" so the channel is renderable before the
            // background category fetch completes.
            categories: Arc::new(RwLock::new(vec![FilterOption {
                id: "all".to_string(),
                title: "All".to_string(),
            }])),
            category_map: Arc::new(RwLock::new(HashMap::new())),
        };
        provider.spawn_initial_load();
        provider
    }
    /// Spawns a dedicated OS thread running its own single-threaded tokio
    /// runtime to fetch categories without blocking construction (which may
    /// happen outside any async context). Failures are reported via the
    /// background error channel and otherwise swallowed.
    fn spawn_initial_load(&self) {
        let url = self.url.clone();
        let categories = Arc::clone(&self.categories);
        let category_map = Arc::clone(&self.category_map);
        thread::spawn(move || {
            let runtime = match tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
            {
                Ok(runtime) => runtime,
                Err(error) => {
                    report_provider_error_background(
                        CHANNEL_ID,
                        "spawn_initial_load.runtime_build",
                        &error.to_string(),
                    );
                    return;
                }
            };
            runtime.block_on(async move {
                if let Err(error) = Self::load_categories(&url, categories, category_map).await {
                    report_provider_error_background(CHANNEL_ID, "load_categories", &error.to_string());
                }
            });
        });
    }
    /// Assembles the channel descriptor, including whatever categories have
    /// been loaded so far. A poisoned categories lock degrades to an empty
    /// category list rather than panicking.
    fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
        let categories = self
            .categories
            .read()
            .map(|value| value.clone())
            .unwrap_or_default();
        Channel {
            id: CHANNEL_ID.to_string(),
            name: "Blowjobs.pro".to_string(),
            description: "Blowjobs.pro KVS listings with latest, most viewed, top rated, search, and category shortcuts."
                .to_string(),
            premium: false,
            favicon: "https://www.google.com/s2/favicons?sz=64&domain=blowjobs.pro".to_string(),
            status: "active".to_string(),
            categories: categories.iter().map(|value| value.title.clone()).collect(),
            options: vec![
                ChannelOption {
                    id: "sort".to_string(),
                    title: "Sort".to_string(),
                    description: "Browse feed ordering.".to_string(),
                    systemImage: "list.number".to_string(),
                    colorName: "blue".to_string(),
                    options: vec![
                        FilterOption {
                            id: "new".to_string(),
                            title: "Latest".to_string(),
                        },
                        FilterOption {
                            id: "popular".to_string(),
                            title: "Most Viewed".to_string(),
                        },
                        FilterOption {
                            id: "rated".to_string(),
                            title: "Top Rated".to_string(),
                        },
                    ],
                    multiSelect: false,
                },
                ChannelOption {
                    id: "categories".to_string(),
                    title: "Categories".to_string(),
                    description: "Jump directly to category archives.".to_string(),
                    systemImage: "square.grid.2x2".to_string(),
                    colorName: "orange".to_string(),
                    options: categories,
                    multiSelect: false,
                },
            ],
            nsfw: true,
            cacheDuration: Some(1800),
        }
    }
    /// Parses a CSS selector, converting failure into this module's `Error`.
    fn selector(value: &str) -> Result<Selector> {
        Selector::parse(value)
            .map_err(|error| Error::from(format!("selector `{value}` parse failed: {error}")))
    }
    /// Compiles a regex, converting failure into this module's `Error`.
    fn regex(value: &str) -> Result<Regex> {
        Regex::new(value).map_err(|error| Error::from(format!("regex `{value}` failed: {error}")))
    }
    /// Decodes HTML entities; falls back to the raw input on decode failure.
    fn decode_html(text: &str) -> String {
        decode(text.as_bytes())
            .to_string()
            .unwrap_or_else(|_| text.to_string())
    }
    /// Collapses all runs of whitespace to single spaces.
    fn collapse_whitespace(text: &str) -> String {
        text.split_whitespace().collect::<Vec<_>>().join(" ")
    }
    /// Produces the canonical lookup key for a category title: leading `#`
    /// stripped, `_`/`-` treated as spaces, whitespace collapsed, lowercased.
    fn normalize_title(title: &str) -> String {
        title
            .trim()
            .trim_start_matches('#')
            .replace(['_', '-'], " ")
            .split_whitespace()
            .collect::<Vec<_>>()
            .join(" ")
            .to_ascii_lowercase()
    }
    /// Resolves a possibly-relative href against the provider base URL.
    fn normalize_url(&self, url: &str) -> String {
        let trimmed = url.trim();
        if trimmed.is_empty() {
            return String::new();
        }
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
        // Scheme-relative URL: assume https.
        if trimmed.starts_with("//") {
            return format!("https:{trimmed}");
        }
        if trimmed.starts_with('/') {
            return format!("{}{}", self.url, trimmed);
        }
        format!("{}/{}", self.url, trimmed.trim_start_matches("./"))
    }
    /// Browser-like header set for HTML page requests.
    fn html_headers(referer: &str) -> Vec<(String, String)> {
        vec![
            ("User-Agent".to_string(), FIREFOX_UA.to_string()),
            ("Accept".to_string(), HTML_ACCEPT.to_string()),
            ("Referer".to_string(), referer.to_string()),
        ]
    }
    /// Builds the hyphen-joined, percent-encoded slug the site uses in
    /// `/search/<slug>/` URLs.
    fn build_search_path(query: &str) -> String {
        query
            .split_whitespace()
            .map(|part| utf8_percent_encode(part, NON_ALPHANUMERIC).to_string())
            .collect::<Vec<_>>()
            .join("-")
    }
    /// Appends pagination to an archive URL: page 1 is the bare archive
    /// (trailing slash), later pages use the `/<page>/` suffix.
    fn build_archive_page_url(archive_url: &str, page: u16) -> String {
        if page <= 1 {
            return archive_url.trim_end_matches('/').to_string() + "/";
        }
        format!("{}/{page}/", archive_url.trim_end_matches('/'))
    }
    /// Maps a `Target` plus page number to the concrete listing URL.
    /// Note: search URLs ignore `page` here; search pagination is handled
    /// separately via the async block endpoint (see `search_page_from_async`).
    fn build_target_url(&self, target: &Target, page: u16) -> String {
        match target {
            Target::Latest => {
                Self::build_archive_page_url(&format!("{}/latest-updates/", self.url), page)
            }
            Target::MostViewed => {
                Self::build_archive_page_url(&format!("{}/most-popular/", self.url), page)
            }
            Target::TopRated => {
                Self::build_archive_page_url(&format!("{}/top-rated/", self.url), page)
            }
            Target::Category { url } => Self::build_archive_page_url(url, page),
            Target::Search { query } => {
                let normalized = Self::build_search_path(query);
                format!("{}/search/{normalized}/", self.url)
            }
        }
    }
    /// Resolves the browse target with this precedence: explicit category
    /// option > `category:`-prefixed query > query that names a known
    /// category > free-text search > sort selection.
    fn target_from_request(
        &self,
        sort: &str,
        query: Option<&str>,
        category: Option<&str>,
    ) -> Target {
        let category_value = category.unwrap_or("").trim();
        if !category_value.is_empty() && !category_value.eq_ignore_ascii_case("all") {
            if let Some(url) = self.resolve_category(category_value) {
                return Target::Category { url };
            }
        }
        if let Some(raw_query) = query {
            let trimmed = raw_query.trim();
            if !trimmed.is_empty() {
                if let Some(value) = trimmed.strip_prefix("category:")
                    && let Some(url) = self.resolve_category(value)
                {
                    return Target::Category { url };
                }
                // A query matching a known category title browses that
                // category instead of searching.
                if let Some(url) = self.resolve_category(trimmed) {
                    return Target::Category { url };
                }
                return Target::Search {
                    query: trimmed.to_string(),
                };
            }
        }
        match sort {
            "popular" => Target::MostViewed,
            "rated" => Target::TopRated,
            _ => Target::Latest,
        }
    }
    /// Looks up a category archive URL by normalized title; `None` when the
    /// title is unknown or the map lock is poisoned.
    fn resolve_category(&self, value: &str) -> Option<String> {
        let normalized = Self::normalize_title(value);
        if normalized.is_empty() {
            return None;
        }
        self.category_map
            .read()
            .ok()
            .and_then(|map| map.get(&normalized).cloned())
    }
    /// Fetches a page as HTML, forcing HTTP/1.1 with browser-like headers.
    async fn fetch_html(
        &self,
        options: &ServerOptions,
        url: &str,
        referer: &str,
    ) -> Result<String> {
        let mut requester = requester_or_default(options, CHANNEL_ID, "fetch_html");
        requester
            .get_with_headers(url, Self::html_headers(referer), Some(Version::HTTP_11))
            .await
            .map_err(|error| Error::from(format!("request failed for {url}: {error}")))
    }
    /// Builds the KVS async-block URL used to paginate search results
    /// (page 1 is served by the plain search page, so returns `None` then).
    fn search_page_from_async(&self, query: &str, page: u16) -> Option<String> {
        if page <= 1 {
            return None;
        }
        let query_value = query.trim();
        if query_value.is_empty() {
            return None;
        }
        // The async endpoint's `from_videos+from_albums` parameter carries
        // the page index directly.
        let from = page;
        let encoded_query = utf8_percent_encode(query_value, NON_ALPHANUMERIC).to_string();
        Some(format!(
            "{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&q={encoded_query}&category_ids=&sort_by=&from_videos%2Bfrom_albums={from}",
            self.url,
            Self::build_search_path(query_value)
        ))
    }
    /// Fetches the listing HTML for a target/page, routing paginated
    /// searches through the async block endpoint and everything else through
    /// the regular archive URLs.
    async fn fetch_listing_html(
        &self,
        options: &ServerOptions,
        target: &Target,
        page: u16,
    ) -> Result<String> {
        if let Target::Search { query } = target
            && page > 1
            && let Some(async_url) = self.search_page_from_async(query, page)
        {
            return self
                .fetch_html(options, &async_url, &format!("{}/search/{}/", self.url, Self::build_search_path(query)))
                .await;
        }
        let page_url = self.build_target_url(target, page);
        self.fetch_html(options, &page_url, &self.url).await
    }
    /// Scrapes video cards out of a listing page. Works on raw HTML split at
    /// each `<div class="item">` segment and regex-extracts href/title/thumb/
    /// duration/views/tags per segment; cards without a video href or title
    /// are skipped.
    /// NOTE(review): the regexes are recompiled on every call — hoisting them
    /// into lazily-initialized statics would avoid that if profiling shows it.
    fn parse_listing_html(&self, html: &str) -> Result<Vec<VideoItem>> {
        let href_re = Self::regex(r#"href="([^"]+/videos/\d+/[^"]*)""#)?;
        let title_re = Self::regex(r#"title="([^"]+)""#)?;
        let thumb_re = Self::regex(r#"(?:data-original|src)="([^"]+/contents/videos_screenshots/[^"]+)""#)?;
        let duration_re = Self::regex(r#"<div class="duration">\s*([^<]+)\s*</div>"#)?;
        let views_re = Self::regex(r#"<div class="views">\s*([^<]+)\s*</div>"#)?;
        let tag_re = Self::regex(r#"<a href="[^"]*/categories/[^"]*"[^>]*>\s*([^<]+)\s*</a>"#)?;
        let id_re = Self::regex(r"/videos/(\d+)/")?;
        let mut videos = Vec::new();
        for segment in html.split("<div class=\"item\">").skip(1) {
            let Some(href_caps) = href_re.captures(segment) else {
                continue;
            };
            let Some(raw_url) = href_caps.get(1).map(|m| m.as_str()) else {
                continue;
            };
            let url = self.normalize_url(raw_url);
            // Numeric id from the URL; falls back to the full URL so the id
            // is never empty.
            let id = id_re
                .captures(&url)
                .and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
                .unwrap_or_else(|| url.clone());
            let title = title_re
                .captures(segment)
                .and_then(|caps| caps.get(1).map(|m| Self::decode_html(m.as_str())))
                .unwrap_or_default();
            if title.is_empty() {
                continue;
            }
            let thumb = thumb_re
                .captures(segment)
                .and_then(|caps| caps.get(1).map(|m| self.normalize_url(m.as_str())))
                .unwrap_or_default();
            // "HH:MM:SS"-style text converted to seconds; unparsable -> 0.
            let duration = duration_re
                .captures(segment)
                .and_then(|caps| caps.get(1).map(|m| m.as_str().trim().to_string()))
                .and_then(|value| parse_time_to_seconds(&value))
                .unwrap_or(0)
                .max(0) as u32;
            // Abbreviated counts like "1.2k" parsed to a number when present.
            let views = views_re
                .captures(segment)
                .and_then(|caps| caps.get(1).map(|m| m.as_str().trim().to_string()))
                .and_then(|value| parse_abbreviated_number(&value))
                .map(|value| value as u32);
            let tags = tag_re
                .captures_iter(segment)
                .filter_map(|caps| {
                    caps.get(1).map(|m| {
                        Self::decode_html(m.as_str())
                            .split_whitespace()
                            .collect::<Vec<_>>()
                            .join(" ")
                            .trim()
                            .to_string()
                    })
                })
                .filter(|value| !value.is_empty())
                .collect::<Vec<_>>();
            let mut item = VideoItem::new(
                id,
                title,
                url,
                CHANNEL_ID.to_string(),
                thumb,
                duration,
            );
            item.tags = Some(tags);
            item.views = views;
            videos.push(item);
        }
        Ok(videos)
    }
    /// Fetches `/categories/` and rebuilds both the channel filter options
    /// and the normalized-title -> URL map. Runs on the background thread
    /// started by `spawn_initial_load`, so it uses a fresh `Requester`
    /// instead of request-scoped options.
    async fn load_categories(
        url: &str,
        categories: Arc<RwLock<Vec<FilterOption>>>,
        category_map: Arc<RwLock<HashMap<String, String>>>,
    ) -> Result<()> {
        let mut requester = crate::util::requester::Requester::new();
        let category_url = format!("{url}/categories/");
        let html = requester
            .get_with_headers(
                &category_url,
                Self::html_headers(url),
                Some(Version::HTTP_11),
            )
            .await
            .map_err(|error| Error::from(format!("category fetch failed: {error}")))?;
        let doc = Html::parse_document(&html);
        let link_selector = Self::selector("#list_categories_categories_list_items a.item-link")?;
        // Always keep "All" as the first option.
        let mut options = vec![FilterOption {
            id: "all".to_string(),
            title: "All".to_string(),
        }];
        let mut map = HashMap::new();
        for link in doc.select(&link_selector) {
            let Some(href) = link.value().attr("href") else {
                continue;
            };
            let raw_title = link
                .value()
                .attr("title")
                .unwrap_or("")
                .to_string();
            let title = Self::decode_html(&raw_title).trim().to_string();
            if title.is_empty() {
                continue;
            }
            let normalized = Self::normalize_title(&title);
            if normalized.is_empty() {
                continue;
            }
            options.push(FilterOption {
                id: normalized.clone(),
                title: title.clone(),
            });
            map.insert(normalized, href.to_string());
        }
        // Swap in the fresh data; poisoned locks are skipped silently.
        if let Ok(mut guard) = categories.write() {
            *guard = options;
        }
        if let Ok(mut guard) = category_map.write() {
            *guard = map;
        }
        Ok(())
    }
}
#[async_trait]
impl Provider for BlowjobsproProvider {
    /// Fetches one listing page for the resolved target and parses it into
    /// video items. Fetch or parse failures are reported through the
    /// provider error channel and yield an empty result instead of an error.
    async fn get_videos(
        &self,
        _cache: VideoCache,
        _pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        _per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // Unparsable or zero page input falls back to page 1.
        let requested_page = match page.parse::<u16>() {
            Ok(value) if value >= 1 => value,
            _ => 1,
        };
        let listing_target = self.target_from_request(
            sort.as_str(),
            query.as_deref(),
            options.categories.as_deref(),
        );
        let fetched = self
            .fetch_listing_html(&options, &listing_target, requested_page)
            .await;
        let body = match fetched {
            Ok(value) => value,
            Err(error) => {
                report_provider_error(
                    CHANNEL_ID,
                    "get_videos.fetch_listing_html",
                    &error.to_string(),
                )
                .await;
                return Vec::new();
            }
        };
        match self.parse_listing_html(&body) {
            Ok(items) => items,
            Err(error) => {
                report_provider_error(
                    CHANNEL_ID,
                    "get_videos.parse_listing_html",
                    &error.to_string(),
                )
                .await;
                Vec::new()
            }
        }
    }
    /// Returns this provider's channel descriptor.
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}

384
src/providers/erome.rs Normal file
View File

@@ -0,0 +1,384 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::parse_abbreviated_number;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use regex::Regex;
use scraper::{ElementRef, Html, Selector};
use url::form_urlencoded;
use wreq::Version;
/// Registry metadata consumed by the provider loader (group + tag taxonomy).
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "amateur-homemade",
        tags: &["amateur", "albums", "homemade"],
    };
// Base site URL (no trailing slash) all relative links are resolved against.
const BASE_URL: &str = "https://www.erome.com";
// Provider identifier used for channel ids, error reports, and uploader ids.
const CHANNEL_ID: &str = "erome";
// Module-local error type wrapping I/O and HTTP-client errors.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Provider scraping EroMe album listings over plain HTML.
#[derive(Debug, Clone)]
pub struct EromeProvider {
    // Base site URL; normally `BASE_URL`.
    url: String,
}
/// Listing feed resolved from the request's sort/query/site inputs.
#[derive(Debug, Clone)]
enum Target {
    // `/explore` hot feed.
    ExploreHot,
    // `/explore/new` feed.
    ExploreNew,
    // `/search?q=...`; `order_new` adds `o=new` ordering.
    Search { query: String, order_new: bool },
    // `/<slug>?t=posts` uploader profile page.
    UploaderPosts { slug: String },
}
impl EromeProvider {
    /// Creates the provider pointed at the production base URL.
    pub fn new() -> Self {
        Self {
            url: BASE_URL.to_string(),
        }
    }
    /// Builds the static channel descriptor. EroMe exposes no category list,
    /// only sort (hot/new) and an uploader-slug shortcut via the "sites"
    /// option.
    fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
        Channel {
            id: CHANNEL_ID.to_string(),
            name: "EroMe".to_string(),
            description:
                "EroMe album feed with hot/new routing, search, and uploader profile shortcuts."
                    .to_string(),
            premium: false,
            favicon: "https://www.google.com/s2/favicons?sz=64&domain=erome.com".to_string(),
            status: "active".to_string(),
            categories: vec![],
            options: vec![
                ChannelOption {
                    id: "sort".to_string(),
                    title: "Sort".to_string(),
                    description: "Browse EroMe hot or new feeds.".to_string(),
                    systemImage: "list.number".to_string(),
                    colorName: "blue".to_string(),
                    options: vec![
                        FilterOption {
                            id: "new".to_string(),
                            title: "New".to_string(),
                        },
                        FilterOption {
                            id: "hot".to_string(),
                            title: "Hot".to_string(),
                        },
                    ],
                    multiSelect: false,
                },
                ChannelOption {
                    id: "sites".to_string(),
                    title: "Uploader".to_string(),
                    description: "Jump directly to an uploader profile slug.".to_string(),
                    systemImage: "person.crop.square".to_string(),
                    colorName: "purple".to_string(),
                    options: vec![FilterOption {
                        id: "all".to_string(),
                        title: "All".to_string(),
                    }],
                    multiSelect: false,
                },
            ],
            nsfw: true,
            cacheDuration: Some(1800),
        }
    }
    // Infallible wrappers: selector/regex compile failures become `None`, so
    // parsing code skips rather than erroring.
    fn selector(value: &str) -> Option<Selector> {
        Selector::parse(value).ok()
    }
    fn regex(value: &str) -> Option<Regex> {
        Regex::new(value).ok()
    }
    /// Decodes HTML entities and collapses all whitespace (including
    /// non-breaking spaces) to single spaces.
    fn normalize_text(value: &str) -> String {
        decode(value.as_bytes())
            .to_string()
            .unwrap_or_else(|_| value.to_string())
            .replace('\u{a0}', " ")
            .split_whitespace()
            .collect::<Vec<_>>()
            .join(" ")
            .trim()
            .to_string()
    }
    /// Resolves a possibly-relative href against the provider base URL.
    fn normalize_url(&self, value: &str) -> String {
        let trimmed = value.trim();
        if trimmed.is_empty() {
            return String::new();
        }
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
        // Scheme-relative URL: assume https.
        if trimmed.starts_with("//") {
            return format!("https:{trimmed}");
        }
        format!(
            "{}/{}",
            self.url.trim_end_matches('/'),
            trimmed.trim_start_matches('/')
        )
    }
    /// Extracts the album GUID from an `/a/<guid>` album URL.
    /// NOTE(review): regex recompiled on every call; could be hoisted into a
    /// lazily-initialized static if profiling flags it.
    fn extract_album_guid_from_url(value: &str) -> Option<String> {
        let re = Self::regex(r#"/a/([A-Za-z0-9]+)"#)?;
        re.captures(value)
            .and_then(|caps| caps.get(1).map(|m| m.as_str().to_string()))
    }
    /// Recognizes `uploader:<name>` / `user:<name>` query shortcuts and
    /// returns the sanitized profile slug (ascii alphanumerics, `_`, `-`;
    /// leading `@` stripped). `None` when the query is not such a shortcut.
    fn uploader_slug_from_query(query: &str) -> Option<String> {
        let q = query.trim();
        if q.is_empty() {
            return None;
        }
        let lower = q.to_ascii_lowercase();
        let stripped = if let Some(rest) = lower.strip_prefix("uploader:") {
            rest.trim().to_string()
        } else if let Some(rest) = lower.strip_prefix("user:") {
            rest.trim().to_string()
        } else {
            return None;
        };
        let slug = stripped
            .trim_start_matches('@')
            .chars()
            .filter(|c| c.is_ascii_alphanumeric() || *c == '_' || *c == '-')
            .collect::<String>();
        (!slug.is_empty()).then_some(slug)
    }
    /// Resolves the browse target with this precedence: explicit uploader
    /// via the "sites" option > uploader query shortcut > free-text search >
    /// sort selection (new/latest -> new feed, otherwise hot).
    fn resolve_target(&self, query: &str, options: &ServerOptions, sort: &str) -> Target {
        if let Some(site) = options.sites.as_deref() {
            let normalized = site.trim();
            if !normalized.is_empty() && !normalized.eq_ignore_ascii_case("all") {
                // Sanitize to the slug character set the site accepts.
                let slug = normalized
                    .trim_start_matches('@')
                    .chars()
                    .filter(|c| c.is_ascii_alphanumeric() || *c == '_' || *c == '-')
                    .collect::<String>();
                if !slug.is_empty() {
                    return Target::UploaderPosts { slug };
                }
            }
        }
        if !query.trim().is_empty() {
            if let Some(slug) = Self::uploader_slug_from_query(query) {
                return Target::UploaderPosts { slug };
            }
            return Target::Search {
                query: query.trim().to_string(),
                order_new: matches!(sort, "new" | "latest"),
            };
        }
        if matches!(sort, "new" | "latest") {
            Target::ExploreNew
        } else {
            Target::ExploreHot
        }
    }
    /// Maps a `Target` plus page number (clamped to >= 1) to the concrete
    /// listing URL; page 1 of the explore feeds omits the `page` parameter.
    fn build_url_for_target(&self, target: &Target, page: u16) -> String {
        let page = page.max(1);
        match target {
            Target::ExploreHot => {
                if page == 1 {
                    format!("{}/explore", self.url)
                } else {
                    format!("{}/explore?page={page}", self.url)
                }
            }
            Target::ExploreNew => {
                if page == 1 {
                    format!("{}/explore/new", self.url)
                } else {
                    format!("{}/explore/new?page={page}", self.url)
                }
            }
            Target::Search { query, order_new } => {
                let mut serializer = form_urlencoded::Serializer::new(String::new());
                serializer.append_pair("q", query);
                if *order_new {
                    serializer.append_pair("o", "new");
                }
                serializer.append_pair("page", &page.to_string());
                format!("{}/search?{}", self.url, serializer.finish())
            }
            Target::UploaderPosts { slug } => {
                let mut serializer = form_urlencoded::Serializer::new(String::new());
                serializer.append_pair("t", "posts");
                serializer.append_pair("page", &page.to_string());
                format!("{}/{}?{}", self.url, slug, serializer.finish())
            }
        }
    }
    /// Normalized text of the first element matching `selector` under
    /// `parent`, or an empty string when there is no match.
    fn text_from_selector(parent: &ElementRef<'_>, selector: &Selector) -> String {
        parent
            .select(selector)
            .next()
            .map(|node| Self::normalize_text(&node.text().collect::<Vec<_>>().join(" ")))
            .unwrap_or_default()
    }
    /// Converts one `div.album` card into a `VideoItem` (albums have no
    /// duration, so it is reported as 0). Returns `None` when the card lacks
    /// a link, album GUID, or any usable title.
    /// NOTE(review): all five selectors are recompiled for every card; they
    /// could be built once per listing and shared if profiling flags this.
    fn parse_video_item(&self, card: &ElementRef<'_>) -> Option<VideoItem> {
        let link_selector = Self::selector("a.album-link[href]")?;
        let title_selector = Self::selector("a.album-title")?;
        let thumb_selector = Self::selector("img.album-thumbnail")?;
        let user_selector = Self::selector("span.album-user")?;
        let views_selector = Self::selector("span.album-bottom-views")?;
        let link = card.select(&link_selector).next()?;
        let href = link.value().attr("href")?;
        let album_url = self.normalize_url(href);
        let album_id = Self::extract_album_guid_from_url(&album_url)?;
        let title = Self::text_from_selector(card, &title_selector);
        // The link's `title` attribute is the fallback when the card has no
        // visible album-title text.
        let fallback_title = link
            .value()
            .attr("title")
            .map(Self::normalize_text)
            .unwrap_or_default();
        let final_title = if !title.is_empty() { title } else { fallback_title };
        if final_title.is_empty() {
            return None;
        }
        let thumb = card
            .select(&thumb_selector)
            .find_map(|node| {
                node.value()
                    .attr("src")
                    .or_else(|| node.value().attr("data-rotate-src"))
            })
            .map(|value| self.normalize_url(value))
            .unwrap_or_default();
        let uploader = Self::text_from_selector(card, &user_selector);
        // Sanitize the uploader name to the slug charset used in profile URLs.
        let uploader_slug = uploader
            .trim()
            .trim_start_matches('@')
            .chars()
            .filter(|c| c.is_ascii_alphanumeric() || *c == '_' || *c == '-')
            .collect::<String>();
        let views_text = Self::text_from_selector(card, &views_selector);
        // Commas/spaces in the rendered count are normalized before parsing
        // abbreviated values like "1.2K".
        let views = parse_abbreviated_number(views_text.replace(',', ".").replace(' ', "").as_str());
        let uploader_url = if uploader_slug.is_empty() {
            String::new()
        } else {
            format!("{}/{}?t=posts", self.url, uploader_slug)
        };
        let mut item = VideoItem::new(
            album_id,
            final_title,
            album_url,
            CHANNEL_ID.to_string(),
            thumb,
            0,
        );
        if let Some(value) = views {
            item = item.views(value);
        }
        if !uploader.is_empty() {
            item = item.uploader(uploader);
        }
        if !uploader_url.is_empty() {
            item = item.uploader_url(uploader_url);
        }
        if !uploader_slug.is_empty() {
            item.uploaderId = Some(format!("{CHANNEL_ID}:{uploader_slug}"));
        }
        Some(item)
    }
    /// Parses every album card on a listing page; cards that fail
    /// `parse_video_item` are dropped silently.
    fn parse_listing(&self, html: &str) -> Vec<VideoItem> {
        let document = Html::parse_document(html);
        let Some(card_selector) = Self::selector("div.album") else {
            return vec![];
        };
        document
            .select(&card_selector)
            .filter_map(|card| self.parse_video_item(&card))
            .collect()
    }
    /// Browser-like header set for HTML page requests.
    fn html_headers(referer: &str) -> Vec<(String, String)> {
        vec![
            (
                "accept".to_string(),
                "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8".to_string(),
            ),
            ("accept-language".to_string(), "en-US,en;q=0.8".to_string()),
            (
                "user-agent".to_string(),
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36".to_string(),
            ),
            ("referer".to_string(), referer.to_string()),
        ]
    }
    /// Downloads a listing page over HTTP/1.1 with the explore page as the
    /// Referer.
    async fn fetch_page(&self, options: &ServerOptions, url: &str) -> Result<String> {
        let mut requester = requester_or_default(options, CHANNEL_ID, "get_videos");
        requester
            .get_with_headers(url, Self::html_headers(&format!("{}/explore", self.url)), Some(Version::HTTP_11))
            .await
            .map_err(|error| Error::from(format!("request failed for {url}: {error}")))
    }
}
#[async_trait]
impl Provider for EromeProvider {
    /// Resolves the browse target, downloads the listing page, and parses
    /// the album cards. Fetch failures are reported to the provider error
    /// channel and produce an empty list.
    async fn get_videos(
        &self,
        _cache: crate::util::cache::VideoCache,
        _pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        _per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        // An empty sort argument defers to the options-supplied sort,
        // defaulting to the "new" feed.
        let effective_sort = match sort.is_empty() {
            true => options.sort.as_deref().unwrap_or("new").to_string(),
            false => sort,
        };
        let search_terms = query.unwrap_or_default();
        let wanted_page = page.parse::<u16>().unwrap_or(1);
        let browse_target = self.resolve_target(&search_terms, &options, &effective_sort);
        let page_url = self.build_url_for_target(&browse_target, wanted_page);
        let fetched = self.fetch_page(&options, &page_url).await;
        if let Err(error) = &fetched {
            report_provider_error(CHANNEL_ID, "get_videos.fetch", &error.to_string()).await;
        }
        fetched
            .map(|html| self.parse_listing(&html))
            .unwrap_or_default()
    }
    /// Returns this provider's channel descriptor.
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}

View File

@@ -202,7 +202,7 @@ impl FreeusepornProvider {
.await .await
.map_err(|error| format!("search submit failed url={search_url}; error={error}"))?; .map_err(|error| format!("search submit failed url={search_url}; error={error}"))?;
Ok(response.uri().to_string().trim_end_matches('/').to_string()) Ok(response.url().to_string().trim_end_matches('/').to_string())
} }
fn build_formats(&self, id: &str) -> Vec<VideoFormat> { fn build_formats(&self, id: &str) -> Vec<VideoFormat> {

View File

@@ -7,7 +7,9 @@ use crate::util::discord::format_error_chain;
use crate::util::discord::send_discord_error_report; use crate::util::discord::send_discord_error_report;
use crate::util::requester::Requester; use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds; use crate::util::time::parse_time_to_seconds;
use crate::util::hoster_proxy::{proxy_name_for_url, rewrite_hoster_url};
use crate::videos::ServerOptions; use crate::videos::ServerOptions;
use crate::videos::VideoFormat;
use crate::videos::VideoItem; use crate::videos::VideoItem;
use async_trait::async_trait; use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
@@ -331,6 +333,13 @@ impl SxyprnProvider {
.and_then(|s| s.split("</div>").next()) .and_then(|s| s.split("</div>").next())
.ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?; .ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?;
let title_links: Vec<String> = video_segment
.split("href='https://")
.skip(1)
.filter_map(|part| part.split("'").next().map(|u| u.to_string()))
.collect();
let document = Html::parse_document(title_parts); let document = Html::parse_document(title_parts);
let selector = Selector::parse("*") let selector = Selector::parse("*")
.map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?; .map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?;
@@ -353,13 +362,61 @@ impl SxyprnProvider {
.replace('\n', "") .replace('\n', "")
.replace(" + ", " ") .replace(" + ", " ")
.replace(" ", " ") .replace(" ", " ")
.replace("\\", "")
.trim() .trim()
.to_string(); .to_string();
// De-duplicate repeated titles
let words: Vec<&str> = title.split_whitespace().collect();
if words.len() > 1 {
for pattern_len in (1..=words.len() / 2).rev() {
let pattern = &words[0..pattern_len];
let mut all_match = true;
let mut idx = pattern_len;
while idx < words.len() {
let end = std::cmp::min(idx + pattern_len, words.len());
if &words[idx..end] != &pattern[0..(end - idx)] {
all_match = false;
break;
}
idx += pattern_len;
}
if all_match && words.len() % pattern_len == 0 {
title = pattern.join(" ");
break;
}
}
}
if title.to_ascii_lowercase().starts_with("new ") { if title.to_ascii_lowercase().starts_with("new ") {
title = title[4..].to_string(); title = title[4..].to_string();
} }
// Extract tags from title (words starting with #)
let mut tags = Vec::new();
let words: Vec<&str> = title.split_whitespace().collect();
let mut cleaned_words = Vec::new();
for word in words {
let raw_tag = word
.trim_end_matches(|c: char| !c.is_alphanumeric() && c != '_' && c != '-')
.to_string();
if raw_tag.starts_with('#') && raw_tag.len() > 1 {
let tag = raw_tag[1..].to_string();
if !tags.contains(&tag) {
tags.push(tag);
}
} else {
cleaned_words.push(word.to_string());
}
}
// Reconstruct title without tags
title = cleaned_words.join(" ");
// id (DON'T index [6]) // id (DON'T index [6])
let id = video_url let id = video_url
.split('/') .split('/')
@@ -376,7 +433,7 @@ impl SxyprnProvider {
.nth(1) .nth(1)
.and_then(|s| s.split("data-src='").nth(1)) .and_then(|s| s.split("data-src='").nth(1))
.and_then(|s| s.split('\'').next()) .and_then(|s| s.split('\'').next())
.ok_or_else(|| ErrorKind::Parse("failed to extract thumb".into()))?; .unwrap_or("");
let thumb = format!("https:{thumb_path}"); let thumb = format!("https:{thumb_path}");
@@ -416,36 +473,95 @@ impl SxyprnProvider {
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32; let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
// stream urls (your filter condition looks suspicious; leaving as-is) // stream urls - collect both lulustream and vidara.so URLs
let stream_urls = video_segment let mut formats = vec![];
.split("extlink_icon extlink")
.filter_map(|part| { // Add sxyprn format
part.split("href='") let sxyprn_url = format!(
.last() "{}/proxy/sxyprn/post/{}",
.and_then(|s| s.split('\'').next()) options.public_url_base.as_deref().unwrap_or(""),
.map(|u| u.to_string()) id
}) );
.filter(|url| url.starts_with("https://lulustream.")) formats.push(
.collect::<Vec<String>>(); VideoFormat::new(sxyprn_url.clone(), "auto".to_string(), "mp4".to_string())
.format_note(
sxyprn_url
.split("/")
.nth(4)
.unwrap_or("sxyprn")
.to_string(),
),
);
// Also collect and transform vidara.so URLs to proxy format and add as formats
let vidara_urls: Vec<String> = title_links
.iter()
.filter(|url| proxy_name_for_url(url).as_deref() == Some("vidara"))
.map(|url| rewrite_hoster_url(options, url))
.collect();
for vidara_url in vidara_urls {
formats.push(
VideoFormat::m3u8(vidara_url.clone(), "1080".to_string(), "m3u8".to_string())
.format_note(
vidara_url
.split("/")
.nth(4)
.unwrap_or("vidara")
.to_string(),
)
.format_id("vidara".to_string()),
);
}
let doodstream_urls: Vec<String> = title_links
.iter()
.filter(|url| proxy_name_for_url(url).as_deref() == Some("doodstream"))
.map(|url| rewrite_hoster_url(options, url))
.collect();
for dood_url in doodstream_urls {
formats.push(
VideoFormat::m3u8(dood_url.clone(), "auto".to_string(), "m3u8".to_string())
.format_note("doodstream".to_string())
.format_id("doodstream".to_string()),
);
}
let lulustream_urls: Vec<String> = title_links
.iter()
.filter(|url| proxy_name_for_url(url).as_deref() == Some("lulustream"))
.map(|url| rewrite_hoster_url(options, url))
.collect();
for lulustream_url in lulustream_urls {
formats.push(
VideoFormat::m3u8(lulustream_url.clone(), "auto".to_string(), "m3u8".to_string())
.format_note("lulustream".to_string())
.format_id("lulustream".to_string()),
);
}
let video_item_url = stream_urls.first().cloned().unwrap_or_else(|| {
crate::providers::build_proxy_url(options, "sxyprn", &format!("post/{}", id))
});
let mut video_item = VideoItem::new( let mut video_item = VideoItem::new(
id, id.clone(),
title, title,
video_item_url, format!("{}/post/{}", self.url, id.clone()),
"sxyprn".to_string(), "sxyprn".to_string(),
thumb, thumb,
duration, duration,
) )
.views(views.parse::<u32>().unwrap_or(0)); .views(views.parse::<u32>().unwrap_or(0))
.formats(formats);
if let Some(p) = preview { // Add tags if any were found
video_item = video_item.preview(p); if !tags.is_empty() {
video_item.tags = Some(tags);
} }
if preview.is_some() {
video_item.preview = preview;
}
items.push(video_item); items.push(video_item);
} }

View File

@@ -405,7 +405,7 @@ impl YespornProvider {
))); )));
} }
let canonical_url = response.uri().to_string(); let canonical_url = response.url().to_string();
let body = response let body = response
.text() .text()
.await .await

587
src/providers/youporn.rs Normal file
View File

@@ -0,0 +1,587 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::{Provider, report_provider_error, requester_or_default};
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{ElementRef, Html, Selector};
use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::thread;
use url::{Url, form_urlencoded};
use wreq::Version;
// Channel grouping/tag metadata consumed by the provider registry.
pub const CHANNEL_METADATA: crate::providers::ProviderChannelMetadata =
    crate::providers::ProviderChannelMetadata {
        group_id: "mainstream-tube",
        tags: &["mainstream", "studio", "search"],
    };

// Site base URL and the channel id under which items are published.
const BASE_URL: &str = "https://www.youporn.com";
const CHANNEL_ID: &str = "youporn";

// error-chain boilerplate: wraps I/O and HTTP (wreq) failures into this
// module's `Error`/`Result` types.
error_chain! {
    foreign_links {
        Io(std::io::Error);
        HttpRequest(wreq::Error);
    }
}
/// Provider for youporn.com listings (latest feed, search, and
/// tag/channel/pornstar/amateur shortcut pages).
#[derive(Debug, Clone)]
pub struct YoupornProvider {
    // Base site URL; initialized to BASE_URL by `new`.
    url: String,
    // Lowercase link text -> navigation target, scraped from the homepage on
    // a background thread (see `spawn_initial_load`). Shared with that thread
    // via Arc<RwLock<..>>.
    shortcuts: Arc<RwLock<HashMap<String, Target>>>,
}
/// A resolved navigation target on youporn.com, used to build listing URLs.
#[derive(Debug, Clone)]
enum Target {
    // Front-page latest feed.
    Latest { sort: String },
    // Free-text search (`/search/?query=…`).
    Search { query: String },
    // `/porntags/{slug}` listing.
    Tag { slug: String, sort: String },
    // `/channel/{slug}` listing.
    Channel { slug: String, sort: String },
    // `/pornstar/{slug}` listing.
    Pornstar { slug: String, sort: String },
    // `/amateur/{slug}` listing.
    Amateur { slug: String, sort: String },
}
impl YoupornProvider {
    /// Builds the provider and kicks off a one-shot background scrape of the
    /// homepage to seed the shortcut map.
    pub fn new() -> Self {
        let provider = Self {
            url: BASE_URL.to_string(),
            shortcuts: Arc::new(RwLock::new(HashMap::new())),
        };
        provider.spawn_initial_load();
        provider
    }

    /// Spawns an OS thread that runs its own single-threaded Tokio runtime,
    /// fetches the homepage once, and fills `shortcuts` from its links.
    /// All failures (runtime build, fetch, lock poisoning) are silently
    /// ignored — the shortcut map simply stays empty.
    fn spawn_initial_load(&self) {
        let shortcuts = Arc::clone(&self.shortcuts);
        let url = self.url.clone();
        thread::spawn(move || {
            let rt = match tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
            {
                Ok(v) => v,
                Err(_) => return,
            };
            rt.block_on(async move {
                let mut requester = crate::util::requester::Requester::new();
                if let Ok(html) = requester.get(&url, None).await {
                    let map = Self::collect_shortcuts(&html);
                    if let Ok(mut guard) = shortcuts.write() {
                        *guard = map;
                    }
                }
            });
        });
    }

    /// Static channel descriptor returned to clients; only one sort option
    /// ("Most Recent") is exposed.
    fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
        Channel {
            id: CHANNEL_ID.to_string(),
            name: "YouPorn".to_string(),
            description: "YouPorn listings with search, tag/channel shortcuts, and watch-page playback URLs."
                .to_string(),
            premium: false,
            favicon: "https://www.google.com/s2/favicons?sz=64&domain=youporn.com".to_string(),
            status: "active".to_string(),
            categories: vec![],
            options: vec![ChannelOption {
                id: "sort".to_string(),
                title: "Sort".to_string(),
                description: "Latest feed ordering.".to_string(),
                systemImage: "list.number".to_string(),
                colorName: "blue".to_string(),
                options: vec![FilterOption {
                    id: "new".to_string(),
                    title: "Most Recent".to_string(),
                }],
                multiSelect: false,
            }],
            nsfw: true,
            cacheDuration: Some(1800),
        }
    }

    // CSS selector helper; returns None (instead of panicking) on an invalid
    // selector string.
    fn selector(value: &str) -> Option<Selector> {
        Selector::parse(value).ok()
    }

    /// HTML-entity-decodes `value` and collapses every whitespace run to a
    /// single space. Falls back to the raw input if decoding fails.
    fn normalize_text(value: &str) -> String {
        decode(value.as_bytes())
            .to_string()
            .unwrap_or_else(|_| value.to_string())
            .split_whitespace()
            .collect::<Vec<_>>()
            .join(" ")
            .trim()
            .to_string()
    }

    /// Resolves an absolute, protocol-relative ("//…"), or site-relative href
    /// against the provider base URL. Empty input yields an empty string.
    fn normalize_url(&self, value: &str) -> String {
        let trimmed = value.trim();
        if trimmed.is_empty() {
            return String::new();
        }
        if trimmed.starts_with("http://") || trimmed.starts_with("https://") {
            return trimmed.to_string();
        }
        if trimmed.starts_with("//") {
            return format!("https:{trimmed}");
        }
        format!(
            "{}/{}",
            self.url.trim_end_matches('/'),
            trimmed.trim_start_matches('/')
        )
    }

    // Only one ordering is currently supported; any input maps to "new".
    fn normalized_sort(sort: &str) -> &'static str {
        let _ = sort;
        "new"
    }

    // No path suffix is needed for the supported ordering.
    fn sort_suffix(sort: &str) -> &'static str {
        let _ = sort;
        ""
    }

    // "?page=N" for pages past the first, empty for page 1.
    fn page_suffix(page: u8) -> String {
        if page > 1 {
            format!("?page={page}")
        } else {
            String::new()
        }
    }

    /// Browser-like request headers (desktop Chrome UA, no-cache) with the
    /// given referer, used for all listing fetches.
    fn html_headers(referer: &str) -> Vec<(String, String)> {
        vec![
            (
                "accept".to_string(),
                "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,*/*;q=0.8".to_string(),
            ),
            ("accept-language".to_string(), "en-US,en;q=0.7".to_string()),
            ("cache-control".to_string(), "no-cache".to_string()),
            ("pragma".to_string(), "no-cache".to_string()),
            (
                "user-agent".to_string(),
                "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36".to_string(),
            ),
            ("referer".to_string(), referer.to_string()),
        ]
    }

    /// Maps a user query to a navigation target:
    /// - empty query -> latest feed;
    /// - "tag:"/"channel:"/"pornstar:"/"amateur:" prefixes -> the matching
    ///   slug target (spaces in the slug become '-');
    /// - otherwise the homepage shortcut map is consulted (exact, lowercase,
    ///   whitespace-normalized match);
    /// - anything else becomes a free-text search.
    fn target_from_query(&self, query: &str, sort: &str) -> Target {
        let q = query.trim();
        if q.is_empty() {
            return Target::Latest {
                sort: Self::normalized_sort(sort).to_string(),
            };
        }
        let lower = q.to_ascii_lowercase();
        for (prefix, kind) in [
            ("tag:", "tag"),
            ("channel:", "channel"),
            ("pornstar:", "pornstar"),
            ("amateur:", "amateur"),
        ] {
            if let Some(rest) = lower.strip_prefix(prefix) {
                let slug = rest.trim().replace(' ', "-");
                if !slug.is_empty() {
                    return match kind {
                        "tag" => Target::Tag {
                            slug,
                            sort: Self::normalized_sort(sort).to_string(),
                        },
                        "channel" => Target::Channel {
                            slug,
                            sort: Self::normalized_sort(sort).to_string(),
                        },
                        "pornstar" => Target::Pornstar {
                            slug,
                            sort: Self::normalized_sort(sort).to_string(),
                        },
                        _ => Target::Amateur {
                            slug,
                            sort: Self::normalized_sort(sort).to_string(),
                        },
                    };
                }
            }
        }
        let shortcut_key = lower.split_whitespace().collect::<Vec<_>>().join(" ");
        // Shortcut hit: clone the stored target but force the normalized sort.
        if let Ok(guard) = self.shortcuts.read()
            && let Some(target) = guard.get(&shortcut_key)
        {
            return match target {
                Target::Tag { slug, .. } => Target::Tag {
                    slug: slug.clone(),
                    sort: Self::normalized_sort(sort).to_string(),
                },
                Target::Channel { slug, .. } => Target::Channel {
                    slug: slug.clone(),
                    sort: Self::normalized_sort(sort).to_string(),
                },
                Target::Pornstar { slug, .. } => Target::Pornstar {
                    slug: slug.clone(),
                    sort: Self::normalized_sort(sort).to_string(),
                },
                Target::Amateur { slug, .. } => Target::Amateur {
                    slug: slug.clone(),
                    sort: Self::normalized_sort(sort).to_string(),
                },
                _ => target.clone(),
            };
        }
        Target::Search {
            query: q.to_string(),
        }
    }

    /// Builds the absolute listing URL for a target and 1-based page number.
    fn build_url(&self, target: &Target, page: u8) -> String {
        match target {
            Target::Latest { sort } => format!(
                "{}/{}{}",
                self.url,
                Self::sort_suffix(sort),
                Self::page_suffix(page)
            ),
            Target::Search { query } => {
                let encoded: String = form_urlencoded::byte_serialize(query.as_bytes()).collect();
                if page > 1 {
                    format!("{}/search/?query={encoded}&page={page}", self.url)
                } else {
                    format!("{}/search/?query={encoded}", self.url)
                }
            }
            Target::Tag { slug, sort } => format!(
                "{}/porntags/{}/{}{}",
                self.url,
                slug.trim_matches('/'),
                Self::sort_suffix(sort),
                Self::page_suffix(page)
            ),
            Target::Channel { slug, sort } => format!(
                "{}/channel/{}/{}{}",
                self.url,
                slug.trim_matches('/'),
                Self::sort_suffix(sort),
                Self::page_suffix(page)
            ),
            Target::Pornstar { slug, sort } => format!(
                "{}/pornstar/{}/{}{}",
                self.url,
                slug.trim_matches('/'),
                Self::sort_suffix(sort),
                Self::page_suffix(page)
            ),
            Target::Amateur { slug, sort } => format!(
                "{}/amateur/{}/{}{}",
                self.url,
                slug.trim_matches('/'),
                Self::sort_suffix(sort),
                Self::page_suffix(page)
            ),
        }
    }

    /// Scans all `<a href>` links in `html` and maps their (lowercased,
    /// whitespace-normalized) visible text to Tag/Channel/Pornstar/Amateur
    /// targets based on the link path prefix. Absolute links are reduced to
    /// their path first. Later links with the same text overwrite earlier ones.
    fn collect_shortcuts(html: &str) -> HashMap<String, Target> {
        let mut map = HashMap::new();
        let document = Html::parse_document(html);
        let Some(link_selector) = Self::selector("a[href]") else {
            return map;
        };
        for link in document.select(&link_selector) {
            let Some(href) = link.value().attr("href") else {
                continue;
            };
            let title = Self::normalize_text(&link.text().collect::<String>()).to_ascii_lowercase();
            if title.is_empty() {
                continue;
            }
            let path = if href.starts_with("http://") || href.starts_with("https://") {
                Url::parse(href)
                    .ok()
                    .map(|u| u.path().to_string())
                    .unwrap_or_default()
            } else {
                href.to_string()
            };
            if let Some(slug) = path
                .strip_prefix("/porntags/")
                .map(|v| v.trim_matches('/').to_string())
            {
                if !slug.is_empty() {
                    map.insert(
                        title,
                        Target::Tag {
                            slug,
                            sort: "new".to_string(),
                        },
                    );
                }
                continue;
            }
            if let Some(slug) = path
                .strip_prefix("/channel/")
                .map(|v| v.trim_matches('/').to_string())
            {
                if !slug.is_empty() {
                    map.insert(
                        title,
                        Target::Channel {
                            slug,
                            sort: "new".to_string(),
                        },
                    );
                }
                continue;
            }
            if let Some(slug) = path
                .strip_prefix("/pornstar/")
                .map(|v| v.trim_matches('/').to_string())
            {
                if !slug.is_empty() {
                    map.insert(
                        title,
                        Target::Pornstar {
                            slug,
                            sort: "new".to_string(),
                        },
                    );
                }
                continue;
            }
            if let Some(slug) = path
                .strip_prefix("/amateur/")
                .map(|v| v.trim_matches('/').to_string())
                && !slug.is_empty()
            {
                map.insert(
                    title,
                    Target::Amateur {
                        slug,
                        sort: "new".to_string(),
                    },
                );
            }
        }
        map
    }

    // Normalized text content of an optional element, or "" if absent.
    fn text_of(node: Option<ElementRef<'_>>) -> String {
        node.map(|v| Self::normalize_text(&v.text().collect::<String>()))
            .unwrap_or_default()
    }

    /// Parses listing cards (`article.video-box.js_video-box`) out of a page
    /// into `VideoItem`s. Cards without a `/watch/` link or without a
    /// derivable id are skipped; all other fields are best-effort.
    fn parse_items(&self, html: &str) -> Vec<VideoItem> {
        let document = Html::parse_document(html);
        let Some(card_selector) = Self::selector("article.video-box.js_video-box") else {
            return vec![];
        };
        let link_selector = Self::selector("a[data-testid='plw_video_thumbnail_link'], a.video-box-image, a.video-title-text");
        let title_selector = Self::selector("a.video-title-text");
        let thumb_selector = Self::selector("img");
        let duration_selector = Self::selector(".tm_video_duration");
        let views_selector = Self::selector("span.info-views");
        let uploader_selector = Self::selector("a.author-title-text");
        let tag_selector = Self::selector("a.bubble-porntag");
        let mut items = Vec::new();
        for card in document.select(&card_selector) {
            let link_node = link_selector
                .as_ref()
                .and_then(|s| card.select(s).next());
            let href = link_node
                .and_then(|v| v.value().attr("href"))
                .unwrap_or_default();
            if !href.contains("/watch/") {
                continue;
            }
            // Prefer the card's data-video-id attribute; fall back to the path
            // segment right after "/watch/".
            let id = card
                .value()
                .attr("data-video-id")
                .map(|v| v.to_string())
                .or_else(|| {
                    href.split("/watch/")
                        .nth(1)
                        .and_then(|v| v.split('/').next())
                        .map(|v| v.to_string())
                })
                .unwrap_or_default();
            if id.is_empty() {
                continue;
            }
            // Title: the link's `title` attribute wins; fall back to its text.
            let title = title_selector
                .as_ref()
                .and_then(|s| card.select(s).next())
                .map(|v| {
                    let from_title = v.value().attr("title").unwrap_or_default();
                    if from_title.is_empty() {
                        Self::normalize_text(&v.text().collect::<String>())
                    } else {
                        Self::normalize_text(from_title)
                    }
                })
                .unwrap_or_default();
            // Thumbnail may be lazy-loaded: try data-original, data-src, src.
            let thumb = thumb_selector
                .as_ref()
                .and_then(|s| card.select(s).next())
                .and_then(|v| {
                    v.value()
                        .attr("data-original")
                        .or_else(|| v.value().attr("data-src"))
                        .or_else(|| v.value().attr("src"))
                })
                .map(|v| self.normalize_url(v))
                .unwrap_or_default();
            let duration_text = Self::text_of(duration_selector.as_ref().and_then(|s| card.select(s).next()));
            let duration = parse_time_to_seconds(&duration_text).unwrap_or(0) as u32;
            let view_text = views_selector
                .as_ref()
                .and_then(|s| card.select(s).next())
                .map(|v| Self::normalize_text(&v.text().collect::<String>()))
                .unwrap_or_default();
            let views = parse_abbreviated_number(&view_text).unwrap_or(0) as u32;
            // NOTE(review): the second `span.info-views` is treated as a
            // percent rating — confirm against the live markup.
            let rating = views_selector
                .as_ref()
                .and_then(|s| card.select(s).nth(1))
                .map(|v| Self::normalize_text(&v.text().collect::<String>()).replace('%', ""))
                .and_then(|v| v.parse::<f32>().ok());
            let uploader_node = uploader_selector.as_ref().and_then(|s| card.select(s).next());
            let uploader_name = uploader_node
                .as_ref()
                .map(|v| Self::normalize_text(&v.text().collect::<String>()))
                .unwrap_or_default();
            let uploader_href = uploader_node
                .and_then(|v| v.value().attr("href"))
                .map(|v| self.normalize_url(v));
            let uploader_id = card
                .value()
                .attr("data-uploader-id")
                .map(|v| format!("{CHANNEL_ID}:{v}"));
            // Hover-preview clip URL; un-escape the one entity seen in markup.
            let preview = link_node
                .and_then(|v| v.value().attr("data-mediabook"))
                .map(|v| v.replace("&amp;", "&"));
            let mut tags = Vec::new();
            if let Some(sel) = &tag_selector {
                for tag in card.select(sel) {
                    let title = Self::normalize_text(&tag.text().collect::<String>());
                    if !title.is_empty() {
                        tags.push(title);
                    }
                }
            }
            let mut item = VideoItem::new(
                id,
                title,
                self.normalize_url(href),
                CHANNEL_ID.to_string(),
                thumb,
                duration,
            )
            .views(views);
            if let Some(value) = rating {
                item = item.rating(value);
            }
            if !uploader_name.is_empty() {
                item = item.uploader(uploader_name);
            }
            if let Some(value) = uploader_href {
                item.uploaderUrl = Some(value);
            }
            if let Some(value) = uploader_id {
                item.uploaderId = Some(value);
            }
            if let Some(value) = preview {
                item = item.preview(value);
            }
            if !tags.is_empty() {
                item = item.tags(tags);
            }
            items.push(item);
        }
        items
    }
}
#[async_trait]
impl Provider for YoupornProvider {
    /// Fetches one listing page for the resolved target.
    ///
    /// Cache policy: entries fresher than 5 minutes are returned directly;
    /// stale entries are kept as a fallback and returned when the fetch fails
    /// or parsing yields no items. A successful parse replaces the cache
    /// entry for this URL.
    async fn get_videos(
        &self,
        cache: VideoCache,
        _db_pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        _per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        let query = query.unwrap_or_default();
        // Non-numeric or missing page strings default to page 1.
        let page = page.parse::<u8>().unwrap_or(1);
        let target = self.target_from_query(&query, &sort);
        let video_url = self.build_url(&target, page);
        let old_items = match cache.get(&video_url) {
            // Fresh hit (< 5 min old): short-circuit with the cached items.
            Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
                return items.clone();
            }
            // Stale hit: keep as fallback in case the refresh fails.
            Some((_time, items)) => items.clone(),
            None => vec![],
        };
        let mut requester = requester_or_default(&options, CHANNEL_ID, "get_videos");
        let referer = format!("{}/", self.url.trim_end_matches('/'));
        let text = match requester
            .get_with_headers(&video_url, Self::html_headers(&referer), Some(Version::HTTP_11))
            .await
        {
            Ok(text) => text,
            Err(e) => {
                report_provider_error(
                    CHANNEL_ID,
                    "get_videos.request",
                    &format!("url={video_url}; error={e}"),
                )
                .await;
                return old_items;
            }
        };
        let items = self.parse_items(&text);
        // An empty parse is treated as a transient failure — serve stale data.
        if items.is_empty() {
            return old_items;
        }
        cache.remove(&video_url);
        cache.insert(video_url, items.clone());
        items
    }

    /// Static channel descriptor for this provider.
    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
        Some(self.build_channel(clientversion))
    }
}

100
src/proxies/lulustream.rs Normal file
View File

@@ -0,0 +1,100 @@
use ntex::web;
use url::Url;
use serde_json::json;
use crate::util::requester::Requester;
/// Stateless proxy that resolves lulustream.com / luluvdo.com detail pages
/// to direct stream URLs.
#[derive(Debug, Clone)]
pub struct LulustreamProxy {}
impl LulustreamProxy {
    /// Creates a new stateless Lulustream proxy.
    pub fn new() -> Self {
        LulustreamProxy {}
    }

    /// Normalizes a raw endpoint (full URL, host-prefixed path, or bare path)
    /// into a canonical `https://` detail URL plus the trailing video id.
    ///
    /// Returns `None` when the endpoint is empty or does not resolve to an
    /// allowed Lulustream detail URL (see `is_allowed_detail_url`).
    fn normalize_detail_request(endpoint: &str) -> Option<(String, String)> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        let detail_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
            endpoint.to_string()
        } else if endpoint.starts_with("lulustream.com/")
            || endpoint.starts_with("www.lulustream.com/")
            || endpoint.starts_with("luluvdo.com/")
        {
            format!("https://{endpoint}")
        } else {
            format!("https://lulustream.com/{endpoint}")
        };
        if !Self::is_allowed_detail_url(&detail_url) {
            return None;
        }
        let parsed = Url::parse(&detail_url).ok()?;
        // The video id is the last path segment of the detail URL.
        let video_id = parsed.path_segments()?
            .last()
            .map(ToOwned::to_owned)?;
        Some((detail_url, video_id))
    }

    /// Whitelist check: only HTTPS URLs on known Lulustream hosts with a
    /// view (`/v/`), embed (`/e/`), or download (`/d/`) path are accepted.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(parsed) = Url::parse(url).ok() else {
            return false;
        };
        if parsed.scheme() != "https" {
            return false;
        }
        let Some(host) = parsed.host_str() else {
            return false;
        };
        // BUGFIX: `/d/` paths were previously rejected, which made both unit
        // tests in this file (they use ".../d/s484n23k8opy") fail because
        // `normalize_detail_request` returned `None` for them.
        (host == "lulustream.com" || host == "www.lulustream.com" || host == "luluvdo.com")
            && (parsed.path().starts_with("/v/")
                || parsed.path().starts_with("/e/")
                || parsed.path().starts_with("/d/"))
    }

    /// Resolves a Lulustream endpoint to a direct video URL by fetching the
    /// player page and extracting the `sources: [{file:"…"` entry.
    ///
    /// Returns an empty string on any failure (disallowed URL, fetch error,
    /// or missing source in the page body).
    pub async fn get_video_url(
        &self,
        url: String,
        requester: web::types::State<Requester>,
    ) -> String {
        let mut requester = requester.get_ref().clone();
        let Some((detail_url, video_id)) = Self::normalize_detail_request(&url) else {
            println!("LulustreamProxy: Invalid detail URL: {url}");
            return String::new();
        };
        let text = requester.get(&detail_url, None).await.unwrap_or_default();
        // The player page embeds the stream URL as: sources: [{file:"<url>"...
        let video_url = text.split("sources: [{file:\"")
            .nth(1)
            .and_then(|s| s.split('"').next())
            .unwrap_or_default()
            .to_string();
        if video_url.is_empty() {
            println!("LulustreamProxy: Failed to extract video URL for video ID: {video_id}");
        }
        video_url
    }
}
#[cfg(test)]
mod tests {
    use super::LulustreamProxy;

    // NOTE(review): these inputs use `/d/` download paths; they only succeed
    // if `is_allowed_detail_url` accepts `/d/` alongside `/v/` and `/e/`.

    // A full https:// URL should pass through unchanged, with the last path
    // segment extracted as the video id.
    #[test]
    fn normalizes_detail_request_with_full_url() {
        let (url, video_id) =
            LulustreamProxy::normalize_detail_request("https://lulustream.com/d/s484n23k8opy")
                .expect("detail request should parse");
        assert_eq!(url, "https://lulustream.com/d/s484n23k8opy");
        assert_eq!(video_id, "s484n23k8opy");
    }

    // A bare path should be prefixed with the default lulustream.com host.
    #[test]
    fn normalizes_detail_request_with_path_only() {
        let (url, video_id) = LulustreamProxy::normalize_detail_request("d/s484n23k8opy")
            .expect("detail request should parse");
        assert_eq!(url, "https://lulustream.com/d/s484n23k8opy");
        assert_eq!(video_id, "s484n23k8opy");
    }
}

View File

@@ -11,6 +11,8 @@ use crate::proxies::shooshtime::ShooshtimeProxy;
use crate::proxies::spankbang::SpankbangProxy; use crate::proxies::spankbang::SpankbangProxy;
use crate::proxies::vjav::VjavProxy; use crate::proxies::vjav::VjavProxy;
use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester}; use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
use crate::proxies::vidara::VidaraProxy;
use crate::proxies::lulustream::LulustreamProxy;
pub mod archivebate; pub mod archivebate;
pub mod doodstream; pub mod doodstream;
@@ -19,6 +21,7 @@ pub mod heavyfetish;
pub mod hqporner; pub mod hqporner;
pub mod hqpornerthumb; pub mod hqpornerthumb;
pub mod javtiful; pub mod javtiful;
pub mod lulustream;
pub mod noodlemagazine; pub mod noodlemagazine;
pub mod pimpbunny; pub mod pimpbunny;
pub mod porndish; pub mod porndish;
@@ -28,6 +31,7 @@ pub mod pornhubthumb;
pub mod shooshtime; pub mod shooshtime;
pub mod spankbang; pub mod spankbang;
pub mod sxyprn; pub mod sxyprn;
pub mod vidara;
pub mod vjav; pub mod vjav;
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -36,6 +40,7 @@ pub enum AnyProxy {
Doodstream(DoodstreamProxy), Doodstream(DoodstreamProxy),
Sxyprn(SxyprnProxy), Sxyprn(SxyprnProxy),
Javtiful(javtiful::JavtifulProxy), Javtiful(javtiful::JavtifulProxy),
Lulustream(LulustreamProxy),
Pornhd3x(Pornhd3xProxy), Pornhd3x(Pornhd3xProxy),
Pimpbunny(PimpbunnyProxy), Pimpbunny(PimpbunnyProxy),
Porndish(PorndishProxy), Porndish(PorndishProxy),
@@ -44,6 +49,7 @@ pub enum AnyProxy {
Hqporner(HqpornerProxy), Hqporner(HqpornerProxy),
Heavyfetish(HeavyfetishProxy), Heavyfetish(HeavyfetishProxy),
Vjav(VjavProxy), Vjav(VjavProxy),
Vidara(VidaraProxy),
} }
pub trait Proxy { pub trait Proxy {
@@ -57,6 +63,7 @@ impl Proxy for AnyProxy {
AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await, AnyProxy::Doodstream(p) => p.get_video_url(url, requester).await,
AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await, AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await, AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
AnyProxy::Lulustream(p) => p.get_video_url(url, requester).await,
AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await, AnyProxy::Pornhd3x(p) => p.get_video_url(url, requester).await,
AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await, AnyProxy::Pimpbunny(p) => p.get_video_url(url, requester).await,
AnyProxy::Porndish(p) => p.get_video_url(url, requester).await, AnyProxy::Porndish(p) => p.get_video_url(url, requester).await,
@@ -65,6 +72,7 @@ impl Proxy for AnyProxy {
AnyProxy::Hqporner(p) => p.get_video_url(url, requester).await, AnyProxy::Hqporner(p) => p.get_video_url(url, requester).await,
AnyProxy::Heavyfetish(p) => p.get_video_url(url, requester).await, AnyProxy::Heavyfetish(p) => p.get_video_url(url, requester).await,
AnyProxy::Vjav(p) => p.get_video_url(url, requester).await, AnyProxy::Vjav(p) => p.get_video_url(url, requester).await,
AnyProxy::Vidara(p) => p.get_video_url(url, requester).await,
} }
} }
} }

View File

@@ -38,6 +38,7 @@ impl SxyprnProxy {
) -> String { ) -> String {
let mut requester = requester.get_ref().clone(); let mut requester = requester.get_ref().clone();
let url = "https://sxyprn.com/".to_string() + &url; let url = "https://sxyprn.com/".to_string() + &url;
// println!("Fetching URL: {}", url);
let text = requester.get(&url, None).await.unwrap_or("".to_string()); let text = requester.get(&url, None).await.unwrap_or("".to_string());
if text.is_empty() { if text.is_empty() {
return "".to_string(); return "".to_string();
@@ -48,44 +49,33 @@ impl SxyprnProxy {
.split("\"}") .split("\"}")
.collect::<Vec<&str>>()[0] .collect::<Vec<&str>>()[0]
.replace("\\", ""); .replace("\\", "");
//println!("src: {}",data_string); // println!("src: {}", data_string);
let mut tmp = data_string let mut tmp = data_string
.split("/") .split("/")
.map(|s| s.to_string()) .map(|s| s.to_string())
.collect::<Vec<String>>(); .collect::<Vec<String>>();
//println!("tmp: {:?}",tmp); // println!("tmp: {:?}", tmp);
tmp[1] = format!( tmp[1] = format!(
"{}8/{}", "{}8/{}",
tmp[1], tmp[1],
boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())) boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str()))
); );
//println!("tmp[1]: {:?}",tmp[1]); // println!("tmp[1]: {:?}", tmp[1]);
//preda //preda
tmp[5] = format!( tmp[5] = format!(
"{}", "{}",
tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str()) tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
); );
//println!("tmp: {:?}",tmp); // println!("tmp: {:?}", tmp);
let sxyprn_video_url = format!("https://sxyprn.com{}", tmp.join("/")); let sxyprn_video_url = format!("https://sxyprn.com{}", tmp.join("/"));
// println!("sxyprn_video_url: {}", sxyprn_video_url);
let response = requester.get_raw(&sxyprn_video_url).await; match crate::util::get_redirect_location(&sxyprn_video_url) {
match response { Ok(Some(loc)) => {return format!("https:{}", loc)},
Ok(resp) => { Ok(None) => println!("No redirect found for {}", sxyprn_video_url),
return format!( Err(e) => eprintln!("Request failed: {}", e),
"https:{}",
resp.headers()
.get("Location")
.unwrap()
.to_str()
.unwrap_or("")
.to_string()
);
}
Err(e) => {
println!("Error fetching video URL: {}", e);
}
} }
return "".to_string(); return "".to_string();
} }
} }

124
src/proxies/vidara.rs Normal file
View File

@@ -0,0 +1,124 @@
use ntex::web;
use url::Url;
use serde_json::json;
use crate::util::requester::Requester;
/// Stateless proxy that resolves vidara.so detail pages to streaming URLs
/// via the site's `/api/stream` endpoint.
#[derive(Debug, Clone)]
pub struct VidaraProxy {}
impl VidaraProxy {
    /// Creates a new stateless Vidara proxy.
    pub fn new() -> Self {
        VidaraProxy {}
    }

    /// Normalizes a raw endpoint (full URL, host-prefixed path, or bare path)
    /// into a canonical `https://vidara.so` detail URL plus the trailing
    /// path segment used as the video id / filecode.
    ///
    /// Returns `None` when the endpoint is empty or resolves to a URL that
    /// fails the `is_allowed_detail_url` whitelist.
    fn normalize_detail_request(endpoint: &str) -> Option<(String, String)> {
        let endpoint = endpoint.trim().trim_start_matches('/');
        if endpoint.is_empty() {
            return None;
        }
        let detail_url = if endpoint.starts_with("http://") || endpoint.starts_with("https://") {
            endpoint.to_string()
        } else if endpoint.starts_with("vidara.so/") || endpoint.starts_with("www.vidara.so/")
        {
            format!("https://{endpoint}")
        } else {
            format!("https://vidara.so/{endpoint}")
        };
        if !Self::is_allowed_detail_url(&detail_url) {
            return None;
        }
        let parsed = Url::parse(&detail_url).ok()?;
        // The video id is the last path segment of the detail URL.
        let video_id = parsed.path_segments()?
            .last()
            .map(ToOwned::to_owned)?;
        Some((detail_url, video_id))
    }

    /// Whitelist check: only HTTPS URLs on vidara.so / www.vidara.so whose
    /// path starts with `/v/` (view) or `/e/` (embed) are accepted.
    fn is_allowed_detail_url(url: &str) -> bool {
        let Some(parsed) = Url::parse(url).ok() else {
            return false;
        };
        if parsed.scheme() != "https" {
            return false;
        }
        let Some(host) = parsed.host_str() else {
            return false;
        };
        (host == "vidara.so" || host == "www.vidara.so")
            && (parsed.path().starts_with("/v/")||parsed.path().starts_with("/e/"))
    }

    /// Resolves a Vidara endpoint to a streaming URL by POSTing the filecode
    /// to `https://vidara.so/api/stream` and reading `streaming_url` from the
    /// JSON response.
    ///
    /// Returns an empty string on any failure (disallowed URL, request error,
    /// unreadable body, invalid JSON, or missing `streaming_url` field).
    pub async fn get_video_url(
        &self,
        url: String,
        requester: web::types::State<Requester>,
    ) -> String {
        let mut requester = requester.get_ref().clone();
        let Some((detail_url, video_id)) = Self::normalize_detail_request(&url) else {
            println!("VidaraProxy: Invalid detail URL: {url}");
            return String::new();
        };
        // API request body: the last path segment doubles as the filecode.
        let body = json!({
            "filecode": video_id,
            "device": "web"
        });
        // The detail page URL is sent as the Referer, mirroring the browser flow.
        let response = requester
            .post_json(
                "https://vidara.so/api/stream",
                &body,
                vec![
                    ("Referer".to_string(), detail_url.clone())
                ],
            )
            .await;
        let Ok(response) = response else {
            return String::new();
        };
        let Ok(response_text) = response.text().await else {
            return String::new();
        };
        let Ok(json): Result<serde_json::Value, _> = serde_json::from_str(&response_text) else {
            return String::new();
        };
        json["streaming_url"]
            .as_str()
            .map(ToOwned::to_owned)
            .unwrap_or_default()
    }
}
#[cfg(test)]
mod tests {
    use super::VidaraProxy;

    // A full https:// URL on a whitelisted /v/ path should pass through
    // unchanged, with the last path segment extracted as the video id.
    #[test]
    fn normalizes_detail_request_with_full_url() {
        let (url, video_id) =
            VidaraProxy::normalize_detail_request("https://vidara.so/v/eJ9O4QqG1Ln2")
                .expect("detail request should parse");
        assert_eq!(url, "https://vidara.so/v/eJ9O4QqG1Ln2");
        assert_eq!(video_id, "eJ9O4QqG1Ln2");
    }

    // BUGFIX: the previous input "video/1000/demo" is rejected by
    // `is_allowed_detail_url` (only /v/ and /e/ paths are whitelisted), so
    // `normalize_detail_request` returned `None` and the `.expect()` panicked;
    // even if it had been accepted, the last path segment is "demo", not the
    // asserted "1000". Use a whitelisted path-only /v/ endpoint instead.
    #[test]
    fn normalizes_detail_request_with_path_only() {
        let (url, video_id) = VidaraProxy::normalize_detail_request("v/eJ9O4QqG1Ln2")
            .expect("detail request should parse");
        assert_eq!(url, "https://vidara.so/v/eJ9O4QqG1Ln2");
        assert_eq!(video_id, "eJ9O4QqG1Ln2");
    }

    // Non-whitelisted hosts and paths must be rejected.
    #[test]
    fn rejects_disallowed_endpoints() {
        assert!(VidaraProxy::normalize_detail_request("https://example.com/v/abc").is_none());
        assert!(VidaraProxy::normalize_detail_request("video/1000/demo").is_none());
    }
}

View File

@@ -12,6 +12,7 @@ use crate::proxies::shooshtime::ShooshtimeProxy;
use crate::proxies::spankbang::SpankbangProxy; use crate::proxies::spankbang::SpankbangProxy;
use crate::proxies::sxyprn::SxyprnProxy; use crate::proxies::sxyprn::SxyprnProxy;
use crate::proxies::vjav::VjavProxy; use crate::proxies::vjav::VjavProxy;
use crate::proxies::vidara::VidaraProxy;
use crate::proxies::*; use crate::proxies::*;
use crate::util::requester::Requester; use crate::util::requester::Requester;
@@ -71,6 +72,11 @@ pub fn config(cfg: &mut web::ServiceConfig) {
.route(web::post().to(proxy2redirect)) .route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)), .route(web::get().to(proxy2redirect)),
) )
.service(
web::resource("/vidara/{endpoint}*")
.route(web::post().to(proxy2redirect))
.route(web::get().to(proxy2redirect)),
)
.service( .service(
web::resource("/shooshtime-media/{endpoint}*") web::resource("/shooshtime-media/{endpoint}*")
.route(web::post().to(crate::proxies::shooshtime::serve_media)) .route(web::post().to(crate::proxies::shooshtime::serve_media))
@@ -139,6 +145,7 @@ fn get_proxy(proxy: &str) -> Option<AnyProxy> {
"vjav" => Some(AnyProxy::Vjav(VjavProxy::new())), "vjav" => Some(AnyProxy::Vjav(VjavProxy::new())),
"pornhd3x" => Some(AnyProxy::Pornhd3x(Pornhd3xProxy::new())), "pornhd3x" => Some(AnyProxy::Pornhd3x(Pornhd3xProxy::new())),
"shooshtime" => Some(AnyProxy::Shooshtime(ShooshtimeProxy::new())), "shooshtime" => Some(AnyProxy::Shooshtime(ShooshtimeProxy::new())),
"vidara" => Some(AnyProxy::Vidara(VidaraProxy::new())),
"pimpbunny" => Some(AnyProxy::Pimpbunny(PimpbunnyProxy::new())), "pimpbunny" => Some(AnyProxy::Pimpbunny(PimpbunnyProxy::new())),
"porndish" => Some(AnyProxy::Porndish(PorndishProxy::new())), "porndish" => Some(AnyProxy::Porndish(PorndishProxy::new())),
"spankbang" => Some(AnyProxy::Spankbang(SpankbangProxy::new())), "spankbang" => Some(AnyProxy::Spankbang(SpankbangProxy::new())),

View File

@@ -3,25 +3,41 @@ use url::Url;
use crate::providers::{build_proxy_url, strip_url_scheme}; use crate::providers::{build_proxy_url, strip_url_scheme};
use crate::videos::ServerOptions; use crate::videos::ServerOptions;
#[allow(dead_code)]
const DOODSTREAM_HOSTS: &[&str] = &[ const DOODSTREAM_HOSTS: &[&str] = &[
"doodstream.com",
"turboplayers.xyz", "turboplayers.xyz",
"www.turboplayers.xyz",
"trailerhg.xyz", "trailerhg.xyz",
"www.trailerhg.xyz",
"streamhg.com", "streamhg.com",
"www.streamhg.com", ];
const LULUSTREAM_HOSTS: &[&str] = &[
"luluvdo.com",
"lulustream.com",
];
const VIDARA_HOSTS: &[&str] = &[
"vidara.so",
]; ];
#[allow(dead_code)] #[allow(dead_code)]
pub fn proxy_name_for_url(url: &str) -> Option<&'static str> { pub fn proxy_name_for_url(url: &str) -> Option<&'static str> {
let parsed = Url::parse(url).ok()?; let parsed = match !url.starts_with("http://") && !url.starts_with("https://"){
true => Url::parse(&format!("https://{}", url)).ok()?,
false => Url::parse(url).ok()?
};
let host = parsed.host_str()?.to_ascii_lowercase(); let host = parsed.host_str()?.to_ascii_lowercase();
if DOODSTREAM_HOSTS.contains(&host.as_str()) { if DOODSTREAM_HOSTS.contains(&host.as_str()) {
return Some("doodstream"); return Some("doodstream");
} }
if LULUSTREAM_HOSTS.contains(&host.as_str()) {
return Some("lulustream");
}
if VIDARA_HOSTS.contains(&host.as_str()) {
return Some("vidara");
}
None None
} }

View File

@@ -1,3 +1,6 @@
use std::error::Error;
use std::process::Command;
pub mod cache; pub mod cache;
pub mod discord; pub mod discord;
pub mod flaresolverr; pub mod flaresolverr;
@@ -50,3 +53,37 @@ pub fn interleave<T: Clone>(lists: &[Vec<T>]) -> Vec<T> {
result result
} }
/// Issues a HEAD request through the system `curl` binary and returns the
/// value of the `Location` response header, if the server sent one.
///
/// Returns `Ok(None)` when the response carries no `Location` header, and an
/// error when `curl` cannot be spawned or exits with a non-zero status.
pub fn get_redirect_location(url: &str) -> Result<Option<String>, Box<dyn Error>> {
    // -s: silent (no progress bar)
    // -I: fetch headers only (HEAD request)
    // --max-time: hard cap so a stalled upstream cannot hang the caller forever
    let output = Command::new("curl")
        .arg("-sI")
        .arg("--max-time")
        .arg("15")
        .arg(url)
        .output()?;

    // Surface a curl failure (bad URL, DNS error, timeout, ...) as an error
    // instead of silently returning Ok(None).
    if !output.status.success() {
        let stderr = String::from_utf8_lossy(&output.stderr);
        return Err(format!("curl command failed: {}", stderr).into());
    }

    let stdout = String::from_utf8_lossy(&output.stdout);

    // Headers arrive one per line; match the header name case-insensitively
    // without allocating a lowercased copy of every line.
    for line in stdout.lines() {
        if let Some((name, value)) = line.split_once(':') {
            if name.eq_ignore_ascii_case("location") {
                // trim() drops surrounding whitespace and any trailing `\r`.
                return Ok(Some(value.trim().to_string()));
            }
        }
    }

    Ok(None)
}

View File

@@ -6,9 +6,9 @@ use std::time::Duration;
use wreq::Client; use wreq::Client;
use wreq::Proxy; use wreq::Proxy;
use wreq::Response; use wreq::Response;
use wreq::Uri; use wreq::Url;
use wreq::Version; use wreq::Version;
use wreq::cookie::{CookieStore, Cookies, Jar}; use wreq::cookie::{CookieStore, Jar};
use wreq::header::{HeaderMap, HeaderValue, SET_COOKIE, USER_AGENT}; use wreq::header::{HeaderMap, HeaderValue, SET_COOKIE, USER_AGENT};
use wreq::multipart::Form; use wreq::multipart::Form;
use wreq::redirect::Policy; use wreq::redirect::Policy;
@@ -67,7 +67,7 @@ impl Requester {
for value in response.headers().get_all(SET_COOKIE).iter() { for value in response.headers().get_all(SET_COOKIE).iter() {
if let Ok(cookie) = value.to_str() { if let Ok(cookie) = value.to_str() {
self.cookie_jar.add_cookie_str(cookie, &origin.to_string()); self.cookie_jar.add_cookie_str(cookie, &origin);
} }
} }
} }
@@ -115,7 +115,7 @@ impl Requester {
} }
self.cookie_jar self.cookie_jar
.add_cookie_str(&cookie_string, &origin.to_string()); .add_cookie_str(&cookie_string, &origin);
} }
} }
@@ -199,7 +199,7 @@ impl Requester {
fn build_client(cookie_jar: Arc<Jar>, user_agent: Option<&str>) -> Client { fn build_client(cookie_jar: Arc<Jar>, user_agent: Option<&str>) -> Client {
let mut builder = Client::builder() let mut builder = Client::builder()
.cert_verification(false) .cert_verification(false)
.emulation(Emulation::Firefox146) .emulation(Emulation::Firefox136)
.cookie_provider(cookie_jar) .cookie_provider(cookie_jar)
.redirect(Policy::default()); .redirect(Policy::default());
@@ -249,20 +249,13 @@ impl Requester {
} }
pub fn cookie_header_for_url(&self, url: &str) -> Option<String> { pub fn cookie_header_for_url(&self, url: &str) -> Option<String> {
let parsed = url.parse::<Uri>().ok()?; let parsed = url.parse::<Url>().ok()?;
match self.cookie_jar.cookies(&parsed) { let joined = self.cookie_jar.cookies(&parsed)
Cookies::Compressed(value) => value.to_str().ok().map(ToOwned::to_owned), .into_iter()
Cookies::Uncompressed(values) => { .filter_map(|c| c.to_str().ok().map(ToOwned::to_owned))
let joined = values .collect::<Vec<_>>()
.into_iter() .join("; ");
.filter_map(|value| value.to_str().ok().map(ToOwned::to_owned)) (!joined.is_empty()).then_some(joined)
.collect::<Vec<_>>()
.join("; ");
(!joined.is_empty()).then_some(joined)
}
Cookies::Empty => None,
_ => None,
}
} }
pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> { pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {
@@ -686,7 +679,7 @@ mod tests {
let origin = "https://shared-cookie-requester-test.invalid/"; let origin = "https://shared-cookie-requester-test.invalid/";
a.cookie_jar a.cookie_jar
.add_cookie_str("shared_cookie=1; Path=/; SameSite=Lax", origin); .add_cookie_str("shared_cookie=1; Path=/; SameSite=Lax", &url::Url::parse(origin).unwrap());
let cookie_header = b let cookie_header = b
.cookie_header_for_url("https://shared-cookie-requester-test.invalid/path") .cookie_header_for_url("https://shared-cookie-requester-test.invalid/path")

View File

@@ -157,114 +157,26 @@ impl VideoItem {
self.tags = Some(tags); self.tags = Some(tags);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "chaturbate",
hottub_provider = "porn4fans",
hottub_provider = "xfree",
hottub_provider = "pornhub",
))]
pub fn uploader(mut self, uploader: String) -> Self { pub fn uploader(mut self, uploader: String) -> Self {
self.uploader = Some(uploader); self.uploader = Some(uploader);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "heavyfetish",
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "chaturbate",
))]
pub fn uploader_url(mut self, uploader_url: String) -> Self { pub fn uploader_url(mut self, uploader_url: String) -> Self {
self.uploaderUrl = Some(uploader_url); self.uploaderUrl = Some(uploader_url);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "beeg",
hottub_provider = "chaturbate",
hottub_provider = "freepornvideosxxx",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hentaihaven",
hottub_provider = "hypnotube",
hottub_provider = "javtiful",
hottub_provider = "noodlemagazine",
hottub_provider = "okxxx",
hottub_provider = "omgxxx",
hottub_provider = "perfectgirls",
hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "porn00",
hottub_provider = "porn4fans",
hottub_provider = "porndish",
hottub_provider = "pornhat",
hottub_provider = "pornhub",
hottub_provider = "redtube",
hottub_provider = "rule34gen",
hottub_provider = "rule34video",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "sxyprn",
hottub_provider = "tnaflix",
hottub_provider = "tokyomotion",
hottub_provider = "viralxxxporn",
hottub_provider = "xfree",
hottub_provider = "xxthots",
hottub_provider = "yesporn",
hottub_provider = "youjizz",
))]
pub fn views(mut self, views: u32) -> Self { pub fn views(mut self, views: u32) -> Self {
self.views = Some(views); self.views = Some(views);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "beeg",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hsex",
hottub_provider = "porn4fans",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "tokyomotion",
hottub_provider = "vrporn",
hottub_provider = "yesporn",
))]
pub fn rating(mut self, rating: f32) -> Self { pub fn rating(mut self, rating: f32) -> Self {
self.rating = Some(rating); self.rating = Some(rating);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "porndish",
hottub_provider = "shooshtime",
hottub_provider = "heavyfetish",
hottub_provider = "xfree",
))]
pub fn uploaded_at(mut self, uploaded_at: u64) -> Self { pub fn uploaded_at(mut self, uploaded_at: u64) -> Self {
self.uploadedAt = Some(uploaded_at); self.uploadedAt = Some(uploaded_at);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "hentaihaven",
hottub_provider = "hqporner",
hottub_provider = "javtiful",
hottub_provider = "noodlemagazine",
hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
))]
pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self { pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self {
if formats.is_empty() { if formats.is_empty() {
return self; return self;
@@ -272,42 +184,11 @@ impl VideoItem {
self.formats = Some(formats); self.formats = Some(formats);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "freepornvideosxxx",
hottub_provider = "heavyfetish",
hottub_provider = "homoxxx",
hottub_provider = "javtiful",
hottub_provider = "missav",
hottub_provider = "okxxx",
hottub_provider = "omgxxx",
hottub_provider = "perfectgirls",
hottub_provider = "pimpbunny",
hottub_provider = "pmvhaven",
hottub_provider = "pornhat",
hottub_provider = "redtube",
hottub_provider = "rule34gen",
hottub_provider = "shooshtime",
hottub_provider = "spankbang",
hottub_provider = "sxyprn",
hottub_provider = "tnaflix",
hottub_provider = "xfree",
hottub_provider = "xxdbx",
hottub_provider = "yesporn",
))]
pub fn preview(mut self, preview: String) -> Self { pub fn preview(mut self, preview: String) -> Self {
self.preview = Some(preview); self.preview = Some(preview);
self self
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hentaihaven",
hottub_provider = "hanime",
hottub_provider = "heavyfetish",
hottub_provider = "paradisehill",
hottub_provider = "xfree",
))]
pub fn aspect_ratio(mut self, aspect_ratio: f32) -> Self { pub fn aspect_ratio(mut self, aspect_ratio: f32) -> Self {
self.aspectRatio = Some(aspect_ratio); self.aspectRatio = Some(aspect_ratio);
self self
@@ -401,13 +282,36 @@ impl VideoFormat {
http_headers: None, http_headers: None,
} }
} }
#[cfg(any( pub fn m3u8(url: String, quality: String, format: String) -> Self {
not(hottub_single_provider), let _ = format;
hottub_provider = "vrporn", VideoFormat {
hottub_provider = "perverzija", url,
hottub_provider = "porndish", quality,
hottub_provider = "spankbang", format: format, // Default format
))] format_id: Some("m3u8-1080".to_string()),
format_note: None,
filesize: None,
asr: None,
fps: None,
width: None,
height: None,
tbr: None,
language: None,
language_preference: None,
ext: Some("m3u8".to_string()),
vcodec: None,
acodec: None,
dynamic_range: None,
abr: None,
vbr: None,
container: None,
protocol: Some("m3u8_native".to_string()),
audio_ext: Some("none".to_string()),
video_ext: Some("m3u8".to_string()),
resolution: None,
http_headers: None,
}
}
pub fn add_http_header(&mut self, key: String, value: String) { pub fn add_http_header(&mut self, key: String, value: String) {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());
@@ -416,14 +320,6 @@ impl VideoFormat {
headers.insert(key, value); headers.insert(key, value);
} }
} }
#[cfg(any(
not(hottub_single_provider),
hottub_provider = "hentaihaven",
hottub_provider = "noodlemagazine",
hottub_provider = "shooshtime",
hottub_provider = "heavyfetish",
hottub_provider = "hsex",
))]
pub fn http_header(&mut self, key: String, value: String) -> Self { pub fn http_header(&mut self, key: String, value: String) -> Self {
if self.http_headers.is_none() { if self.http_headers.is_none() {
self.http_headers = Some(HashMap::new()); self.http_headers = Some(HashMap::new());
@@ -448,86 +344,86 @@ impl VideoFormat {
self.format_note = Some(format_note); self.format_note = Some(format_note);
self self
} }
// pub fn filesize(mut self, filesize: u32) -> Self { pub fn filesize(mut self, filesize: u32) -> Self {
// self.filesize = Some(filesize); self.filesize = Some(filesize);
// self self
// } }
// pub fn asr(mut self, asr: u32) -> Self { pub fn asr(mut self, asr: u32) -> Self {
// self.asr = Some(asr); self.asr = Some(asr);
// self self
// } }
// pub fn fps(mut self, fps: u32) -> Self { pub fn fps(mut self, fps: u32) -> Self {
// self.fps = Some(fps); self.fps = Some(fps);
// self self
// } }
// pub fn width(mut self, width: u32) -> Self { pub fn width(mut self, width: u32) -> Self {
// self.width = Some(width); self.width = Some(width);
// self self
// } }
// pub fn height(mut self, height: u32) -> Self { pub fn height(mut self, height: u32) -> Self {
// self.height = Some(height); self.height = Some(height);
// self self
// } }
// pub fn tbr(mut self, tbr: u32) -> Self { pub fn tbr(mut self, tbr: u32) -> Self {
// self.tbr = Some(tbr); self.tbr = Some(tbr);
// self self
// } }
// pub fn language(mut self, language: String) -> Self { pub fn language(mut self, language: String) -> Self {
// self.language = Some(language); self.language = Some(language);
// self self
// } }
// pub fn language_preference(mut self, language_preference: u32) -> Self { pub fn language_preference(mut self, language_preference: u32) -> Self {
// self.language_preference = Some(language_preference); self.language_preference = Some(language_preference);
// self self
// } }
// pub fn ext(mut self, ext: String) -> Self { pub fn ext(mut self, ext: String) -> Self {
// self.ext = Some(ext); self.ext = Some(ext);
// self self
// } }
// pub fn vcodec(mut self, vcodec: String) -> Self { pub fn vcodec(mut self, vcodec: String) -> Self {
// self.vcodec = Some(vcodec); self.vcodec = Some(vcodec);
// self self
// } }
// pub fn acodec(mut self, acodec: String) -> Self { pub fn acodec(mut self, acodec: String) -> Self {
// self.acodec = Some(acodec); self.acodec = Some(acodec);
// self self
// } }
// pub fn dynamic_range(mut self, dynamic_range: String) -> Self { pub fn dynamic_range(mut self, dynamic_range: String) -> Self {
// self.dynamic_range = Some(dynamic_range); self.dynamic_range = Some(dynamic_range);
// self self
// } }
// pub fn abr(mut self, abr: u32) -> Self { pub fn abr(mut self, abr: u32) -> Self {
// self.abr = Some(abr); self.abr = Some(abr);
// self self
// } }
// pub fn vbr(mut self, vbr: u32) -> Self { pub fn vbr(mut self, vbr: u32) -> Self {
// self.vbr = Some(vbr); self.vbr = Some(vbr);
// self self
// } }
// pub fn container(mut self, container: String) -> Self { pub fn container(mut self, container: String) -> Self {
// self.container = Some(container); self.container = Some(container);
// self self
// } }
// pub fn protocol(mut self, protocol: String) -> Self { pub fn protocol(mut self, protocol: String) -> Self {
// self.protocol = Some(protocol); self.protocol = Some(protocol);
// self self
// } }
// pub fn audio_ext(mut self, audio_ext: String) -> Self { pub fn audio_ext(mut self, audio_ext: String) -> Self {
// self.audio_ext = Some(audio_ext); self.audio_ext = Some(audio_ext);
// self self
// } }
// pub fn video_ext(mut self, video_ext: String) -> Self { pub fn video_ext(mut self, video_ext: String) -> Self {
// self.video_ext = Some(video_ext); self.video_ext = Some(video_ext);
// self self
// } }
// pub fn resolution(mut self, resolution: String) -> Self { pub fn resolution(mut self, resolution: String) -> Self {
// self.resolution = Some(resolution); self.resolution = Some(resolution);
// self self
// } }
// pub fn http_headers(mut self, http_headers: HashMap<String, String>) -> Self { pub fn http_headers(mut self, http_headers: HashMap<String, String>) -> Self {
// self.http_headers = Some(http_headers); self.http_headers = Some(http_headers);
// self self
// } }
} }
#[derive(serde::Serialize, Debug)] #[derive(serde::Serialize, Debug)]
pub struct Videos { pub struct Videos {