Compare commits

..

63 Commits

Author SHA1 Message Date
Simon 5a08d2afe7 repeat a request if it fails initially 2026-01-21 11:32:02 +00:00
Simon 5224a2eb47 improved Error resistance 2026-01-21 11:24:03 +00:00
Simon e7fb0ed723 adapted to new layout 2026-01-16 08:54:37 +00:00
Simon 6a7bc68849 improved all provider 2026-01-15 19:17:46 +00:00
Simon 27e2bcdbba fixes 2026-01-15 19:04:28 +00:00
Simon 182eb8ac01 less printing 2026-01-14 15:42:44 +00:00
Simon e2f3bc2ecb bugfixes 2026-01-14 15:41:22 +00:00
Simon 4f9c7835bf added url to error log 2026-01-14 14:17:00 +00:00
Simon 87b9d20240 some more debugging 2026-01-14 14:15:26 +00:00
Simon 708560d2e8 removed prints 2026-01-14 11:50:15 +00:00
Simon cacd45d893 upgrades 2026-01-14 11:49:27 +00:00
Simon 602dbe50f0 bugfixes 2026-01-14 11:30:32 +00:00
Simon cce6104df3 title bugfix 2026-01-13 21:40:51 +00:00
Simon 34992242b7 various bugfixes 2026-01-13 18:13:51 +00:00
Simon aaff7d00c6 hypnotube 2026-01-10 18:29:29 +00:00
Simon eb49998593 fixed a bug where the url was wrongly formatted 2026-01-07 14:39:24 +00:00
Simon cf04441a69 javtiful proxy 2026-01-07 14:24:18 +00:00
Simon 6fac9d6d45 corrected status "cacheDuration" to 1800 2026-01-07 13:18:51 +00:00
Simon 2edb12a024 corrected url for search queries 2026-01-07 13:17:22 +00:00
Simon 7f3ae83b1b more bugfixes 2026-01-07 13:09:05 +00:00
Simon 0b3f1fdc1d macro fix 2026-01-07 13:06:36 +00:00
Simon 792e246121 bugfix 2026-01-07 13:06:15 +00:00
Simon 0fc3bed6a7 javtiful done 2026-01-07 12:48:38 +00:00
Simon c0368b2876 bugfixes 2026-01-03 23:51:19 +00:00
Simon 4a7528c516 bugfixes 2026-01-03 10:17:39 +00:00
Simon 97eeccf2bd more fixes 2026-01-02 15:32:07 +00:00
Simon 5ab2afa967 omgxxx bugfix 2026-01-02 15:11:27 +00:00
Simon 262b908692 more fixes 2026-01-02 14:58:29 +00:00
Simon 89eecbe790 bugfixes 2026-01-02 14:55:13 +00:00
Simon 27bb3daec4 more blacklisting 2025-12-27 10:25:00 +00:00
Simon f1eb3c236b typo 2025-12-27 10:20:58 +00:00
Simon e7854ac1ac bugfixes 2025-12-27 10:17:23 +00:00
Simon ca67eff142 bugfix 2025-12-25 22:53:27 +00:00
Simon 0e347234b3 bugfixes 2025-12-25 07:07:14 +00:00
Simon 11c8c1a48f ignore doodstream.com 2025-12-22 12:25:09 +00:00
Simon 6536fb13b3 better tag system 2025-12-11 11:58:11 +00:00
Simon 9789afb12b max size of 100k for fast cache 2025-12-08 07:12:20 +00:00
Simon b986faa1d4 healthcheck, logging and ulimit adjustment 2025-12-05 09:11:59 +00:00
Simon 7124b388fa cleanup 2025-12-05 09:09:47 +00:00
Simon 632931f515 search bugfix 2025-12-04 20:12:57 +00:00
Simon 9739560c03 removed unimportant prints 2025-12-04 13:51:34 +00:00
Simon 80d874a004 query bug fix 2025-12-04 13:37:24 +00:00
Simon 64dc7455ee http version 2 2025-12-04 13:27:16 +00:00
Simon 9e30eedc77 run init load in its own thread 2025-12-04 13:11:46 +00:00
Simon 75e28608bd missav bugfixes 2025-12-04 11:54:31 +00:00
Simon e22a3f2d6d prevent empty tags/formats 2025-12-01 16:07:45 +00:00
Simon 07b812be64 pimpbunny 2025-11-30 14:15:09 +00:00
Simon 61e38caed5 fixed wrong order of format/quality 2025-11-30 07:05:49 +00:00
Simon e5a6c8decc reverse formats order so high quality is selected first 2025-11-30 07:03:42 +00:00
Simon d856ade32b adjusted requester to supply http::version itself 2025-11-30 06:53:21 +00:00
Simon 2de6a7d42b testing found 2025-11-29 20:14:59 +00:00
Simon 39e38249b7 noodlemagazine 2025-11-29 20:08:46 +00:00
Simon e924c89573 undo 2025-11-29 18:52:48 +00:00
Simon 3f57569511 htmlencode videourl 2025-11-29 18:46:28 +00:00
Simon 23190ee05c bugfix 2025-11-29 17:22:41 +00:00
Simon 12053ce6db removed debug print 2025-11-29 17:21:04 +00:00
Simon 5522f2e37d pmvhaven backend fix 2025-11-29 17:16:21 +00:00
Simon 8f885c79d4 send categories in channel info 2025-11-29 15:56:22 +00:00
Simon d7e7f70bd2 bugfixes 2025-11-29 14:20:36 +00:00
Simon 0e02a1b821 tags upgrade 2025-11-29 13:55:56 +00:00
Simon cafb990fd4 removed debug prints 2025-11-29 08:24:14 +00:00
Simon 53ac33f856 hqporner 2025-11-29 08:20:38 +00:00
Simon ef57172fdd omg.xxx changed some html layouts 2025-11-29 08:20:34 +00:00
49 changed files with 3895 additions and 1791 deletions

View File

@@ -15,8 +15,8 @@ ntex = { version = "2.15.1", features = ["tokio"] }
ntex-files = "2.0.0" ntex-files = "2.0.0"
serde = "1.0.228" serde = "1.0.228"
serde_json = "1.0.145" serde_json = "1.0.145"
tokio = { version = "1.47.1", features = ["full"] } tokio = { version = "1.49", features = ["full"] }
wreq = { version = "5.3.0", features = ["full", "cookies"] } wreq = { version = "5.3.0", features = ["full", "cookies", "multipart"] }
wreq-util = "2" wreq-util = "2"
percent-encoding = "2.3.2" percent-encoding = "2.3.2"
capitalize = "0.3.4" capitalize = "0.3.4"
@@ -27,6 +27,8 @@ once_cell = "1.21.3"
rustc-hash = "2.1.1" rustc-hash = "2.1.1"
async-trait = "0.1" async-trait = "0.1"
regex = "1.12.2" regex = "1.12.2"
titlecase = "3.6.0"
dashmap = "6.1.0"
[lints.rust] [lints.rust]
unexpected_cfgs = "allow" unexpected_cfgs = "allow"

View File

@@ -51,7 +51,6 @@ impl HentaimoonProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
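Note: the five-minute cache check this hunk touches is the same pattern repeated across nearly every provider in this compare. A self-contained sketch of the idea, with a plain `HashMap` standing in for the project's `VideoCache` (whose real API is only partially visible here):

```rust
use std::collections::HashMap;
use std::time::SystemTime;

// Stand-in for the project's VideoCache; only get/insert/remove appear in this diff.
type Cache = HashMap<String, (SystemTime, Vec<String>)>;

/// Return the cached items if they are younger than `ttl_secs`, otherwise None.
fn fresh_hit(cache: &Cache, url: &str, ttl_secs: u64) -> Option<Vec<String>> {
    let (time, items) = cache.get(url)?;
    // elapsed() fails if the clock went backwards; unwrap_or_default() treats that
    // as age zero, i.e. a fresh hit, which matches what the providers do.
    if time.elapsed().unwrap_or_default().as_secs() < ttl_secs {
        Some(items.clone())
    } else {
        None
    }
}

fn main() {
    let mut cache = Cache::new();
    cache.insert("https://example.com/popular?p=1".into(), (SystemTime::now(), vec!["item".into()]));
    assert!(fresh_hit(&cache, "https://example.com/popular?p=1", 60 * 5).is_some());
}
```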

View File

@@ -1,197 +0,0 @@
use crate::util::parse_abbreviated_number;
use crate::DbPool;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use std::env;
use std::vec;
use wreq::{Client, Proxy};
use wreq_util::Emulation;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
}
#[derive(Debug, Clone)]
pub struct NoodlemagazineProvider {
url: String,
}
impl NoodlemagazineProvider {
pub fn new() -> Self {
NoodlemagazineProvider {
url: "https://noodlemagazine.com".to_string(),
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let video_url = format!("{}/popular/recent?p={}", self.url, page-1);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.replace(" ", "%20");
let video_url = format!("{}/video/{}?p={}", self.url, search_string, page);
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() {
println!("HTML is empty");
return vec![];
}
let mut items: Vec<VideoItem> = Vec::new();
let raw_videos = html.split("- Made with <svg ").collect::<Vec<&str>>()[0]
.split("<div class=\"item\">")
.collect::<Vec<&str>>()[1..]
.to_vec();
for video_segment in &raw_videos {
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
// for (index, line) in vid.iter().enumerate() {
// println!("Line {}: {}", index, line);
// }
let video_url: String = format!("{}{}",self.url, video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0].to_string());
let mut title = video_segment.split("\"title\">").collect::<Vec<&str>>()[1]
.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
// html decode
title = decode(title.as_bytes()).to_string().unwrap_or(title);
let id = video_url.split("/").collect::<Vec<&str>>()[4].to_string();
let raw_duration = video_segment.split("#clock-o").collect::<Vec<&str>>()[1]
.split("</svg>").collect::<Vec<&str>>()[1]
.split("<").collect::<Vec<&str>>()[0]
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let thumb = video_segment.split("<img ").collect::<Vec<&str>>()[1]
.split("data-src=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0]
.to_string();
let views_part = video_segment.split("#eye").collect::<Vec<&str>>()[1]
.split("</svg>").collect::<Vec<&str>>()[1]
.split("<").collect::<Vec<&str>>()[0]
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let views = parse_abbreviated_number(&views_part).unwrap_or(0) as u32;
let video_item = VideoItem::new(
id,
title,
video_url.to_string(),
"noodlemagazine".to_string(),
thumb,
duration,
)
.views(views)
;
items.push(video_item);
}
return items;
}
}
impl Provider for NoodlemagazineProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = per_page;
let _ = pool;
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
Some(q) => {
self.query(cache, page.parse::<u8>().unwrap_or(1), &q,options)
.await
}
None => {
self.get(cache, page.parse::<u8>().unwrap_or(1), options)
.await
}
};
match videos {
Ok(v) => v,
Err(e) => {
println!("Error fetching videos: {}", e);
vec![]
}
}
}
}

View File

@@ -38,7 +38,7 @@ impl SpankbangProvider {
     let old_items = match cache.get(&url) {
         Some((time, items)) => {
             if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-                println!("Cache hit for URL: {}", url);
+                // println!("Cache hit for URL: {}", url);
                 return Ok(items.clone());
             }
             else{
@@ -123,7 +123,7 @@ impl SpankbangProvider {
     let old_items = match cache.get(&url) {
         Some((time, items)) => {
             if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-                println!("Cache hit for URL: {}", url);
+                // println!("Cache hit for URL: {}", url);
                 return Ok(items.clone());
             }
             else{

View File

@@ -14,17 +14,33 @@ services:
     environment:
       - RUST_LOG=info
       - BURP_URL=http://127.0.0.1:8081 # local burpsuite proxy for crawler analysis
-      - PROXY=1 # 1 for enable, else disabled
+      - PROXY=0 # 1 for enable, else disabled
       - DATABASE_URL=hottub.db # sqlite db to store hard to get videos for easy access
       - FLARE_URL=http://flaresolverr:8191/v1 # flaresolverr to get around cloudflare 403 codes
-      - DOMAIN=hottub.spacemoehre.de # optional for the 302 forward on "/"
+      - DOMAIN=hottub.spacemoehre.de # optional for the 302 forward on "/" to
     restart: unless-stopped
     working_dir: /app
     ports:
       - 80:18080
       - 6901:6901 # vnc port to access burpsuite
-      - 8081:8080 # burpsuite port of http proxy
+      - 8081:8080 # burpsuite port of http(s) proxy
+    logging:
+      driver: "json-file"
+      options:
+        max-size: "10m" # Maximum size of each log file (e.g., 10MB)
+        max-file: "3" # Maximum number of log files to keep
+    healthcheck:
+      test: ["CMD-SHELL", "curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:18080/api/status | grep -q 200"]
+      interval: 30s
+      timeout: 5s
+      retries: 3
+      start_period: 1s
+    ulimits:
+      nofile:
+        soft: 65536
+        hard: 65536
+  # flaresolverr to bypass cloudflare protections
   flaresolverr:
     container_name: flaresolverr
     ports:
@@ -39,9 +55,4 @@ services:
max-size: "10m" # Maximum size of each log file (e.g., 10MB) max-size: "10m" # Maximum size of each log file (e.g., 10MB)
max-file: "3" # Maximum number of log files to keep max-file: "3" # Maximum number of log files to keep
restarter: # restarts the flaresolverr so its always ready for work
image: docker:cli
container_name: flaresolverr-restarter
volumes: ["/var/run/docker.sock:/var/run/docker.sock"]
command: ["/bin/sh", "-c", "while true; do sleep 26400; docker restart flaresolverr; done"]
restart: unless-stopped

View File

@@ -3,19 +3,19 @@ use ntex::http::header;
 use ntex::web;
 use ntex::web::HttpRequest;
 use std::cmp::Ordering;
-use std::fs;
+use std::{fs, io};
 use tokio::task;
 use crate::providers::all::AllProvider;
 use crate::providers::hanime::HanimeProvider;
 use crate::providers::okporn::OkpornProvider;
 use crate::providers::perverzija::PerverzijaProvider;
-use crate::providers::pmvhaven::PmvhavenProvider;
 use crate::providers::pornhub::PornhubProvider;
 use crate::providers::redtube::RedtubeProvider;
 use crate::providers::rule34video::Rule34videoProvider;
 // use crate::providers::spankbang::SpankbangProvider;
 use crate::util::cache::VideoCache;
+use crate::util::discord::send_discord_error_report;
 use crate::util::requester::Requester;
 use crate::{DbPool, db, status::*, videos::*};
 use cute::c;
@@ -105,7 +105,12 @@ pub fn config(cfg: &mut web::ServiceConfig) {
web::resource("/videos") web::resource("/videos")
// .route(web::get().to(videos_get)) // .route(web::get().to(videos_get))
.route(web::post().to(videos_post)), .route(web::post().to(videos_post)),
); )
.service(
web::resource("/test")
.route(web::get().to(test))
)
;
} }
async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> { async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
@@ -158,88 +163,16 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
id: "lg".to_string(), id: "lg".to_string(),
title: "Longest".to_string(), title: "Longest".to_string(),
}, },
FilterOption {
id: "cm".to_string(),
title: "Newest".to_string(),
},
], ],
multiSelect: false, multiSelect: false,
}], }],
nsfw: true, nsfw: true,
cacheDuration: Some(1800), cacheDuration: Some(1800),
}); });
if clientversion >= ClientVersion::new(22, 101, "22e".to_string()) {
// pmvhaven
status.add_channel(Channel {
id: "pmvhaven".to_string(),
name: "Pmvhaven".to_string(),
description: "Explore a curated collection of captivating PMV".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=pmvhaven.com".to_string(),
status: "active".to_string(),
categories: vec![],
options: vec![
ChannelOption {
id: "category".to_string(),
title: "Category".to_string(),
description: "Category of PMV Video get".to_string(), //"Sort the videos by Date or Name.".to_string(),
systemImage: "folder".to_string(),
colorName: "yellow".to_string(),
options: vec![
FilterOption {
id: "all".to_string(),
title: "All".to_string(),
},
FilterOption {
id: "pmv".to_string(),
title: "PMV".to_string(),
},
FilterOption {
id: "hmv".to_string(),
title: "HMV".to_string(),
},
FilterOption {
id: "tiktok".to_string(),
title: "Tiktok".to_string(),
},
FilterOption {
id: "koreanbj".to_string(),
title: "KoreanBJ".to_string(),
},
FilterOption {
id: "hypno".to_string(),
title: "Hypno".to_string(),
},
FilterOption {
id: "other".to_string(),
title: "Other".to_string(),
},
],
multiSelect: false,
},
ChannelOption {
id: "sort".to_string(),
title: "Filter".to_string(),
description: "Filter PMV Videos".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "Newest".to_string(),
title: "Newest".to_string(),
},
FilterOption {
id: "Top Rated".to_string(),
title: "Top Rated".to_string(),
},
FilterOption {
id: "Most Viewed".to_string(),
title: "Most Viewed".to_string(),
},
],
multiSelect: false,
},
],
nsfw: true,
cacheDuration: Some(1800),
});
}
     if clientversion >= ClientVersion::new(22, 97, "22a".to_string()) {
         // perverzija
         status.add_channel(Channel {
@@ -418,40 +351,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
         cacheDuration: None,
     });
// status.add_channel(Channel {
// id: "spankbang".to_string(),
// name: "SpankBang".to_string(),
// description: "Popular Porn Videos - SpankBang".to_string(),
// premium: false,
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=spankbang.com".to_string(),
// status: "active".to_string(),
// categories: vec![],
// options: vec![ChannelOption {
// id: "sort".to_string(),
// title: "Sort".to_string(),
// description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
// systemImage: "list.number".to_string(),
// colorName: "blue".to_string(),
// options: vec![
// FilterOption {
// id: "trending_videos".to_string(),
// title: "Trending".to_string(),
// },
// FilterOption {
// id: "new_videos".to_string(),
// title: "New".to_string(),
// },
// FilterOption {
// id: "most_popular".to_string(),
// title: "Popular".to_string(),
// },
// ],
// multiSelect: false,
// }],
// nsfw: true,
//cacheDuration: Some(1800),
// });
     // rule34video
     status.add_channel(Channel {
         id: "rule34video".to_string(),
@@ -720,41 +619,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
         cacheDuration: Some(1800),
     });
// // hentaimoon
// status.add_channel(Channel {
// id: "hentaimoon".to_string(),
// name: "Hentai Moon".to_string(),
// description: "Your Hentai Sputnik".to_string(),
// premium: false,
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=hentai-moon.com".to_string(),
// status: "active".to_string(),
// categories: vec![],
// options: vec![ChannelOption {
// id: "sort".to_string(),
// title: "Sort".to_string(),
// description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
// systemImage: "list.number".to_string(),
// colorName: "blue".to_string(),
// options: vec![
// FilterOption {
// id: "new".to_string(),
// title: "New".to_string(),
// },
// FilterOption {
// id: "popular".to_string(),
// title: "Popular".to_string(),
// },
// FilterOption {
// id: "top-rated".to_string(),
// title: "Top Rated".to_string(),
// },
// ],
// multiSelect: false,
// }],
// nsfw: true,
// cacheDuration: Some(1800),
// });
     // xxthots
     status.add_channel(Channel {
         id: "xxthots".to_string(),
@@ -925,20 +789,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
         cacheDuration: None,
     });
// noodlemagazine
// status.add_channel(Channel {
// id: "noodlemagazine".to_string(),
// name: "Noodlemagazine".to_string(),
// description: "Discover the Best Adult Videos".to_string(),
// premium: false,
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=noodlemagazine.com".to_string(),
// status: "active".to_string(),
// categories: vec![],
// options: vec![],
// nsfw: true,
// cacheDuration: Some(1800),
// });
     //missav
     status.add_channel(Channel {
         id: "missav".to_string(),
@@ -1138,7 +988,9 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
     }
     for provider in ALL_PROVIDERS.values() {
-        status.add_channel(provider.get_channel(clientversion.clone()));
+        if let Some(channel) = provider.get_channel(clientversion.clone()){
+            status.add_channel(channel);
+        }
     }
     status.iconUrl = format!("http://{}/favicon.ico", host).to_string();
     Ok(web::HttpResponse::Ok().json(&status))
@@ -1241,6 +1093,7 @@ async fn videos_post(
         stars: Some(stars),
         categories: Some(categories),
         duration: Some(duration),
+        sort: Some(sort.clone())
     };
     let video_items = provider
         .get_videos(
@@ -1308,7 +1161,6 @@ pub fn get_provider(channel: &str) -> Option<DynProvider> {
"perverzija" => Some(Arc::new(PerverzijaProvider::new())), "perverzija" => Some(Arc::new(PerverzijaProvider::new())),
"hanime" => Some(Arc::new(HanimeProvider::new())), "hanime" => Some(Arc::new(HanimeProvider::new())),
"pornhub" => Some(Arc::new(PornhubProvider::new())), "pornhub" => Some(Arc::new(PornhubProvider::new())),
"pmvhaven" => Some(Arc::new(PmvhavenProvider::new())),
"rule34video" => Some(Arc::new(Rule34videoProvider::new())), "rule34video" => Some(Arc::new(Rule34videoProvider::new())),
"redtube" => Some(Arc::new(RedtubeProvider::new())), "redtube" => Some(Arc::new(RedtubeProvider::new())),
"okporn" => Some(Arc::new(OkpornProvider::new())), "okporn" => Some(Arc::new(OkpornProvider::new())),
@@ -1334,3 +1186,19 @@ pub fn get_provider(channel: &str) -> Option<DynProvider> {
         x => ALL_PROVIDERS.get(x).cloned(),
     }
 }
pub async fn test() -> Result<impl web::Responder, web::Error> {
// Simply await the function instead of blocking the thread
let e = io::Error::new(io::ErrorKind::Other, "test error");
let _ = send_discord_error_report(
e.to_string(),
Some("chain_str".to_string()),
Some("Context"),
Some("xtra info"),
file!(),
line!(),
module_path!(),
).await;
Ok(web::HttpResponse::Ok())
}

View File

@@ -2,7 +2,7 @@
 #![allow(non_snake_case)]
-use std::env;
+use std::{env, thread};
 use diesel::{r2d2::{self, ConnectionManager}, SqliteConnection};
 use dotenvy::dotenv;
@@ -49,9 +49,19 @@ async fn main() -> std::io::Result<()> {
     let mut requester = util::requester::Requester::new();
     requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string());
-    let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
+    let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new().max_size(100_000).to_owned();
+    thread::spawn(move || {
+        // Create a tiny runtime just for these async tasks
+        let rt = tokio::runtime::Builder::new_current_thread()
+            .enable_all()
+            .build()
+            .expect("build tokio runtime");
+        rt.block_on(async move {
             providers::init_providers_now();
+        });
+    });
     web::HttpServer::new(move || {
         web::App::new()
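Note: the change above moves provider initialization onto its own OS thread with a dedicated current-thread Tokio runtime, so the HTTP server can start without waiting for it. A self-contained sketch of that pattern (the `slow_init` function is a stand-in for `providers::init_providers_now()`):

```rust
use std::thread;
use std::time::Duration;

async fn slow_init() {
    // Stand-in for whatever the background initialization actually does.
    tokio::time::sleep(Duration::from_millis(100)).await;
}

fn main() {
    thread::spawn(|| {
        // A tiny single-threaded runtime that lives only on this thread.
        let rt = tokio::runtime::Builder::new_current_thread()
            .enable_all()
            .build()
            .expect("build tokio runtime");
        rt.block_on(slow_init());
    });

    // main() continues immediately; in main.rs this is where HttpServer::new(...) runs.
    println!("server startup proceeds while init runs in the background");
    thread::sleep(Duration::from_millis(200)); // keep the process alive for the demo
}
```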

View File

@@ -1,7 +1,9 @@
 use std::fs;
+use std::time::Duration;
 use async_trait::async_trait;
 use error_chain::error_chain;
-use futures::future::join_all;
+use futures::StreamExt;
+use futures::stream::FuturesUnordered;
 use crate::api::{get_provider, ClientVersion};
 use crate::providers::{DynProvider, Provider};
 use crate::status::Channel;
@@ -41,7 +43,7 @@ impl Provider for AllProvider {
         per_page: String,
         options: ServerOptions,
     ) -> Vec<VideoItem> {
-        let mut sites_str = options.clone().sites.unwrap();
+        let mut sites_str = options.clone().sites.unwrap_or_default();
         if sites_str.is_empty() {
             let files = fs::read_dir("./src/providers").unwrap();
             let providers = files.map(|entry| entry.unwrap().file_name())
@@ -51,35 +53,69 @@ impl Provider for AllProvider {
                 .collect::<Vec<String>>();
             sites_str = providers.join(",");
         }
         let providers: Vec<DynProvider> = sites_str
             .split(',')
             .filter(|s| !s.is_empty())
-            .filter_map(|s| get_provider(s)) // assumes get_provider -> Option<DynProvider>
+            .filter_map(|s| get_provider(s))
             .collect();
-        let futures = providers.iter().map(|provider| {
-            provider.get_videos(
-                cache.clone(),
-                pool.clone(),
-                sort.clone(),
-                query.clone(),
-                page.clone(),
-                per_page.clone(),
-                options.clone()
-            )
-        }).collect::<Vec<_>>();
-        let results:Vec<Vec<VideoItem>> = join_all(futures).await;
-        let video_items: Vec<VideoItem> = interleave(&results);
-        return video_items;
-    }
-    fn get_channel(&self,clientversion:ClientVersion) -> Channel {
-        println!("Getting channel for placeholder with client version: {:?}",clientversion);
+        let mut futures = FuturesUnordered::new();
+        for provider in providers {
+            let cache = cache.clone();
+            let pool = pool.clone();
+            let sort = sort.clone();
+            let query = query.clone();
+            let page = page.clone();
+            let per_page = per_page.clone();
+            let options = options.clone();
+            // Spawn the task so it lives independently of this function
+            futures.push(tokio::spawn(async move {
+                provider.get_videos(cache, pool, sort, query, page, per_page, options).await
+            }));
+        }
+        let mut all_results = Vec::new();
+        let timeout_timer = tokio::time::sleep(Duration::from_secs(10));
+        tokio::pin!(timeout_timer);
+        // Collect what we can within 55 seconds
+        loop {
+            tokio::select! {
+                Some(result) = futures.next() => {
+                    // Ignore errors (panics or task cancellations)
+                    if let Ok(videos) = result {
+                        all_results.push(videos);
+                    }
+                },
+                _ = &mut timeout_timer => {
+                    // 55 seconds passed. Stop waiting and return what we have.
+                    // The tasks remaining in 'futures' will continue running in the
+                    // background because they were 'tokio::spawn'ed.
+                    break;
+                },
+                else => break, // All tasks finished before the timeout
+            }
+        }
+        interleave(&all_results)
+    }
+    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
         let _ = clientversion;
-        Channel {
-            id:"placeholder".to_string(),name:"PLACEHOLDER".to_string(),description:"PLACEHOLDER FOR PARENT CLASS".to_string(),premium:false,favicon:"https://www.google.com/s2/favicons?sz=64&domain=missav.ws".to_string(),status:"active".to_string(),categories:vec![],options:vec![],nsfw:true,cacheDuration:None,
-        }
+        Some(Channel {
+            id: "placeholder".to_string(),
+            name: "PLACEHOLDER".to_string(),
+            description: "PLACEHOLDER FOR PARENT CLASS".to_string(),
+            premium: false,
+            favicon: "https://hottub.spacemoehre.de/favicon.ico".to_string(),
+            status: "active".to_string(),
+            categories: vec![],
+            options: vec![],
+            nsfw: true,
+            cacheDuration: None,
+        })
     }
 }
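Note: the rewritten AllProvider above spawns each provider query as its own task and stops collecting once a deadline fires (the code sleeps 10 seconds while the comments still say 55, so treat the exact budget as intent rather than spec). A self-contained sketch of the `FuturesUnordered` + `tokio::select!` pattern it uses, with dummy providers:

```rust
use std::time::Duration;
use futures::stream::FuturesUnordered;
use futures::StreamExt;

#[tokio::main]
async fn main() {
    // One spawned task per "provider"; the 5-second one will miss the deadline.
    let mut tasks = FuturesUnordered::new();
    for delay_ms in [50u64, 120, 5_000] {
        tasks.push(tokio::spawn(async move {
            tokio::time::sleep(Duration::from_millis(delay_ms)).await;
            vec![format!("items from the {delay_ms}ms provider")]
        }));
    }

    let timeout = tokio::time::sleep(Duration::from_millis(500));
    tokio::pin!(timeout);

    let mut collected: Vec<Vec<String>> = Vec::new();
    loop {
        tokio::select! {
            Some(result) = tasks.next() => {
                // A JoinError (panic/cancellation) is simply skipped, as in AllProvider.
                if let Ok(items) = result {
                    collected.push(items);
                }
            }
            _ = &mut timeout => break, // deadline hit: return what we have so far
            else => break,             // every task finished before the deadline
        }
    }
    println!("collected {} of 3 result sets before the deadline", collected.len());
}
```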

View File

@@ -3,7 +3,6 @@ use crate::api::ClientVersion;
 use crate::providers::Provider;
 use crate::util::cache::VideoCache;
 use crate::util::parse_abbreviated_number;
-use crate::util::time::parse_time_to_seconds;
 use crate::videos::{ServerOptions, VideoItem};
 use crate::{status::*, util};
 use async_trait::async_trait;
@@ -18,6 +17,13 @@ error_chain! {
     foreign_links {
         Io(std::io::Error);
         HttpRequest(wreq::Error);
+        Json(serde_json::Error);
     }
+    errors {
+        Parse(msg: String) {
+            description("parse error")
+            display("parse error: {}", msg)
+        }
+    }
 }
@@ -27,24 +33,15 @@ pub struct BeegProvider {
     stars: Arc<RwLock<Vec<FilterOption>>>,
     categories: Arc<RwLock<Vec<FilterOption>>>,
 }
 impl BeegProvider {
     pub fn new() -> Self {
         let provider = BeegProvider {
-            sites: Arc::new(RwLock::new(vec![FilterOption {
-                id: "all".to_string(),
-                title: "All".to_string(),
-            }])),
-            stars: Arc::new(RwLock::new(vec![FilterOption {
-                id: "all".to_string(),
-                title: "All".to_string(),
-            }])),
-            categories: Arc::new(RwLock::new(vec![FilterOption {
-                id: "all".to_string(),
-                title: "All".to_string(),
-            }])),
+            sites: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
+            stars: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
+            categories: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
         };
+        // Kick off the background load but return immediately
         provider.spawn_initial_load();
         provider
     }
@@ -55,160 +52,142 @@ impl BeegProvider {
         let stars = Arc::clone(&self.stars);
         thread::spawn(move || {
-            // Create a tiny runtime just for these async tasks
-            let rt = tokio::runtime::Builder::new_current_thread()
-                .enable_all()
-                .build()
-                .expect("build tokio runtime");
+            let rt = match tokio::runtime::Builder::new_current_thread().enable_all().build() {
+                Ok(rt) => rt,
+                Err(e) => {
+                    eprintln!("beeg runtime init failed: {}", e);
+                    return;
+                }
+            };
             rt.block_on(async move {
-                // If you have a streaming sites loader, call it here too
                 if let Err(e) = Self::load_sites(sites).await {
-                    eprintln!("beeg load_sites_into failed: {e}");
+                    eprintln!("beeg load_sites failed: {}", e);
                 }
                 if let Err(e) = Self::load_categories(categories).await {
-                    eprintln!("beeg load_categories failed: {e}");
+                    eprintln!("beeg load_categories failed: {}", e);
                 }
                 if let Err(e) = Self::load_stars(stars).await {
-                    eprintln!("beeg load_stars failed: {e}");
+                    eprintln!("beeg load_stars failed: {}", e);
                 }
             });
         });
     }
+    async fn fetch_tags() -> Result<Value> {
+        let mut requester = util::requester::Requester::new();
+        let text = match requester
+            .get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index", None)
+            .await {
+            Ok(text) => text,
+            Err(e) => {
+                eprintln!("beeg fetch_tags failed: {}", e);
+                return Err(ErrorKind::Parse("failed to fetch tags".into()).into());
+            }
+        };
+        Ok(serde_json::from_str(&text)?)
+    }
     async fn load_stars(stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
-        let mut requester = util::requester::Requester::new();
-        let text = requester
-            .get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
-            .await
-            .unwrap();
-        let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
-        let stars_array = json.get("human").unwrap().as_array().unwrap();
-        for s in stars_array {
-            let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
-            let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
-            Self::push_unique(
-                &stars,
-                FilterOption {
-                    id: star_id,
-                    title: star_name,
-                },
-            );
-        }
-        return Ok(());
+        let json = Self::fetch_tags().await?;
+        let arr = json
+            .get("human")
+            .and_then(|v| v.as_array().map(|v| v.as_slice()))
+            .unwrap_or(&[]);
+        for s in arr {
+            if let (Some(name), Some(id)) = (
+                s.get("tg_name").and_then(|v| v.as_str()),
+                s.get("tg_slug").and_then(|v| v.as_str()),
+            ) {
+                Self::push_unique(&stars, FilterOption { id: id.into(), title: name.into() });
+            }
+        }
+        Ok(())
     }
     async fn load_categories(categories: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
-        let mut requester = util::requester::Requester::new();
-        let text = requester
-            .get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
-            .await
-            .unwrap();
-        let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
-        let stars_array = json.get("other").unwrap().as_array().unwrap();
-        for s in stars_array {
-            let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
-            let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
-            Self::push_unique(
-                &categories,
-                FilterOption {
-                    id: star_id.replace("{","").replace("}",""),
-                    title: star_name.replace("{","").replace("}",""),
-                },
-            );
-        }
-        return Ok(());
+        let json = Self::fetch_tags().await?;
+        let arr = json
+            .get("other")
+            .and_then(|v| v.as_array().map(|v| v.as_slice()))
+            .unwrap_or(&[]);
+        for s in arr {
+            if let (Some(name), Some(id)) = (
+                s.get("tg_name").and_then(|v| v.as_str()),
+                s.get("tg_slug").and_then(|v| v.as_str()),
+            ) {
+                Self::push_unique(
+                    &categories,
+                    FilterOption {
+                        id: id.replace('{', "").replace('}', ""),
+                        title: name.replace('{', "").replace('}', ""),
+                    },
+                );
+            }
+        }
+        Ok(())
     }
     async fn load_sites(sites: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
-        let mut requester = util::requester::Requester::new();
-        let text = requester
-            .get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
-            .await
-            .unwrap();
-        let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
-        let stars_array = json.get("productions").unwrap().as_array().unwrap();
-        for s in stars_array {
-            let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
-            let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
-            Self::push_unique(
-                &sites,
-                FilterOption {
-                    id: star_id,
-                    title: star_name,
-                },
-            );
-        }
-        return Ok(());
+        let json = Self::fetch_tags().await?;
+        let arr = json
+            .get("productions")
+            .and_then(|v| v.as_array().map(|v| v.as_slice()))
+            .unwrap_or(&[]);
+        for s in arr {
+            if let (Some(name), Some(id)) = (
+                s.get("tg_name").and_then(|v| v.as_str()),
+                s.get("tg_slug").and_then(|v| v.as_str()),
+            ) {
+                Self::push_unique(&sites, FilterOption { id: id.into(), title: name.into() });
+            }
+        }
+        Ok(())
     }
-    // Push one item with minimal lock time and dedup by id
     fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
         if let Ok(mut vec) = target.write() {
             if !vec.iter().any(|x| x.id == item.id) {
                 vec.push(item);
-                // Optional: keep it sorted for nicer UX
-                // vec.sort_by(|a,b| a.title.cmp(&b.title));
             }
         }
     }
-    fn build_channel(&self, clientversion: ClientVersion) -> Channel {
-        let _ = clientversion;
-        let sites: Vec<FilterOption> = self
-            .sites
-            .read()
-            .map(|g| g.clone()) // or: .map(|g| g.to_vec())
-            .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
-        let categories: Vec<FilterOption> = self
-            .categories
-            .read()
-            .map(|g| g.clone()) // or: .map(|g| g.to_vec())
-            .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
-        let stars: Vec<FilterOption> = self
-            .stars
-            .read()
-            .map(|g| g.clone()) // or: .map(|g| g.to_vec())
-            .unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
+    fn build_channel(&self, _: ClientVersion) -> Channel {
         Channel {
-            id: "beeg".to_string(),
-            name: "Beeg".to_string(),
-            description: "Watch your favorite Porn on Beeg.com".to_string(),
+            id: "beeg".into(),
+            name: "Beeg".into(),
+            description: "Watch your favorite Porn on Beeg.com".into(),
             premium: false,
-            favicon: "https://www.google.com/s2/favicons?sz=64&domain=beeg.com".to_string(),
-            status: "active".to_string(),
+            favicon: "https://www.google.com/s2/favicons?sz=64&domain=beeg.com".into(),
+            status: "active".into(),
             categories: vec![],
             options: vec![
                 ChannelOption {
-                    id: "sites".to_string(),
-                    title: "Sites".to_string(),
-                    description: "Filter for different Sites".to_string(),
-                    systemImage: "rectangle.stack".to_string(),
-                    colorName: "green".to_string(),
-                    options: sites,
+                    id: "sites".into(),
+                    title: "Sites".into(),
+                    description: "Filter for different Sites".into(),
+                    systemImage: "rectangle.stack".into(),
+                    colorName: "green".into(),
+                    options: self.sites.read().map(|v| v.clone()).unwrap_or_default(),
                     multiSelect: false,
                 },
                 ChannelOption {
-                    id: "categories".to_string(),
-                    title: "Categories".to_string(),
-                    description: "Filter for different Networks".to_string(),
-                    systemImage: "list.dash".to_string(),
-                    colorName: "purple".to_string(),
-                    options: categories,
+                    id: "categories".into(),
+                    title: "Categories".into(),
+                    description: "Filter for different Networks".into(),
+                    systemImage: "list.dash".into(),
+                    colorName: "purple".into(),
+                    options: self.categories.read().map(|v| v.clone()).unwrap_or_default(),
                     multiSelect: false,
                 },
                 ChannelOption {
-                    id: "stars".to_string(),
-                    title: "Stars".to_string(),
-                    description: "Filter for different Pornstars".to_string(),
-                    systemImage: "star.fill".to_string(),
-                    colorName: "yellow".to_string(),
-                    options: stars,
+                    id: "stars".into(),
+                    title: "Stars".into(),
+                    description: "Filter for different Pornstars".into(),
+                    systemImage: "star.fill".into(),
+                    colorName: "yellow".into(),
+                    options: self.stars.read().map(|v| v.clone()).unwrap_or_default(),
                     multiSelect: false,
                 },
             ],
@@ -252,7 +231,6 @@ impl BeegProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -263,7 +241,7 @@ impl BeegProvider {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(json.clone());
         if !video_items.is_empty() {
@@ -304,7 +282,7 @@ impl BeegProvider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(json.clone());
         if !video_items.is_empty() {
@@ -317,89 +295,60 @@ impl BeegProvider {
     }
     fn get_video_items_from_html(&self, json: Value) -> Vec<VideoItem> {
-        let mut items: Vec<VideoItem> = Vec::new();
-        let video_items = match json.as_array(){
-            Some(array) => array,
+        let mut items = Vec::new();
+        let array = match json.as_array() {
+            Some(a) => a,
             None => return items,
         };
-        for video in video_items {
-            // println!("video: {}\n\n\n", serde_json::to_string_pretty(&video).unwrap());
-            let file = match video.get("file"){
-                Some(v) => v,
-                None => continue,
-            };
-            let hls_resources = match file.get("hls_resources"){
-                Some(v) => v,
-                None => continue,
-            };
-            let video_key = match hls_resources.get("fl_cdn_multi"){
-                Some(v) => v,
-                None => continue,
-            };
-            let video_url = format!(
-                "https://video.externulls.com/{}",
-                video_key.to_string().replace("\"","")
-            );
-            let data = match file.get("data") {
-                Some(v) => v,
-                None => continue,
-            };
-            let title = match data[0].get("cd_value") {
-                Some(v) => decode(v.as_str().unwrap_or("").as_bytes()).to_string().unwrap_or(v.to_string()),
-                None => "".to_string(),
-            };
-            let id = match file.get("id"){
-                Some(v) => v.as_i64().unwrap_or(0).to_string(),
-                None => title.clone(),
-            };
-            let fc_facts = match video.get("fc_facts") {
-                Some(v) => v[0].clone(),
-                None => continue,
-            };
-            let duration = match file.get("fl_duration") {
-                Some(v) => parse_time_to_seconds(v.as_str().unwrap_or("0")).unwrap_or(0),
-                None => 0,
-            };
-            let tags = match video.get("tags") {
-                Some(v) => {
-                    // v should be an array of tag objects
-                    v.as_array()
-                        .map(|arr| {
-                            arr.iter()
-                                .map(|tag| {
-                                    tag.get("tg_name")
-                                        .and_then(|name| name.as_str())
-                                        .unwrap_or("")
-                                        .to_string()
-                                })
-                                .collect::<Vec<String>>()
-                        })
-                        .unwrap_or_default()
-                }
-                None => Vec::new(),
-            };
+        for video in array {
+            let file = match video.get("file") { Some(v) => v, None => continue };
+            let hls = match file.get("hls_resources") { Some(v) => v, None => continue };
+            let key = match hls.get("fl_cdn_multi").and_then(|v| v.as_str()) {
+                Some(v) => v,
+                None => continue,
+            };
+            let id = file.get("id").and_then(|v| v.as_i64()).unwrap_or(0).to_string();
+            let title = file
+                .get("data")
+                .and_then(|v| v.get(0))
+                .and_then(|v| v.get("cd_value"))
+                .and_then(|v| v.as_str())
+                .map(|s| decode(s.as_bytes()).to_string().unwrap_or_default())
+                .unwrap_or_default();
+            let duration = file
+                .get("fl_duration")
+                .and_then(|v| v.as_u64())
+                .unwrap_or(0);
+            let views = video
+                .get("fc_facts")
+                .and_then(|v| v.get(0))
+                .and_then(|v| v.get("fc_st_views"))
+                .and_then(|v| v.as_str())
+                .and_then(|s| parse_abbreviated_number(s))
+                .unwrap_or(0);
             let thumb = format!("https://thumbs.externulls.com/videos/{}/0.webp?size=480x270", id);
-            let views = match fc_facts.get("fc_st_views") {
-                Some(v) => parse_abbreviated_number(v.as_str().unwrap_or("0")).unwrap_or(0),
-                None => 0,
-            };
-            let mut video_item = VideoItem::new(
+            let mut item = VideoItem::new(
                 id,
                 title,
-                video_url.to_string(),
-                "beeg".to_string(),
+                format!("https://video.externulls.com/{}", key),
+                "beeg".into(),
                 thumb,
                 duration as u32,
             );
             if views > 0 {
-                video_item = video_item.views(views);
+                item = item.views(views);
             }
-            if !tags.is_empty() {
-                video_item = video_item.tags(tags);
-            }
-            items.push(video_item);
+            items.push(item);
         }
-        return items;
+        items
     }
 }
@@ -408,32 +357,26 @@ impl Provider for BeegProvider {
     async fn get_videos(
         &self,
         cache: VideoCache,
-        _pool: DbPool,
-        _sort: String,
+        _: DbPool,
+        _: String,
         query: Option<String>,
         page: String,
-        _per_page: String,
+        _: String,
         options: ServerOptions,
     ) -> Vec<VideoItem> {
-        let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
-            Some(q) => {
-                self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
-                    .await
-            }
-            None => {
-                self.get(cache, page.parse::<u8>().unwrap_or(1), options)
-                    .await
-            }
+        let page = page.parse::<u8>().unwrap_or(1);
+        let result = match query {
+            Some(q) => self.query(cache, page, &q, options).await,
+            None => self.get(cache, page, options).await,
         };
-        match videos {
-            Ok(v) => v,
-            Err(e) => {
-                println!("Error fetching videos: {}", e);
-                vec![]
-            }
-        }
-    }
-    fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
-        self.build_channel(clientversion)
+        result.unwrap_or_else(|e| {
+            eprintln!("beeg provider error: {}", e);
+            vec![]
+        })
+    }
+    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
+        Some(self.build_channel(clientversion))
     }
 }
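Note: the beeg rewrite above replaces chained `.unwrap()` calls with `Option` combinators, so a single malformed entry is skipped instead of panicking the whole parse. A small sketch of that serde_json navigation style on a made-up payload shaped loosely like the fields the provider reads:

```rust
use serde_json::{json, Value};

fn main() {
    // Made-up payload; field names mirror the ones the beeg parser reads.
    let payload: Value = json!([
        { "file": { "id": 42, "fl_duration": 613,
                    "data": [ { "cd_value": "Some title" } ],
                    "hls_resources": { "fl_cdn_multi": "hls/42/master.m3u8" } } },
        { "file": { "id": 43 } } // missing fields: this entry is skipped, not a panic
    ]);

    for video in payload.as_array().cloned().unwrap_or_default() {
        // Every step returns Option, so a missing field short-circuits to None.
        let key = match video
            .get("file")
            .and_then(|f| f.get("hls_resources"))
            .and_then(|h| h.get("fl_cdn_multi"))
            .and_then(|v| v.as_str())
        {
            Some(k) => k.to_string(),
            None => continue, // same "skip this entry" behaviour as the provider
        };
        let title = video
            .get("file")
            .and_then(|f| f.get("data"))
            .and_then(|d| d.get(0))
            .and_then(|d| d.get("cd_value"))
            .and_then(|v| v.as_str())
            .unwrap_or_default();
        println!("{title}: {key}");
    }
}
```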

View File

@@ -43,7 +43,6 @@ impl FreshpornoProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -56,7 +55,7 @@ impl FreshpornoProvider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -93,7 +92,7 @@ impl FreshpornoProvider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -193,11 +192,11 @@ impl Provider for FreshpornoProvider {
         }
     }
-    fn get_channel(&self,clientversion:ClientVersion) -> Channel {
+    fn get_channel(&self,clientversion:ClientVersion) -> Option<Channel> {
         println!("Getting channel for placeholder with client version: {:?}",clientversion);
         let _ = clientversion;
-        Channel {
+        Some(Channel {
             id:"placeholder".to_string(),name:"PLACEHOLDER".to_string(),description:"PLACEHOLDER FOR PARENT CLASS".to_string(),premium:false,favicon:"https://www.google.com/s2/favicons?sz=64&domain=missav.ws".to_string(),status:"active".to_string(),categories:vec![],options:vec![],nsfw:true,cacheDuration:None,
-        }
+        })
     }
 }

View File

@@ -153,7 +153,7 @@ impl HanimeProvider {
let url = format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug); let url = format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug);
let mut requester = options.requester.clone().unwrap(); let mut requester = options.requester.clone().unwrap();
let text = requester.get(&url).await.unwrap(); let text = requester.get(&url, None).await.unwrap();
let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1]; let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1];
let mut url_vec = vec![]; let mut url_vec = vec![];
@@ -189,7 +189,7 @@ impl HanimeProvider {
         let old_items = match cache.get(&index) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 * 12 {
-                    println!("Cache hit for URL: {}", index);
+                    //println!("Cache hit for URL: {}", index);
                     return Ok(items.clone());
                 }
                 else{
@@ -208,7 +208,7 @@ impl HanimeProvider {
             .ordering(ordering);
         let mut requester = options.requester.clone().unwrap();
-        let response = requester.post("https://search.htv-services.com/search", &search, vec![]).await.unwrap();
+        let response = requester.post_json("https://search.htv-services.com/search", &search, vec![]).await.unwrap();

View File

@@ -44,7 +44,6 @@ impl HomoxxxProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()

src/providers/hqporner.rs (new file, 483 lines)
View File

@@ -0,0 +1,483 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{ICodedDataTrait, decode};
use std::sync::{Arc, RwLock};
use std::thread::sleep;
use std::{thread, vec};
use titlecase::Titlecase;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
Json(serde_json::Error);
}
errors {
Parse(msg: String) {
description("parse error")
display("parse error: {}", msg)
}
}
}
#[derive(Debug, Clone)]
pub struct HqpornerProvider {
url: String,
stars: Arc<RwLock<Vec<FilterOption>>>,
categories: Arc<RwLock<Vec<FilterOption>>>,
}
impl HqpornerProvider {
pub fn new() -> Self {
let provider = HqpornerProvider {
url: "https://hqporner.com".to_string(),
stars: Arc::new(RwLock::new(vec![])),
categories: Arc::new(RwLock::new(vec![])),
};
provider.spawn_initial_load();
provider
}
fn spawn_initial_load(&self) {
let url = self.url.clone();
let stars = Arc::clone(&self.stars);
let categories = Arc::clone(&self.categories);
thread::spawn(move || {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_all()
.build();
if let Ok(runtime) = rt {
runtime.block_on(async move {
if let Err(e) = Self::load_stars(&url, stars).await {
eprintln!("load_stars failed: {e}");
}
if let Err(e) = Self::load_categories(&url, categories).await {
eprintln!("load_categories failed: {e}");
}
});
}
});
}
async fn load_stars(base_url: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new();
let text = requester
.get(&format!("{}/girls", base_url), None)
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let stars_div = text
.split("<span>Girls</span>")
.last()
.and_then(|s| s.split("</ul>").next())
.ok_or_else(|| Error::from("Could not find stars div"))?;
for stars_element in stars_div.split("<li ").skip(1) {
let star_id = stars_element
.split("href=\"/actress/")
.nth(1)
.and_then(|s| s.split('"').next())
.map(|s| s.to_string());
let star_name = stars_element
.split("<a ")
.nth(1)
.and_then(|s| s.split('>').nth(1))
.and_then(|s| s.split('<').next())
.map(|s| s.to_string());
if let (Some(id), Some(name)) = (star_id, star_name) {
Self::push_unique(&stars, FilterOption { id, title: name });
}
}
Ok(())
}
async fn load_categories(
base_url: &str,
categories: Arc<RwLock<Vec<FilterOption>>>,
) -> Result<()> {
let mut requester = Requester::new();
let text = requester
.get(&format!("{}/categories", base_url), None)
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let categories_div = text
.split("<span>Categories</span>")
.last()
.and_then(|s| s.split("</ul>").next())
.ok_or_else(|| Error::from("Could not find categories div"))?;
for categories_element in categories_div.split("<li ").skip(1) {
let category_id = categories_element
.split("href=\"/category/")
.nth(1)
.and_then(|s| s.split('"').next())
.map(|s| s.to_string());
let category_name = categories_element
.split("<a ")
.nth(1)
.and_then(|s| s.split('>').nth(1))
.and_then(|s| s.split('<').next())
.map(|s| s.titlecase());
if let (Some(id), Some(name)) = (category_id, category_name) {
Self::push_unique(&categories, FilterOption { id, title: name });
}
}
Ok(())
}
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
Channel {
id: "hqporner".to_string(),
name: "HQPorner".to_string(),
description: "HD Porn Videos Tube".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=hqporner.com".to_string(),
status: "active".to_string(),
categories: self
.categories
.read()
.map(|c| c.iter().map(|o| o.title.clone()).collect())
.unwrap_or_default(),
options: vec![],
nsfw: true,
cacheDuration: None,
}
}
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
if let Ok(mut vec) = target.write() {
if !vec.iter().any(|x| x.id == item.id) {
vec.push(item);
}
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
_sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let video_url = format!("{}/hdporn/{}", self.url, page);
if let Some((time, items)) = cache.get(&video_url) {
if time.elapsed().unwrap_or_default().as_secs() < 300 {
return Ok(items.clone());
}
}
let mut requester = options.requester.clone().ok_or("No requester")?;
let text = requester
.get(&video_url, None)
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let video_items = self.get_video_items_from_html(text, &mut requester).await;
if !video_items.is_empty() {
cache.insert(video_url, video_items.clone());
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.trim().to_lowercase();
let mut video_url = format!("{}/?q={}&p={}", self.url, search_string, page);
if let Ok(stars) = self.stars.read() {
if let Some(star) = stars
.iter()
.find(|s| s.title.to_lowercase() == search_string)
{
video_url = format!("{}/actress/{}/{}", self.url, star.id, page);
}
}
if let Ok(cats) = self.categories.read() {
if let Some(cat) = cats
.iter()
.find(|c| c.title.to_lowercase() == search_string)
{
video_url = format!("{}/category/{}/{}", self.url, cat.id, page);
}
}
if let Some((time, items)) = cache.get(&video_url) {
if time.elapsed().unwrap_or_default().as_secs() < 300 {
return Ok(items.clone());
}
}
let mut requester = options.requester.clone().ok_or("No requester")?;
let text = requester
.get(&video_url, None)
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let video_items = self.get_video_items_from_html(text, &mut requester).await;
if !video_items.is_empty() {
cache.insert(video_url, video_items.clone());
}
Ok(video_items)
}
async fn get_video_items_from_html(
&self,
html: String,
requester: &mut Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
return vec![];
}
let raw_videos: Vec<String> = html
.split("id=\"footer\"")
.next()
.and_then(|s| s.split("<section class=\"box features\">").nth(2))
.map(|s| {
s.split("<section class=\"box feature\">")
.skip(1)
.map(|v| v.to_string())
.collect()
})
.unwrap_or_default();
let futures = raw_videos
.into_iter()
.map(|el| self.get_video_item(el, requester.clone()));
join_all(futures)
.await
.into_iter()
.inspect(|r| {
if let Err(e) = r {
let msg = e.to_string();
let chain = format_error_chain(e);
tokio::spawn(async move {
let _ = send_discord_error_report(
msg,
Some(chain),
Some("Hqporner Provider"),
None,
file!(),
line!(),
module_path!(),
)
.await;
});
}
})
.filter_map(Result::ok)
.filter(|item| !item.formats.clone().unwrap().is_empty())
.collect()
}
async fn get_video_item(&self, seg: String, mut requester: Requester) -> Result<VideoItem> {
let video_url = format!(
"{}{}",
self.url,
seg.split("<a href=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| ErrorKind::Parse(format!("url \n{seg}").into()))?
);
let title_raw = seg
.split("<h3 class=\"meta-data-title\">")
.nth(1)
.and_then(|s| s.split('>').nth(1))
.and_then(|s| s.split('<').next())
.ok_or_else(|| ErrorKind::Parse(format!("title \n{seg}").into()))?;
let title = decode(title_raw.as_bytes())
.to_string()
.unwrap_or_else(|_| title_raw.to_string())
.titlecase();
let id = video_url
.split('/')
.nth(4)
.and_then(|s| s.split('.').next())
.ok_or_else(|| ErrorKind::Parse(format!("id \n{seg}").into()))?
.to_string();
let thumb = format!(
"https:{}",
seg.split("onmouseleave='defaultImage(\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| ErrorKind::Parse(format!("thumb \n{seg}").into()))?
);
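// Durations are rendered like "12m 34s"; normalize them to "12:34" so parse_time_to_seconds can handle the value.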
let raw_duration = seg
.split("<span class=\"icon fa-clock-o meta-data\">")
.nth(1)
.and_then(|s| s.split("s<").next())
.map(|s| s.replace("m ", ":"))
.unwrap_or_default();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let (tags, formats) = self.extract_media(&video_url, &mut requester).await?;
Ok(
VideoItem::new(id, title, video_url, "hqporner".into(), thumb, duration)
.formats(formats)
.tags(tags),
)
}
async fn extract_media(
&self,
url: &str,
requester: &mut Requester,
) -> Result<(Vec<String>, Vec<VideoFormat>)> {
let mut formats = vec![];
let mut tags = vec![];
let resp = requester
.get_raw_with_headers(
url,
vec![("Referer".to_string(), "https://hqporner.com/".into())],
)
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
let text = resp
.text()
.await
.map_err(|e| Error::from(format!("Text conversion failed: {}", e)))?;
if text.contains("Why do I see it?") {
return Ok((tags, formats));
}
// Extract Stars & Tags
if let Some(stars_block) = text
.split("icon fa-star-o")
.nth(1)
.and_then(|s| s.split("</li>").next())
{
for star_el in stars_block.split("href=\"/actress/").skip(1) {
let id = star_el.split('"').next().unwrap_or("").to_string();
let name = star_el
.split("\">")
.nth(1)
.and_then(|s| s.split('<').next())
.unwrap_or("")
.to_string();
if !name.is_empty() {
tags.push(name.clone());
Self::push_unique(&self.stars, FilterOption { id, title: name });
}
}
}
// Player / Video Extraction
let player_url = format!(
"https:{}",
text.split("url: '/blocks/altplayer.php?i=")
.nth(1)
.and_then(|s| s.split('\'').next())
.ok_or("No player link")?
);
let mut r = requester
.get_raw_with_headers(
&player_url,
vec![("Referer".to_string(), "https://hqporner.com/".into())],
).await;
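// Retry the player request once after a short async pause if the first attempt failed (assumes tokio's time feature is enabled).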
if let Err(_e) = &r {
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
r = requester
.get_raw_with_headers(
&player_url,
vec![("Referer".to_string(), "https://hqporner.com/".into())],
).await;
}
let text2 = r
.map_err(|e| Error::from(format!("Request failed: {}", e)))?
.text()
.await
.map_err(|e| Error::from(format!("Text conversion failed: {}", e)))?;
// Check for error response
if text2.starts_with("ERR:"){
return Ok((tags, formats));
}
let video_element = text2
.split("<video ")
.nth(2)
.and_then(|s| s.split("</video>").next())
.ok_or(format!("No video element\n{player_url}\n{text2}"))?;
for source in video_element.split("<source ").skip(1) {
let title = source
.split("title=\\\"")
.nth(1)
.and_then(|s| s.split("\\\"").next())
.unwrap_or("")
.to_string();
let quality = title.split(' ').next().unwrap_or("HD").to_string();
let media_url = format!(
"https:{}",
source
.split("src=\\\"")
.nth(1)
.and_then(|s| s.split("\\\"").next())
.unwrap_or("")
);
formats.push(
VideoFormat::new(media_url, quality, "mp4".into())
.format_id(title.clone())
.format_note(title),
);
}
Ok((tags, formats))
}
}
#[async_trait]
impl Provider for HqpornerProvider {
async fn get_videos(
&self,
cache: VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let page_num = page.parse::<u8>().unwrap_or(1);
let res = match query {
Some(q) => self.query(cache, page_num, &q, options).await,
None => self.get(cache, page_num, &sort, options).await,
};
res.unwrap_or_else(|e| {
eprintln!("Hqporner error: {e}");
// The closure is not async, so spawn the report instead of dropping the un-awaited future.
let msg = e.to_string();
let chain = format_error_chain(&e);
tokio::spawn(async move {
let _ = send_discord_error_report(msg, Some(chain), None, None, file!(), line!(), module_path!()).await;
});
vec![]
})
}
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
Some(self.build_channel(v))
}
}

433
src/providers/hypnotube.rs Normal file
View File

@@ -0,0 +1,433 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use std::sync::{Arc, RwLock};
use std::{thread, vec};
use titlecase::Titlecase;
use wreq::Version;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
Json(serde_json::Error);
}
errors {
Parse(msg: String) {
description("parse error")
display("parse error: {}", msg)
}
}
}
#[derive(Debug, Clone)]
pub struct HypnotubeProvider {
url: String,
categories: Arc<RwLock<Vec<FilterOption>>>,
}
impl HypnotubeProvider {
pub fn new() -> Self {
let provider = Self {
url: "https://hypnotube.com".to_string(),
categories: Arc::new(RwLock::new(vec![])),
};
provider.spawn_initial_load();
provider
}
fn spawn_initial_load(&self) {
let url = self.url.clone();
let categories = Arc::clone(&self.categories);
thread::spawn(move || {
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(rt) => rt,
Err(e) => {
// Without a runtime there is nothing to drive the async Discord report, so we can only log here.
eprintln!("tokio runtime failed: {e}");
return;
}
};
rt.block_on(async {
if let Err(e) = Self::load_categories(&url, Arc::clone(&categories)).await {
eprintln!("load_categories failed: {e}");
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("HypnoTube Provider"),
Some("Failed to load categories during initial load"),
file!(),
line!(),
module_path!(),
)
.await;
}
});
});
}
async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new();
let text = requester
.get(&format!("{base}/channels/"), Some(Version::HTTP_11))
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let block = text
.split(" title END ")
.last()
.ok_or_else(|| ErrorKind::Parse("categories block".into()))?
.split(" main END ")
.next()
.unwrap_or("");
for el in block.split("<!-- item -->").skip(1) {
let id = el
.split("<a href=\"https://hypnotube.com/channels/")
.nth(1)
.and_then(|s| s.split("/\"").next())
.ok_or_else(|| ErrorKind::Parse(format!("category id: {el}").into()))?
.to_string();
let title = el
.split("title=\"")
.nth(1)
.and_then(|s| s.split("\"").next())
.ok_or_else(|| ErrorKind::Parse(format!("category title: {el}").into()))?
.titlecase();
Self::push_unique(&cats, FilterOption { id, title });
}
Ok(())
}
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
let _ = clientversion;
Channel {
id: "hypnotube".to_string(),
name: "Hypnotube".to_string(),
description: "free video hypno tube for the sissy hypnosis porn fetish".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=hypnotube.com".to_string(),
status: "active".to_string(),
categories: self
.categories
.read()
.unwrap()
.iter()
.map(|c| c.title.clone())
.collect(),
options: vec![ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Sort the Videos".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "most recent".into(),
title: "Most Recent".into(),
},
FilterOption {
id: "most viewed".into(),
title: "Most Viewed".into(),
},
FilterOption {
id: "top rated".into(),
title: "Top Rated".into(),
},
FilterOption {
id: "longest".into(),
title: "Longest".into(),
},
],
multiSelect: false,
}],
nsfw: true,
cacheDuration: Some(1800),
}
}
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
if let Ok(mut vec) = target.write() {
if !vec.iter().any(|x| x.id == item.id) {
vec.push(item);
}
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: &str,
options: ServerOptions,
) -> Vec<VideoItem> {
let sort_string = match sort {
"top rated" => "top-rated",
"most viewed" => "most-viewed",
"longest" => "longest",
_ => "videos",
};
let video_url = format!("{}/{}/page{}.html", self.url, sort_string, page);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return items.clone();
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = match options.requester.clone() {
Some(r) => r,
None => return old_items,
};
let text = match requester.get(&video_url, Some(Version::HTTP_11)).await {
Ok(t) => t,
Err(e) => {
eprintln!("Hypnotube request failed: {e}");
return old_items;
}
};
if text.contains("Sorry, no results were found.") {
return vec![];
}
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone()).await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return old_items;
}
video_items
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Vec<VideoItem> {
let sort_string = match options.sort.as_deref().unwrap_or("") {
"top rated" => "rating",
"most viewed" => "views",
"longest" => "longest",
_ => "newest",
};
let video_url = format!(
"{}/search/videos/{}/{}/page{}.html",
self.url,
query.trim().replace(" ", "%20"),
sort_string,
page
);
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return items.clone();
} else {
let _ = cache.check().await;
return items.clone();
}
}
None => {
vec![]
}
};
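// Search results come from a form-encoded POST to searchgate.php rather than the regular listing GET.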
let mut requester = match options.requester.clone() {
Some(r) => r,
None => return vec![],
};
let response = match requester
.post(
format!("{}/searchgate.php", self.url).as_str(),
format!("q={}&type=videos", query.replace(" ", "+")).as_str(),
vec![("Content-Type", "application/x-www-form-urlencoded")],
)
.await
{
Ok(r) => r,
Err(e) => {
eprintln!("Hypnotube search POST request failed: {}", e);
return vec![];
}
};
let text = match response.text().await {
Ok(t) => t,
Err(e) => {
eprintln!("Hypnotube search response could not be read: {}", e);
return vec![];
}
};
// println!("Hypnotube search POST response status: {}", p.text().await.unwrap_or_default());
// let text = requester.get(&video_url, Some(Version::HTTP_11)).await.unwrap();
if text.contains("Sorry, no results were found.") {
return vec![];
}
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone()).await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return old_items;
}
video_items
}
async fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
eprint!("Hypnotube returned empty or 404 html");
return vec![];
}
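// The result grid is everything before the pagination column and after the last " title END " marker; items are delimited by "<!-- item -->".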
let block = match html
.split("pagination-col col pagination")
.next()
.and_then(|s| s.split(" title END ").last())
{
Some(b) => b,
None => {
eprint!("Hypnotube Provider: Failed to get block from html");
let e = Error::from(ErrorKind::Parse("html".into()));
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("Hypnotube Provider"),
Some(&format!("Failed to get block from html:\n```{html}\n```")),
file!(),
line!(),
module_path!(),
)
.await;
return vec![];
}
};
let mut items = vec![];
for seg in block.split("<!-- item -->").skip(1) {
let video_url = match seg
.split(" href=\"")
.nth(1)
.and_then(|s| s.split('"').next())
{
Some(url) => url.to_string(),
None => {
eprint!("Hypnotube Provider: Failed to parse video url from segment");
let e = Error::from(ErrorKind::Parse("video url".into()));
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("Hypnotube Provider"),
Some(&format!(
"Failed to parse video url from segment:\n```{seg}\n```"
)),
file!(),
line!(),
module_path!(),
)
.await;
continue;
}
};
let mut title = seg
.split(" title=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or_default()
.trim()
.to_string();
title = decode(title.clone().as_bytes())
.to_string()
.unwrap_or(title)
.titlecase();
let id = video_url
.split('/')
.nth(4)
.and_then(|s| s.split('.').next())
.unwrap_or(title.as_str());
let thumb = seg
.split("<img ")
.nth(1)
.and_then(|s| s.split("src=\"").nth(1))
.and_then(|s| s.split("\"").next())
.unwrap_or("")
.to_string();
let raw_duration = seg
.split("<span class=\"time\">")
.nth(1)
.and_then(|s| s.split('<').next())
.unwrap_or("")
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
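// View counts follow the eye icon and may contain thousands separators; strip the commas before parsing.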
let views = seg
.split("<span class=\"icon i-eye\"></span>")
.nth(1)
.and_then(|s| s.split("span class=\"sub-desc\">").nth(1))
.and_then(|s| s.split("<").next())
.unwrap_or("0")
.replace(",", "")
.parse::<u32>()
.unwrap_or(0);
let video_item = VideoItem::new(
id.to_owned(),
title,
video_url,
"hypnotube".into(),
thumb,
duration,
)
.views(views);
items.push(video_item);
}
items
}
}
#[async_trait]
impl Provider for HypnotubeProvider {
async fn get_videos(
&self,
cache: VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let page = page.parse::<u8>().unwrap_or(1);
match query {
Some(q) => self.query(cache, page, &q, options).await,
None => self.get(cache, page, &sort, options).await,
}
}
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
Some(self.build_channel(v))
}
}

413
src/providers/javtiful.rs Normal file
View File

@@ -0,0 +1,413 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use std::sync::{Arc, RwLock};
use std::{vec};
use titlecase::Titlecase;
use wreq::Version;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
Json(serde_json::Error);
}
errors {
Parse(msg: String) {
description("parse error")
display("parse error: {}", msg)
}
}
}
#[derive(Debug, Clone)]
pub struct JavtifulProvider {
url: String,
categories: Arc<RwLock<Vec<FilterOption>>>,
}
impl JavtifulProvider {
pub fn new() -> Self {
let provider = Self {
url: "https://javtiful.com".to_string(),
categories: Arc::new(RwLock::new(vec![])),
};
provider
}
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
let _ = clientversion;
Channel {
id: "javtiful".to_string(),
name: "Javtiful".to_string(),
description: "Watch Porn!".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=javtiful.com".to_string(),
status: "active".to_string(),
categories: self
.categories
.read()
.unwrap()
.iter()
.map(|c| c.title.clone())
.collect(),
options: vec![ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Sort the Videos".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "newest".into(),
title: "Newest".into(),
},
FilterOption {
id: "top rated".into(),
title: "Top Rated".into(),
},
FilterOption {
id: "most viewed".into(),
title: "Most Viewed".into(),
},
FilterOption {
id: "top favorites".into(),
title: "Top Favorites".into(),
},
],
multiSelect: false,
}],
nsfw: true,
cacheDuration: Some(1800),
}
}
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
if let Ok(mut vec) = target.write() {
if !vec.iter().any(|x| x.id == item.id) {
vec.push(item);
}
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let sort_string = match sort {
"top rated" => "/sort=top_rated",
"most viewed" => "/sort=most_viewed",
_ => "",
};
let video_url = format!(
"{}/videos{}?page={}",
self.url, sort_string, page
);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().ok_or("No requester")?;
let text = requester
.get(&video_url, Some(Version::HTTP_2))
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
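// If the requested page is not marked as active in the pagination bar, we are past the last page; return an empty list.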
if page > 1 && !text.contains(&format!("<li class=\"page-item active\"><span class=\"page-link\">{}</span>", page)) {
return Ok(vec![]);
}
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let sort_string = match options.sort.as_deref().unwrap_or("") {
"top rated" => "/sort=top_rated",
"most viewed" => "/sort=most_viewed",
_ => "",
};
let video_url = format!(
"{}/search/videos{}?search_query={}&page={}",
self.url, sort_string, query.replace(" ","+"), page
);
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().ok_or("No requester")?;
let text = requester
.get(&video_url, Some(Version::HTTP_2))
.await
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
if page > 1 && !text.contains(&format!("<li class=\"page-item active\"><span class=\"page-link\">{}</span>", page)) {
return Ok(vec![]);
}
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn get_video_items_from_html(
&self,
html: String,
requester: &mut Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
return vec![];
}
let block = match html
.split("pagination ")
.next()
.and_then(|s| s.split("row row-cols-1 row-cols-sm-2 row-cols-lg-3 row-cols-xl-4").nth(1))
{
Some(b) => b,
None => {
eprint!("Javtiful Provider: Failed to get block from html");
let e = Error::from(ErrorKind::Parse("html".into()));
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("Javtiful Provider"),
Some(&format!("Failed to get block from html:\n```{html}\n```")),
file!(),
line!(),
module_path!(),
).await;
return vec![]
},
};
let futures = block
.split("card ")
.skip(1)
.filter(|seg| !seg.contains("SPONSOR"))
.map(|el| self.get_video_item(el.to_string(), requester.clone()));
join_all(futures)
.await
.into_iter()
.inspect(|r| {
if let Err(e) = r {
eprint!("Javtiful Provider: Failed to get video item:{}\n", e);
// Prepare data to move into the background task
let msg = e.to_string();
let chain = format_error_chain(&e);
// Spawn the report into the background - NO .await here
tokio::spawn(async move {
let _ = send_discord_error_report(
msg,
Some(chain),
Some("Javtiful Provider"),
Some("Failed to get video item"),
file!(), // Note: these might report the utility line
line!(), // better to hardcode or pass from outside
module_path!(),
).await;
});
}
})
.filter_map(Result::ok)
.collect()
}
async fn get_video_item(
&self,
seg: String,
mut requester: Requester,
) -> Result<VideoItem> {
let video_url = seg
.split(" href=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| ErrorKind::Parse("video url\n\n{seg}".into()))?
.to_string();
let mut title = seg
.split(" alt=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| ErrorKind::Parse(format!("video title\n\n{seg}").into()))?
.trim()
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title).titlecase();
let id = video_url
.split('/')
.nth(5)
.and_then(|s| s.split('.').next())
.ok_or_else(|| ErrorKind::Parse("video id\n\n{seg}".into()))?
.to_string();
let thumb_block = seg
.split("<img ")
.nth(1)
.ok_or_else(|| ErrorKind::Parse("thumb block\n\n{seg}".into()))?;
let thumb = thumb_block
.split("data-src=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
let mut preview = seg
.split("data-trailer=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
let raw_duration = seg
.split("label-duration\">")
.nth(1)
.and_then(|s| s.split('<').next())
.unwrap_or("")
.to_string();
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
let (tags, formats, views) =
self.extract_media(&video_url, &mut requester).await?;
if preview.is_empty() {
preview = format!("https://trailers.jav.si/preview/{id}.mp4");
}
let video_item = VideoItem::new(
id,
title,
video_url,
"javtiful".into(),
thumb,
duration,
)
.formats(formats)
.tags(tags)
.preview(preview)
.views(views);
Ok(video_item)
}
async fn extract_media(
&self,
url: &str,
requester: &mut Requester,
) -> Result<(Vec<String>, Vec<VideoFormat>, u32)> {
let text = requester
.get(url, Some(Version::HTTP_2))
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let tags = text.split("related-actress").next()
.and_then(|s| s.split("video-comments").next())
.and_then(|s| s.split(">Tags<").nth(1))
.map(|tag_block| {
tag_block
.split("<a ")
.skip(1)
.filter_map(|tag_el| {
tag_el
.split('>')
.nth(1)
.and_then(|s| s.split('<').next())
.map(|s| decode(s.as_bytes()).to_string().unwrap_or(s.to_string()).titlecase())
})
.collect()
})
.unwrap_or_else(|| vec![]);
for tag in &tags {
Self::push_unique(&self.categories, FilterOption {
id: tag.to_ascii_lowercase().replace(" ","+"),
title: tag.to_string(),
});
}
let views = text.split(" Views ")
.next()
.and_then(|s| s.split(" ").last())
.and_then(|s| s.replace(".","")
.parse::<u32>().ok())
.unwrap_or(0);
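// Playback goes through the javtiful proxy host with a single assumed 1080p MP4 format; the page URL is rewritten to the proxy domain.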
let quality = "1080p".to_string();
let video_url = url.replace("javtiful.com", "hottub.spacemoehre.de/proxy/javtiful");
Ok((
tags,
vec![VideoFormat::new(video_url, quality, "video/mp4".into())],
views,
))
}
}
#[async_trait]
impl Provider for JavtifulProvider {
async fn get_videos(
&self,
cache: VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let page = page.parse::<u8>().unwrap_or(1);
let res = match query {
Some(q) => self.query(cache, page, &q, options).await,
None => self.get(cache, page, &sort, options).await,
};
res.unwrap_or_else(|e| {
eprintln!("javtiful error: {e}");
vec![]
})
}
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
Some(self.build_channel(v))
}
}

View File

@@ -1,22 +1,31 @@
use std::vec;
use async_trait::async_trait;
+use diesel::r2d2;
use error_chain::error_chain;
use htmlentity::entity::{decode, ICodedDataTrait};
use futures::future::join_all;
+use wreq::Version;
use crate::db;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
+use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::videos::ServerOptions;
use crate::videos::{VideoItem};
use crate::DbPool;
use crate::util::requester::Requester;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
JsonError(serde_json::Error);
+Pool(r2d2::Error); // Assuming r2d2 or similar for pool
+}
+errors {
+ParsingError(t: String) {
+description("parsing error")
+display("Parsing error: '{}'", t)
+}
}
}
@@ -24,259 +33,189 @@ error_chain! {
pub struct MissavProvider {
url: String,
}
impl MissavProvider {
pub fn new() -> Self {
MissavProvider {
url: "https://missav.ws".to_string()
}
}
-async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
-// Extract needed fields from options at the start
-let language = options.language.clone().unwrap();
-let filter = options.filter.clone().unwrap();
-let mut requester = options.requester.clone().unwrap();
-let url_str = format!("{}/{}/{}?page={}&sort={}", self.url, language, filter, page, sort);
-let old_items = match cache.get(&url_str) {
-Some((time, items)) => {
-if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-return Ok(items.clone());
-}
-else{
-items.clone()
-}
-}
-None => {
-vec![]
-}
-};
-let text = requester.get(&url_str).await.unwrap();
-// Pass a reference to options if needed, or reconstruct as needed
-let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool, requester).await;
-if !video_items.is_empty() {
-cache.remove(&url_str);
-cache.insert(url_str.clone(), video_items.clone());
-} else{
-return Ok(old_items);
-}
-Ok(video_items)
-}
+async fn get(&self, cache: VideoCache, pool: DbPool, page: u8, mut sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
+// Use ok_or to avoid unwrapping options
+let language = options.language.as_ref().ok_or("Missing language")?;
+let filter = options.filter.as_ref().ok_or("Missing filter")?;
+let mut requester = options.requester.clone().ok_or("Missing requester")?;
+if !sort.is_empty() {
+sort = format!("&sort={}", sort);
+}
+let url_str = format!("{}/{}/{}?page={}{}", self.url, language, filter, page, sort);
+if let Some((time, items)) = cache.get(&url_str) {
+if time.elapsed().unwrap_or_default().as_secs() < 3600 {
+return Ok(items.clone());
+}
+}
+let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
+eprintln!("Error fetching Missav URL {}: {}", url_str, e);
+let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
+"".to_string()
+});
+let video_items = self.get_video_items_from_html(text, pool, requester).await;
+if !video_items.is_empty() {
+cache.insert(url_str, video_items.clone());
+}
+Ok(video_items)
+}
-async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
-// Extract needed fields from options at the start
-let language = options.language.clone().unwrap();
-let mut requester = options.requester.clone().unwrap();
-let search_string = query.replace(" ", "%20");
-let url_str = format!(
-"{}/{}/search/{}?page={}&sort={}",
-self.url, language, search_string, page, sort
-);
-// Check our Video Cache. If the result is younger than 1 hour, we return it.
-let old_items = match cache.get(&url_str) {
-Some((time, items)) => {
-if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-return Ok(items.clone());
-}
-else{
-let _ = cache.check().await;
-return Ok(items.clone())
-}
-}
-None => {
-vec![]
-}
-};
-let text = requester.get(&url_str).await.unwrap();
-let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool, requester).await;
-if !video_items.is_empty() {
-cache.remove(&url_str);
-cache.insert(url_str.clone(), video_items.clone());
-} else{
-return Ok(old_items);
-}
-Ok(video_items)
-}
+async fn query(&self, cache: VideoCache, pool: DbPool, page: u8, query: &str, mut sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
+let language = options.language.as_ref().ok_or("Missing language")?;
+let mut requester = options.requester.clone().ok_or("Missing requester")?;
+let search_string = query.replace(" ", "%20");
+if !sort.is_empty() {
+sort = format!("&sort={}", sort);
+}
+let url_str = format!("{}/{}/search/{}?page={}{}", self.url, language, search_string, page, sort);
+if let Some((time, items)) = cache.get(&url_str) {
+if time.elapsed().unwrap_or_default().as_secs() < 3600 {
+return Ok(items.clone());
+}
+}
+let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
+eprintln!("Error fetching Missav URL {}: {}", url_str, e);
+let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
+"".to_string()
+});
+let video_items = self.get_video_items_from_html(text, pool, requester).await;
+if !video_items.is_empty() {
+cache.insert(url_str, video_items.clone());
+}
+Ok(video_items)
+}
async fn get_video_items_from_html(&self, html: String, pool: DbPool, requester: Requester) -> Vec<VideoItem> {
-if html.is_empty() {
-println!("HTML is empty");
-return vec![];
-}
-let raw_videos = html
-.split("@mouseenter=\"setPreview(\'")
-.collect::<Vec<&str>>()[1..]
-.to_vec();
-let mut urls: Vec<String> = vec![];
-for video_segment in &raw_videos {
-// let vid = video_segment.split("\n").collect::<Vec<&str>>();
-// for (index, line) in vid.iter().enumerate() {
-// println!("Line {}: {}", index, line.to_string().trim());
-// }
-let url_str = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
-.split("\"")
-.collect::<Vec<&str>>()[0]
-.to_string();
-urls.push(url_str.clone());
-}
-let futures = urls.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone(), requester.clone()));
-let results: Vec<Result<VideoItem>> = join_all(futures).await;
-let video_items: Vec<VideoItem> = results
-.into_iter()
-.filter_map(Result::ok)
-.collect();
-return video_items;
+if html.is_empty() { return vec![]; }
+let segments: Vec<&str> = html.split("@mouseenter=\"setPreview(\'").collect();
+if segments.len() < 2 { return vec![]; }
+let mut urls = vec![];
+for video_segment in &segments[1..] {
+// Safer parsing: find start and end of href
+if let Some(start) = video_segment.find("<a href=\"") {
+let rest = &video_segment[start + 9..];
+if let Some(end) = rest.find('\"') {
+urls.push(rest[..end].to_string());
+}
+}
+}
+let futures = urls.into_iter().map(|url| self.get_video_item(url, pool.clone(), requester.clone()));
+join_all(futures).await.into_iter().filter_map(Result::ok).collect()
}
async fn get_video_item(&self, url_str: String, pool: DbPool, mut requester: Requester) -> Result<VideoItem> {
-let mut conn = pool.get().expect("couldn't get db connection from pool");
-let db_result = db::get_video(&mut conn,url_str.clone());
-match db_result {
-Ok(Some(entry)) => {
-let video_item: VideoItem = serde_json::from_str(entry.as_str()).unwrap();
-return Ok(video_item)
-}
-Ok(None) => {
-}
-Err(e) => {
-println!("Error fetching video from database: {}", e);
-}
-}
-drop(conn);
-let vid = requester.get(&url_str).await.unwrap();
-let mut title = vid.split("<meta property=\"og:title\" content=\"").collect::<Vec<&str>>()[1]
-.split("\"")
-.collect::<Vec<&str>>()[0].trim()
-.to_string();
+// 1. Database Check
+{
+let mut conn = pool.get().map_err(|e| Error::from(format!("Pool error: {}", e)))?;
+if let Ok(Some(entry)) = db::get_video(&mut conn, url_str.clone()) {
+if let Ok(video_item) = serde_json::from_str::<VideoItem>(entry.as_str()) {
+return Ok(video_item);
+}
+}
+}
+// 2. Fetch Page
+let vid = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
+eprintln!("Error fetching Missav URL {}: {}", url_str, e);
+let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
+"".to_string()
+});
+// Helper closure to extract content between two strings
+let extract = |html: &str, start_tag: &str, end_tag: &str| -> Option<String> {
+let start = html.find(start_tag)? + start_tag.len();
+let rest = &html[start..];
+let end = rest.find(end_tag)?;
+Some(rest[..end].to_string())
+};
+let mut title = extract(&vid, "<meta property=\"og:title\" content=\"", "\"")
+.ok_or_else(|| ErrorKind::ParsingError(format!("title\n{:?}", vid)))?;
title = decode(title.as_bytes()).to_string().unwrap_or(title);
if url_str.contains("uncensored") {
title = format!("[Uncensored] {}", title);
}
-let thumb = vid.split("<meta property=\"og:image\" content=\"").collect::<Vec<&str>>()[1]
-.split("\"")
-.collect::<Vec<&str>>()[0]
-.to_string();
-let raw_duration = vid.split("<meta property=\"og:video:duration\" content=\"").collect::<Vec<&str>>()[1]
-.split("\"")
-.collect::<Vec<&str>>()[0]
-.to_string();
-let duration = raw_duration.parse::<u32>().unwrap_or(0);
-let id = url_str.split("/").collect::<Vec<&str>>().last().unwrap()
-.to_string();
+let thumb = extract(&vid, "<meta property=\"og:image\" content=\"", "\"")
+.unwrap_or_default();
+let duration = extract(&vid, "<meta property=\"og:video:duration\" content=\"", "\"")
+.and_then(|d| d.parse::<u32>().ok())
+.unwrap_or(0);
+let id = url_str.split('/').last().ok_or("No ID found")?.to_string();
+// 3. Extract Tags (Generic approach to avoid repetitive code)
let mut tags = vec![];
-if vid.contains("<span>Actress:</span>"){
-for actress_snippet in vid.split("<span>Actress:</span>").collect::<Vec<&str>>()[1]
-.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
-let tag = actress_snippet.split("<").collect::<Vec<&str>>()[0].trim()
-.to_string();
-if !tag.is_empty(){
-tags.push(format!("@actress:{}", tag));
-}
-}
-}
-if vid.contains("<span>Actor:</span>"){
-for actor_snippet in vid.split("<span>Actor:</span>").collect::<Vec<&str>>()[1]
-.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
-let tag = actor_snippet.split("<").collect::<Vec<&str>>()[0].trim()
-.to_string();
-if !tag.is_empty(){
-tags.push(format!("@actor:{}", tag));
-}
-}
-}
-if vid.contains("<span>Maker:</span>"){
-for maker_snippet in vid.split("<span>Maker:</span>").collect::<Vec<&str>>()[1]
-.split("</div>").collect::<Vec<&str>>()[0]
-.split("class=\"text-nord13 font-medium\">"){
-let tag = maker_snippet.split("<").collect::<Vec<&str>>()[0].trim()
-.to_string();
-if !tag.is_empty(){
-tags.push(format!("@maker:{}", tag));
-}
-}
-}
-if vid.contains("<span>Genre:</span>"){
-for tag_snippet in vid.split("<span>Genre:</span>").collect::<Vec<&str>>()[1]
-.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
-let tag = tag_snippet.split("<").collect::<Vec<&str>>()[0].trim()
-.to_string();
-if !tag.is_empty(){
-tags.push(format!("@genre:{}", tag));
-}
-}
-}
+for (label, prefix) in [("Actress:", "@actress"), ("Actor:", "@actor"), ("Maker:", "@maker"), ("Genre:", "@genre")] {
+let marker = format!("<span>{}</span>", label);
+if let Some(section) = extract(&vid, &marker, "</div>") {
+for part in section.split("class=\"text-nord13 font-medium\">").skip(1) {
+if let Some(val) = part.split('<').next() {
+let clean = val.trim();
+if !clean.is_empty() {
+tags.push(format!("{}:{}", prefix, clean));
+}
+}
+}
+}
+}
-let preview = format!("https://fourhoi.com/{}/preview.mp4",id.clone());
-let mut video_url_parts = vid.split("m3u8").collect::<Vec<&str>>()[1]
-.split("https").collect::<Vec<&str>>()[0]
-.split("|").collect::<Vec<&str>>();
-video_url_parts.reverse();
-let video_url = format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
-video_url_parts[1],
-video_url_parts[2],
-video_url_parts[3],
-video_url_parts[4],
-video_url_parts[5],
-video_url_parts[6],
-video_url_parts[7]
-);
-let video_item = VideoItem::new(
-id,
-title,
-video_url.clone(),
-"missav".to_string(),
-thumb,
-duration,
-)
-.tags(tags)
-.preview(preview)
-;
-let mut conn = pool.get().expect("couldn't get db connection from pool");
-let insert_result = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item)?);
-match insert_result{
-Ok(_) => (),
-Err(e) => {println!("{:?}", e); }
-}
-drop(conn);
-return Ok(video_item);
+// 4. Extract Video URL (The m3u8 logic)
+let video_url = (|| {
+let parts_str = vid.split("m3u8").nth(1)?.split("https").next()?;
+let mut parts: Vec<&str> = parts_str.split('|').collect();
+parts.reverse();
+if parts.len() < 8 { return None; }
+Some(format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
+parts[1], parts[2], parts[3], parts[4], parts[5], parts[6], parts[7]))
+})().ok_or_else(|| ErrorKind::ParsingError(format!("video_url\n{:?}", vid).to_string()))?;
+let video_item = VideoItem::new(id, title, video_url, "missav".to_string(), thumb, duration)
+.tags(tags)
+.preview(format!("https://fourhoi.com/{}/preview.mp4", url_str.split('/').last().unwrap_or_default()));
+// 5. Cache to DB
+if let Ok(mut conn) = pool.get() {
+let _ = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item).unwrap_or_default());
+}
+Ok(video_item)
}
}
#[async_trait]
impl Provider for MissavProvider {
-async fn get_videos(
-&self,
-cache: VideoCache,
-pool: DbPool,
-sort: String,
-query: Option<String>,
-page: String,
-per_page: String,
-options: ServerOptions,
-) -> Vec<VideoItem> {
-let _ = per_page;
-let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
-Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q, sort, options).await,
-None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), sort, options).await,
-};
-match videos {
-Ok(v) => v,
-Err(e) => {
-println!("Error fetching videos: {}", e);
-vec![]
-}
-}
-}
+async fn get_videos(&self, cache: VideoCache, pool: DbPool, sort: String, query: Option<String>, page: String, _per_page: String, options: ServerOptions) -> Vec<VideoItem> {
+let page_num = page.parse::<u8>().unwrap_or(1);
+let result = match query {
+Some(q) => self.query(cache, pool, page_num, &q, sort, options).await,
+None => self.get(cache, pool, page_num, sort, options).await,
+};
+result.unwrap_or_else(|e| {
+eprintln!("Error fetching videos: {}", e);
+let _ = send_discord_error_report(e.to_string(), Some(format_error_chain(&e)), None, None, file!(), line!(), module_path!());
+vec![]
+})
+}
}

View File

@@ -36,6 +36,11 @@ pub mod tnaflix;
pub mod pornxp;
pub mod rule34gen;
pub mod xxdbx;
+pub mod hqporner;
+pub mod noodlemagazine;
+pub mod pimpbunny;
+pub mod javtiful;
+pub mod hypnotube;
// convenient alias
pub type DynProvider = Arc<dyn Provider>;
@@ -48,6 +53,12 @@ pub static ALL_PROVIDERS: Lazy<HashMap<&'static str, DynProvider>> = Lazy::new(|
m.insert("pornxp", Arc::new(pornxp::PornxpProvider::new()) as DynProvider);
m.insert("rule34gen", Arc::new(rule34gen::Rule34genProvider::new()) as DynProvider);
m.insert("xxdbx", Arc::new(xxdbx::XxdbxProvider::new()) as DynProvider);
+m.insert("hqporner", Arc::new(hqporner::HqpornerProvider::new()) as DynProvider);
+m.insert("pmvhaven", Arc::new(pmvhaven::PmvhavenProvider::new()) as DynProvider);
+m.insert("noodlemagazine", Arc::new(noodlemagazine::NoodlemagazineProvider::new()) as DynProvider);
+m.insert("pimpbunny", Arc::new(pimpbunny::PimpbunnyProvider::new()) as DynProvider);
+m.insert("javtiful", Arc::new(javtiful::JavtifulProvider::new()) as DynProvider);
+m.insert("hypnotube", Arc::new(hypnotube::HypnotubeProvider::new()) as DynProvider);
// add more here as you migrate them
m
});
@@ -71,13 +82,13 @@ pub trait Provider: Send + Sync {
options: ServerOptions,
) -> Vec<VideoItem>;
-fn get_channel(&self, clientversion: ClientVersion) -> Channel {
+fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
println!(
"Getting channel for placeholder with client version: {:?}",
clientversion
);
let _ = clientversion;
-Channel {
+Some(Channel {
id: "placeholder".to_string(),
name: "PLACEHOLDER".to_string(),
description: "PLACEHOLDER FOR PARENT CLASS".to_string(),
@@ -88,6 +99,6 @@ pub trait Provider: Send + Sync {
options: vec![],
nsfw: true,
cacheDuration: None,
-}
+})
}
}

View File

@@ -0,0 +1,308 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::requester::Requester;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use wreq::Version;
use titlecase::Titlecase;
use std::vec;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
}
errors {
Parse(msg: String)
}
}
#[derive(Debug, Clone)]
pub struct NoodlemagazineProvider {
url: String,
}
impl NoodlemagazineProvider {
pub fn new() -> Self {
Self {
url: "https://noodlemagazine.com".to_string(),
}
}
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
Channel {
id: "noodlemagazine".into(),
name: "Noodlemagazine".into(),
description: "The Best Search Engine of HD Videos".into(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=noodlemagazine.com".into(),
status: "active".into(),
categories: vec![],
options: vec![],
nsfw: true,
cacheDuration: Some(1800),
}
}
async fn get(
&self,
cache: VideoCache,
page: u8,
_sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let video_url = format!(
"{}/popular/recent?sort_by=views&sort_order=desc&p={}",
self.url,
page.saturating_sub(1)
);
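// The site's pages are zero-based, hence page.saturating_sub(1).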
let old_items = match cache.get(&video_url) {
Some((t, i)) if t.elapsed().unwrap_or_default().as_secs() < 300 => return Ok(i.clone()),
Some((_, i)) => i.clone(),
None => vec![],
};
let mut requester = match options.requester.clone() {
Some(r) => r,
None => return Ok(old_items),
};
let text = requester
.get(&video_url, Some(Version::HTTP_2))
.await
.unwrap_or_default();
let items = self.get_video_items_from_html(text, requester).await;
if items.is_empty() {
Ok(old_items)
} else {
cache.remove(&video_url);
cache.insert(video_url, items.clone());
Ok(items)
}
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let q = query.trim().replace(' ', "%20");
let video_url = format!("{}/video/{}?p={}", self.url, q, page.saturating_sub(1));
let old_items = match cache.get(&video_url) {
Some((t, i)) if t.elapsed().unwrap_or_default().as_secs() < 300 => return Ok(i.clone()),
Some((_, i)) => i.clone(),
None => vec![],
};
let mut requester = match options.requester.clone() {
Some(r) => r,
None => return Ok(old_items),
};
let text = requester
.get(&video_url, Some(Version::HTTP_2))
.await
.unwrap_or_default();
let items = self.get_video_items_from_html(text, requester).await;
if items.is_empty() {
Ok(old_items)
} else {
cache.remove(&video_url);
cache.insert(video_url, items.clone());
Ok(items)
}
}
async fn get_video_items_from_html(
&self,
html: String,
requester: Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
return vec![];
}
let section = match html.split(">Show more</div>").next() {
Some(s) => s,
None => return vec![],
};
let list = match section
.split("<div class=\"list_videos\" id=\"list_videos\">")
.nth(1)
{
Some(l) => l,
None => return vec![],
};
let raw_videos = list
.split("<div class=\"item\">")
.skip(1)
.map(|s| s.to_string());
let futures = raw_videos.map(|v| self.get_video_item(v, requester.clone()));
let results = join_all(futures).await;
results.into_iter().filter_map(Result::ok).collect()
}
async fn get_video_item(
&self,
video_segment: String,
requester: Requester,
) -> Result<VideoItem> {
let href = video_segment
.split("<a href=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| Error::from("missing href"))?;
let video_url = format!("{}{}", self.url, href);
let mut title = video_segment
.split("<div class=\"title\">")
.nth(1)
.and_then(|s| s.split('<').next())
.unwrap_or("")
.trim()
.to_string();
title = decode(title.as_bytes())
.to_string()
.unwrap_or(title)
.titlecase();
let id = video_url
.split('/')
.nth(4)
.and_then(|s| s.split('.').next())
.ok_or_else(|| Error::from("missing id"))?
.to_string();
let thumb = video_segment
.split("data-src=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
let raw_duration = video_segment
.split("#clock-o\"></use></svg>")
.nth(1)
.and_then(|s| s.split('<').next())
.unwrap_or("0:00");
let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;
let views = video_segment
.split("#eye\"></use></svg>")
.nth(1)
.and_then(|s| s.split('<').next())
.and_then(|v| parse_abbreviated_number(v.trim()))
.unwrap_or(0);
let formats = self
.extract_media(&video_url, requester)
.await
.ok_or_else(|| Error::from("media extraction failed"))?;
Ok(
VideoItem::new(
id,
title,
video_url,
"noodlemagazine".into(),
thumb,
duration,
)
.views(views)
.formats(formats),
)
}
async fn extract_media(
&self,
video_url: &String,
mut requester: Requester,
) -> Option<Vec<VideoFormat>> {
let text = requester
.get(video_url, Some(Version::HTTP_2))
.await
.unwrap_or_default();
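// The player page embeds its sources as "window.playlist = {...};" JSON; parse it and emit one format per source entry.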
let json_str = text
.split("window.playlist = ")
.nth(1)?
.split(';')
.next()?;
let json: serde_json::Value = serde_json::from_str(json_str).ok()?;
let sources = json["sources"].as_array()?;
let mut formats = vec![];
for s in sources {
let file = s["file"].as_str()?.to_string();
let label = s["label"].as_str().unwrap_or("unknown").to_string();
formats.push(
VideoFormat::new(file, label.clone(), "video/mp4".into())
.format_id(label.clone())
.format_note(label.clone())
.http_header("Referer".into(), video_url.clone()),
);
}
}
// Reverse the source order so the highest quality ends up first.
Some(formats.into_iter().rev().collect())
}
}
#[async_trait]
impl Provider for NoodlemagazineProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = pool;
let _ = per_page;
let page = page.parse::<u8>().unwrap_or(1);
let res = match query {
Some(q) => self.query(cache, page, &q, options).await,
None => self.get(cache, page, &sort, options).await,
};
res.unwrap_or_else(|e| {
eprintln!("Noodlemagazine error: {e}");
vec![]
})
}
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
Some(self.build_channel(clientversion))
}
}

View File

@@ -44,7 +44,6 @@ impl OkpornProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()

View File

@@ -45,7 +45,6 @@ impl OkxxxProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()

View File

@@ -84,24 +84,31 @@ impl OmgxxxProvider {
let mut requester = util::requester::Requester::new();
for page in [1..10].into_iter().flatten() {
let text = requester
-.get(format!("{}/models/total-videos/{}/?gender_id=0", &base_url, page).as_str())
+.get(
+format!("{}/models/total-videos/{}/?gender_id=0", &base_url, page).as_str(),
+None,
+)
.await
.unwrap();
if text.contains("404 Not Found") || text.is_empty() {
break;
}
let stars_div = text
-.split("id=\"list_models_models_list_items\"")
-.collect::<Vec<&str>>()[1]
-.split("class=\"pagination\"")
+.split("<div class=\"list-models\">")
+.collect::<Vec<&str>>()
+.last()
+.unwrap()
+.split("custom_list_models_models_list_pagination")
.collect::<Vec<&str>>()[0];
for stars_element in stars_div.split("<a ").collect::<Vec<&str>>()[1..].to_vec() {
let star_url = stars_element.split("href=\"").collect::<Vec<&str>>()[1]
.split("\"")
.collect::<Vec<&str>>()[0];
let star_id = star_url.split("/").collect::<Vec<&str>>()[4].to_string();
-let star_name = stars_element.split("title=\"").collect::<Vec<&str>>()[1]
-.split("\"")
+let star_name = stars_element
+.split("<strong class=\"title\">")
+.collect::<Vec<&str>>()[1]
+.split("<")
.collect::<Vec<&str>>()[0]
.to_string();
Self::push_unique(
@@ -122,7 +129,7 @@ impl OmgxxxProvider {
loop {
page += 1;
let text = requester
-.get(format!("{}/sites/{}/", &base_url, page).as_str())
+.get(format!("{}/sites/{}/", &base_url, page).as_str(), None)
.await
.unwrap();
if text.contains("404 Not Found") || text.is_empty() {
@@ -158,7 +165,7 @@ impl OmgxxxProvider {
async fn load_networks(base_url: &str, networks: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = util::requester::Requester::new();
-let text = requester.get(&base_url).await.unwrap();
+let text = requester.get(&base_url, None).await.unwrap();
let networks_div = text.split("class=\"sites__list\"").collect::<Vec<&str>>()[1]
.split("</div>")
.collect::<Vec<&str>>()[0];
@@ -333,7 +340,6 @@ impl OmgxxxProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()
@@ -345,7 +351,7 @@ impl OmgxxxProvider {
};
let mut requester = options.requester.clone().unwrap();
-let text = requester.get(&video_url).await.unwrap();
+let text = requester.get(&video_url, None).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
@@ -364,18 +370,35 @@ impl OmgxxxProvider {
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let mut search_type = "search";
-let mut search_string = query.to_string().to_lowercase().trim().replace(" ", "-");
-if query.starts_with("@") {
-search_type = query.split(":").collect::<Vec<&str>>()[0].trim_start_matches("@");
-search_string = search_string.split(":").collect::<Vec<&str>>()[1].to_string();
-}
-let video_url = format!(
-"{}/{}/{}/{}/",
-self.url,
-search_type,
-search_string,
-page
-);
+let mut search_string = query.to_string().to_ascii_lowercase().trim().to_string();
+match self
+.stars
+.read()
+.unwrap()
+.iter()
+.find(|s| s.title.to_ascii_lowercase() == search_string)
+{
+Some(star) => {
+search_type = "models";
+search_string = star.id.clone();
+}
+_ => {}
+}
+match self
+.sites
+.read()
+.unwrap()
+.iter()
+.find(|s| s.title.to_ascii_lowercase() == search_string)
+{
+Some(site) => {
+search_type = "sites";
+search_string = site.id.clone();
+}
+_ => {}
+}
+let mut video_url = format!("{}/{}/{}/{}/", self.url, search_type, search_string, page);
+video_url = video_url.replace(" ", "+");
// Check our Video Cache. If the result is younger than 1 hour, we return it.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
@@ -393,7 +416,7 @@ impl OmgxxxProvider {
let mut requester = options.requester.clone().unwrap();
-let text = requester.get(&video_url).await.unwrap();
+let text = requester.get(&video_url, None).await.unwrap();
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
if !video_items.is_empty() {
cache.remove(&video_url);
@@ -517,17 +540,36 @@ impl OmgxxxProvider {
.collect::<Vec<&str>>()[1..]
.into_iter()
.map(|s| {
-format!(
-"@models:{}",
-s.split("/").collect::<Vec<&str>>()[4].to_string()
-)
+Self::push_unique(
+&self.stars,
+FilterOption {
+id: s.split("/").collect::<Vec<&str>>()[4].to_string(),
+title: s.split(">").collect::<Vec<&str>>()[1]
+.split("<")
+.collect::<Vec<&str>>()[0]
+.trim()
+.to_string(),
+},
+);
+s.split(">").collect::<Vec<&str>>()[1]
+.split("<")
+.collect::<Vec<&str>>()[0]
+.trim()
+.to_string()
})
.collect::<Vec<String>>()
.to_vec(),
false => vec![],
};
if !site_id.is_empty() {
-tags.push(format!("@sites:{}", site_id));
+Self::push_unique(
+&self.sites,
+FilterOption {
+id: site_id,
+title: site_name.to_string(),
+},
+);
+tags.push(site_name.to_string());
}
let video_item = VideoItem::new(
@@ -579,7 +621,7 @@ impl Provider for OmgxxxProvider {
}
}
}
-fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
-self.build_channel(clientversion)
+fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
+Some(self.build_channel(clientversion))
}
}

View File

@@ -51,7 +51,7 @@ impl ParadisehillProvider {
}
};
-let text = requester.get(&url_str).await.unwrap();
+let text = requester.get(&url_str, None).await.unwrap();
// Pass a reference to options if needed, or reconstruct as needed
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), requester)
@@ -93,7 +93,7 @@ impl ParadisehillProvider {
vec![]
}
};
-let text = requester.get(&url_str).await.unwrap();
+let text = requester.get(&url_str, None).await.unwrap();
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), requester)
.await;
@@ -143,7 +143,7 @@ impl ParadisehillProvider {
}
async fn get_video_item(&self, url_str: String, mut requester: Requester) -> Result<VideoItem> {
-let vid = requester.get(&url_str).await.unwrap();
+let vid = requester.get(&url_str, None).await.unwrap();
let mut title = vid
.split("<meta property=\"og:title\" content=\"")
.collect::<Vec<&str>>()[1]

View File

@@ -45,7 +45,6 @@ impl PerfectgirlsProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()

View File

@@ -11,6 +11,7 @@ use futures::future::join_all;
 use htmlentity::entity::{ICodedDataTrait, decode};
 use serde::Deserialize;
 use serde::Serialize;
+use wreq::Version;
 use std::vec;
 use wreq::Client;
 use wreq_util::Emulation;
@@ -59,7 +60,7 @@ impl PerverzijaProvider {
         let old_items = match cache.get(&url_str) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
-                    println!("Cache hit for URL: {}", url_str);
+                    //println!("Cache hit for URL: {}", url_str);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -71,7 +72,7 @@ impl PerverzijaProvider {
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&url_str).await.unwrap();
+        let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool);
         if !video_items.is_empty() {
             cache.remove(&url_str);
@@ -122,7 +123,7 @@ impl PerverzijaProvider {
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&url_str).await.unwrap();
+        let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap();
         let video_items: Vec<VideoItem> = match query_parse {
             true => {
                 self.get_video_items_from_html_query(text.clone(), pool)
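Note on the signature change in the hunks above: every call to the repo's own `Requester::get` now takes an `Option<wreq::Version>` as a second argument. `None` keeps the requester's default protocol, while `Some(Version::HTTP_2)` pins HTTP/2 for hosts that need it. A minimal sketch of a call site, assuming the `Requester` helper from `crate::util::requester` behaves as these diffs show (the function name and URLs below are illustrative only):

use crate::util::requester::Requester;
use wreq::Version;

// Hypothetical call site; only the call shape is taken from the hunks above.
async fn fetch_pages(base: &str) -> Option<(String, String)> {
    let mut requester = Requester::new();
    // None: let the requester choose its default HTTP version.
    let page_one = requester.get(&format!("{base}/page/1/"), None).await.ok()?;
    // Some(Version::HTTP_2): force HTTP/2, as the providers above now do.
    let page_two = requester
        .get(&format!("{base}/page/2/"), Some(Version::HTTP_2))
        .await
        .ok()?;
    Some((page_one, page_two))
}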

537
src/providers/pimpbunny.rs Normal file
View File

@@ -0,0 +1,537 @@
use crate::DbPool;
use crate::api::ClientVersion;
use crate::providers::Provider;
use crate::status::*;
use crate::util::cache::VideoCache;
use crate::util::discord::{format_error_chain, send_discord_error_report};
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain;
use futures::future::join_all;
use htmlentity::entity::{decode, ICodedDataTrait};
use std::sync::{Arc, RwLock};
use std::{thread, vec};
use titlecase::Titlecase;
use wreq::Version;
error_chain! {
foreign_links {
Io(std::io::Error);
HttpRequest(wreq::Error);
Json(serde_json::Error);
}
errors {
Parse(msg: String) {
description("parse error")
display("parse error: {}", msg)
}
}
}
#[derive(Debug, Clone)]
pub struct PimpbunnyProvider {
url: String,
stars: Arc<RwLock<Vec<FilterOption>>>,
categories: Arc<RwLock<Vec<FilterOption>>>,
}
impl PimpbunnyProvider {
pub fn new() -> Self {
let provider = Self {
url: "https://pimpbunny.com".to_string(),
stars: Arc::new(RwLock::new(vec![])),
categories: Arc::new(RwLock::new(vec![])),
};
provider.spawn_initial_load();
provider
}
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
let _ = clientversion;
Channel {
id: "pimpbunny".to_string(),
name: "Pimpbunny".to_string(),
description: "Watch Porn!".to_string(),
premium: false,
favicon: "https://www.google.com/s2/favicons?sz=64&domain=pimpbunny.com".to_string(),
status: "active".to_string(),
categories: self
.categories
.read()
.unwrap()
.iter()
.map(|c| c.title.clone())
.collect(),
options: vec![ChannelOption {
id: "sort".to_string(),
title: "Sort".to_string(),
description: "Sort the Videos".to_string(),
systemImage: "list.number".to_string(),
colorName: "blue".to_string(),
options: vec![
FilterOption {
id: "featured".into(),
title: "Featured".into(),
},
FilterOption {
id: "most recent".into(),
title: "Most Recent".into(),
},
FilterOption {
id: "most viewed".into(),
title: "Most Viewed".into(),
},
FilterOption {
id: "best rated".into(),
title: "Best Rated".into(),
},
],
multiSelect: false,
}],
nsfw: true,
cacheDuration: None,
}
}
fn spawn_initial_load(&self) {
let url = self.url.clone();
let stars = Arc::clone(&self.stars);
let categories = Arc::clone(&self.categories);
        thread::spawn(move || {
            // A plain (non-async) closure is required here: `thread::spawn` never polls a
            // returned future, so an `async` closure would silently do nothing.
            let rt = match tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
            {
                Ok(rt) => rt,
                Err(e) => {
                    eprintln!("tokio runtime failed: {e}");
                    // No tokio runtime exists at this point, so drive the report future
                    // with the lightweight executor from the `futures` crate instead.
                    futures::executor::block_on(send_discord_error_report(
                        e.to_string(),
                        Some(format_error_chain(&e)),
                        Some("Pimpbunny Provider"),
                        Some("Failed to create tokio runtime"),
                        file!(),
                        line!(),
                        module_path!(),
                    ));
                    return;
                }
            };
rt.block_on(async {
if let Err(e) = Self::load_stars(&url, Arc::clone(&stars)).await {
eprintln!("load_stars failed: {e}");
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("Pimpbunny Provider"),
Some("Failed to load stars during initial load"),
file!(),
line!(),
module_path!(),
).await;
}
if let Err(e) = Self::load_categories(&url, Arc::clone(&categories)).await {
eprintln!("load_categories failed: {e}");
send_discord_error_report(
e.to_string(),
Some(format_error_chain(&e)),
Some("Pimpbunny Provider"),
Some("Failed to load categories during initial load"),
file!(),
line!(),
module_path!(),
).await;
}
});
});
}
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
if let Ok(mut vec) = target.write() {
if !vec.iter().any(|x| x.id == item.id) {
vec.push(item);
}
}
}
async fn load_stars(base: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new();
let text = requester
.get(
&format!("{base}/onlyfans-models/?models_per_page=20"),
Some(Version::HTTP_2),
)
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let block = text
.split("vt_list_models_with_advertising_custom_models_list_items")
.last()
.ok_or_else(|| ErrorKind::Parse("missing stars block".into()))?
.split("pb-page-description")
.next()
.unwrap_or("");
for el in block.split("<div class=\"col\">").skip(1) {
if el.contains("pb-promoted-link") || !el.contains("href=\"https://pimpbunny.com/onlyfans-models/") {
continue;
}
let id = el
.split("href=\"https://pimpbunny.com/onlyfans-models/")
.nth(1)
.and_then(|s| s.split("/\"").next())
.ok_or_else(|| ErrorKind::Parse(format!("star id: {el}").into()))?
.to_string();
let title = el
.split("ui-card-title")
.nth(1)
.and_then(|s| s.split('<').next())
.ok_or_else(|| ErrorKind::Parse(format!("star title: {el}").into()))?
.to_string();
Self::push_unique(&stars, FilterOption { id, title });
}
Ok(())
}
async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
let mut requester = Requester::new();
let text = requester
.get(
&format!("{base}/categories/?items_per_page=120"),
Some(Version::HTTP_2),
)
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let block = text
.split("list_categories_categories_list_items")
.last()
.ok_or_else(|| ErrorKind::Parse("categories block".into()))?
.split("pb-pagination-wrapper")
.next()
.unwrap_or("");
for el in block.split("<div class=\"col\">").skip(1) {
let id = el
.split("href=\"https://pimpbunny.com/categories/")
.nth(1)
.and_then(|s| s.split("/\"").next())
.ok_or_else(|| ErrorKind::Parse(format!("category id: {el}").into()))?
.to_string();
let title = el
.split("ui-heading-h3")
.nth(1)
.and_then(|s| s.split('<').next())
.ok_or_else(|| ErrorKind::Parse(format!("category title: {el}").into()))?
.titlecase();
Self::push_unique(&cats, FilterOption { id, title });
}
Ok(())
}
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let sort_string = match sort {
"best rated" => "&sort_by=rating",
"most viewed" => "&sort_by=video_viewed",
_ => "&sort_by=post_date",
};
let video_url = format!(
"{}/videos/{}/?videos_per_page=20{}",
self.url, page, sort_string
);
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
items.clone()
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url, Some(Version::HTTP_11)).await.unwrap();
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn query(
&self,
cache: VideoCache,
page: u8,
query: &str,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let search_string = query.trim().to_string();
let mut video_url = format!(
"{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&videos_per_page=20&from_videos={}",
self.url, search_string.replace(" ","-"), page
);
let sort_string = match options.sort.as_deref().unwrap_or("") {
"best rated" => "&sort_by=rating",
"most viewed" => "&sort_by=video_viewed",
_ => "&sort_by=post_date",
};
if let Some(star) = self
.stars
.read()
.unwrap()
.iter()
.find(|s| s.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
{
video_url = format!(
"{}/onlyfans-models/{}/{}/?videos_per_page=20{}",
self.url, star.id, page, sort_string
);
}
if let Some(cat) = self
.categories
.read()
.unwrap()
.iter()
.find(|c| c.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
{
video_url = format!(
"{}/categories/{}/{}/?videos_per_page=20{}",
self.url, cat.id, page, sort_string
);
}
        // Check the video cache: cached items are returned directly; entries older than
        // five minutes additionally trigger a cache check before being served.
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
return Ok(items.clone());
} else {
let _ = cache.check().await;
return Ok(items.clone());
}
}
None => {
vec![]
}
};
let mut requester = options.requester.clone().unwrap();
let text = requester.get(&video_url, Some(Version::HTTP_2)).await.unwrap();
let video_items: Vec<VideoItem> = self
.get_video_items_from_html(text.clone(), &mut requester)
.await;
if !video_items.is_empty() {
cache.remove(&video_url);
cache.insert(video_url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items)
}
async fn get_video_items_from_html(
&self,
html: String,
requester: &mut Requester,
) -> Vec<VideoItem> {
if html.is_empty() || html.contains("404 Not Found") {
return vec![];
}
let block = match html
.split("-pagination-wrapper")
.next()
.and_then(|s| s.split("video_list").nth(2))
{
Some(b) => b,
None => return vec![],
};
let futures = block
.split("<div class=\"col\">")
.skip(1)
.map(|el| self.get_video_item(el.to_string(), requester.clone()));
join_all(futures)
.await
.into_iter()
.filter_map(Result::ok)
.collect()
}
async fn get_video_item(
&self,
seg: String,
mut requester: Requester,
) -> Result<VideoItem> {
let video_url = seg
.split(" href=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.ok_or_else(|| ErrorKind::Parse("video url".into()))?
.to_string();
let mut title = seg
.split("card-title")
.nth(1)
.and_then(|s| s.split('>').nth(1))
.and_then(|s| s.split('<').next())
.ok_or_else(|| ErrorKind::Parse("video title".into()))?
.trim()
.to_string();
title = decode(title.as_bytes()).to_string().unwrap_or(title).titlecase();
let id = video_url
.split('/')
.nth(4)
.and_then(|s| s.split('.').next())
.ok_or_else(|| ErrorKind::Parse("video id".into()))?
.to_string();
let thumb_block = seg
.split("card-thumbnail")
.nth(1)
.ok_or_else(|| ErrorKind::Parse("thumb block".into()))?;
let mut thumb = thumb_block
.split("src=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
if thumb.starts_with("data:image") {
thumb = thumb_block
.split("data-webp=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
}
let preview = thumb_block
.split("data-preview=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
let (tags, formats, views, duration) =
self.extract_media(&video_url, &mut requester).await?;
Ok(VideoItem::new(
id,
title,
video_url,
"pimpbunny".into(),
thumb,
duration,
)
.formats(formats)
.tags(tags)
.preview(preview)
.views(views))
}
async fn extract_media(
&self,
url: &str,
requester: &mut Requester,
) -> Result<(Vec<String>, Vec<VideoFormat>, u32, u32)> {
let text = requester
.get(url, Some(Version::HTTP_2))
.await
.map_err(|e| Error::from(format!("{}", e)))?;
let json_str = text
.split("application/ld+json\">")
.nth(1)
.and_then(|s| s.split("</script>").next())
.ok_or_else(|| ErrorKind::Parse("ld+json".into()))?;
let json: serde_json::Value = serde_json::from_str(json_str)?;
let video_url = json["contentUrl"].as_str().unwrap_or("").to_string();
let quality = video_url
.split('_')
.last()
.and_then(|s| s.split('.').next())
.unwrap_or("")
.to_string();
let views = json["interactionStatistic"]
.as_array()
.and_then(|a| a.first())
.and_then(|v| v["userInteractionCount"].as_str())
.and_then(|v| v.parse().ok())
.unwrap_or(0);
let duration = json["duration"]
.as_str()
.map(|d| parse_time_to_seconds(&d.replace(['P','T','H','M','S'], "")).unwrap_or(0))
.unwrap_or(0) as u32;
Ok((
vec![],
vec![VideoFormat::new(video_url, quality, "video/mp4".into())],
views,
duration,
))
}
}
#[async_trait]
impl Provider for PimpbunnyProvider {
async fn get_videos(
&self,
cache: VideoCache,
_pool: DbPool,
sort: String,
query: Option<String>,
page: String,
_per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let page = page.parse::<u8>().unwrap_or(1);
let res = match query {
Some(q) => self.to_owned().query(cache, page, &q, options).await,
None => self.get(cache, page, &sort, options).await,
};
res.unwrap_or_else(|e| {
eprintln!("pimpbunny error: {e}");
vec![]
})
}
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
Some(self.build_channel(v))
}
}
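A note on the `push_unique` helper used throughout the new provider above: it deduplicates by `id` while holding the write lock, so the same star or category discovered on several pages is stored only once even when pages are parsed concurrently. A standalone sketch of the same idea (the `FilterOption` stand-in below is local to the example; the real type lives in `crate::status`):

use std::sync::{Arc, RwLock};
use std::thread;

// Local stand-in for crate::status::FilterOption, only for this sketch.
#[derive(Debug, Clone)]
struct FilterOption {
    id: String,
    title: String,
}

// Same shape as PimpbunnyProvider::push_unique: insert only when no entry with the
// same id exists yet, so concurrent parses cannot duplicate filters.
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
    if let Ok(mut list) = target.write() {
        if !list.iter().any(|x| x.id == item.id) {
            list.push(item);
        }
    }
}

fn main() {
    let stars = Arc::new(RwLock::new(Vec::new()));
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let stars = Arc::clone(&stars);
            thread::spawn(move || {
                push_unique(
                    &stars,
                    FilterOption { id: "jane-doe".into(), title: "Jane Doe".into() },
                );
            })
        })
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    // Only one entry survives, no matter how many threads raced.
    assert_eq!(stars.read().unwrap().len(), 1);
}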

View File

@@ -1,12 +1,17 @@
 use crate::DbPool;
+use crate::api::ClientVersion;
 use crate::providers::Provider;
+use crate::status::*;
 use crate::util::cache::VideoCache;
+use crate::util::discord::send_discord_error_report;
+use crate::util::time::parse_time_to_seconds;
 use crate::videos::{ServerOptions, VideoItem};
 use async_trait::async_trait;
-use cute::c;
 use error_chain::error_chain;
-// use percent_encoding::{AsciiSet, CONTROLS, utf8_percent_encode};
+use htmlentity::entity::{decode, ICodedDataTrait};
+use std::sync::{Arc, RwLock};
 use std::vec;
+use std::fmt::Write;
 error_chain! {
     foreign_links {
@@ -15,294 +20,116 @@ error_chain! {
     }
 }
#[derive(serde::Serialize)]
struct PmvhavenRequest {
all: bool, //true,
pmv: bool, //false,
hmv: bool, //false,
hypno: bool, //false,
tiktok: bool, //false,
koreanbj: bool, //false,
other: bool, // false,
explicitContent: Option<bool>, //null,
sameSexContent: Option<bool>, //null,
transContent: Option<String>, //null
seizureWarning: Option<bool>, //null,
tags: Vec<String>, //[],
music: Vec<String>, //[],
stars: Vec<String>, //[],
creators: Vec<String>, //[],
range: Vec<u32>, //[0,40],
activeTime: String, //"All time",
activeQuality: String, //"Quality",
aspectRatio: String, //"Aspect Ratio",
activeView: String, //"Newest",
index: u32, //2,
showSubscriptionsOnly: bool, //false,
query: String, //"no",
profile: Option<String>, //null
}
impl PmvhavenRequest {
pub fn new(page: u32) -> Self {
PmvhavenRequest {
all: true,
pmv: false,
hmv: false,
hypno: false,
tiktok: false,
koreanbj: false,
other: false,
explicitContent: None,
sameSexContent: None,
transContent: None,
seizureWarning: None,
tags: vec![],
music: vec![],
stars: vec![],
creators: vec![],
range: vec![0, 40],
activeTime: "All time".to_string(),
activeQuality: "Quality".to_string(),
aspectRatio: "Aspect Ratio".to_string(),
activeView: "Newest".to_string(),
index: page,
showSubscriptionsOnly: false,
query: "no".to_string(),
profile: None,
}
}
fn hypno(&mut self) -> &mut Self {
self.all = false;
self.pmv = false;
self.hmv = false;
self.tiktok = false;
self.koreanbj = false;
self.other = false;
self.hypno = true;
self
}
fn pmv(&mut self) -> &mut Self {
self.all = false;
self.pmv = true;
self.hmv = false;
self.tiktok = false;
self.koreanbj = false;
self.other = false;
self.hypno = false;
self
}
fn hmv(&mut self) -> &mut Self {
self.all = false;
self.pmv = false;
self.hmv = true;
self.tiktok = false;
self.koreanbj = false;
self.other = false;
self.hypno = false;
self
}
fn tiktok(&mut self) -> &mut Self {
self.all = false;
self.pmv = false;
self.hmv = false;
self.tiktok = true;
self.koreanbj = false;
self.other = false;
self.hypno = false;
self
}
fn koreanbj(&mut self) -> &mut Self {
self.all = false;
self.pmv = false;
self.hmv = false;
self.tiktok = false;
self.koreanbj = true;
self.other = false;
self.hypno = false;
self
}
fn other(&mut self) -> &mut Self {
self.all = false;
self.pmv = false;
self.hmv = false;
self.tiktok = false;
self.koreanbj = false;
self.other = true;
self.hypno = false;
self
}
}
#[derive(serde::Serialize)]
struct PmvhavenSearch {
mode: String, //"DefaultMoreSearch",
data: String, //"pmv",
index: u32,
}
impl PmvhavenSearch {
fn new(search: String, page: u32) -> PmvhavenSearch {
PmvhavenSearch {
mode: "DefaultMoreSearch".to_string(),
data: search,
index: page,
}
}
}
#[derive(serde::Deserialize)]
struct PmvhavenVideo {
title: String, //JAV Addiction Therapy",
_uploader: Option<String>, //itonlygetsworse",
duration: f32, //259.093333,
_width: Option<String>, //3840",
_height: Option<String>, //2160",
_ratio: Option<u32>, //50,
thumbnails: Vec<Option<String>>, //[
// "placeholder",
// "https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/thumbnail/JAV Addiction Therapy_686f24e96f7124f3dfbe90ab.png",
// "https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/thumbnail/webp320_686f24e96f7124f3dfbe90ab.webp"
// ],
views: u32, //1971,
_url: Option<String>, //https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/JAV Addiction Therapy_686f24e96f7124f3dfbe90ab.mp4",
previewUrlCompressed: Option<String>, //https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/videoPreview/comus_686f24e96f7124f3dfbe90ab.mp4",
_seizureWarning: Option<bool>, //false,
_isoDate: Option<String>, //2025-07-10T02:52:26.000Z",
_gayContent: Option<bool>, //false,
_transContent: Option<bool>, //false,
creator: Option<String>, //itonlygetsworse",
_id: String, //686f2aeade2062f93d72931f",
_totalRaters: Option<u32>, //42,
_rating: Option<u32>, //164
}
impl PmvhavenVideo {
fn to_videoitem(self) -> VideoItem {
// let encoded_title = percent_encode_emojis(&self.title);
let thumbnail = self.thumbnails[self.thumbnails.len() - 1]
.clone()
.unwrap_or("".to_string());
// let video_id = thumbnail.split("_").collect::<Vec<&str>>().last().unwrap_or(&"").to_string().split('.').next().unwrap_or("").to_string();
let mut item = VideoItem::new(
self._id.clone(),
self.title.clone(),
format!(
"https://pmvhaven.com/video/{}_{}",
self.title.replace(" ", "-"),
self._id
),
"pmvhaven".to_string(),
thumbnail,
self.duration as u32,
)
.views(self.views);
item = match self.creator {
Some(c) => item.uploader(c),
_ => item,
};
item = match self.previewUrlCompressed {
Some(u) => item.preview(u),
_ => item,
};
return item;
}
}
#[derive(serde::Deserialize)]
struct PmvhavenResponse {
data: Vec<PmvhavenVideo>,
_count: Option<u32>,
}
impl PmvhavenResponse {
fn to_videoitems(self) -> Vec<VideoItem> {
return c![video.to_videoitem(), for video in self.data];
}
}
 #[derive(Debug, Clone)]
 pub struct PmvhavenProvider {
     url: String,
+    stars: Arc<RwLock<Vec<String>>>,
+    categories: Arc<RwLock<Vec<String>>>,
 }
 impl PmvhavenProvider {
     pub fn new() -> Self {
-        PmvhavenProvider {
+        Self {
             url: "https://pmvhaven.com".to_string(),
+            stars: Arc::new(RwLock::new(vec![])),
+            categories: Arc::new(RwLock::new(vec![])),
         }
     }
async fn get(
&self,
cache: VideoCache,
page: u8,
sort: String,
options: ServerOptions,
) -> Result<Vec<VideoItem>> {
let category = options.category.unwrap_or("".to_string());
let index = format!("pmvhaven:{}:{}", page, category);
let url = format!("{}/api/getmorevideos", self.url);
let mut request = PmvhavenRequest::new(page as u32);
request.activeView = sort;
request = match category.as_str() {
"hypno" => {
request.hypno();
request
}
"pmv" => {
request.pmv();
request
}
"hmv" => {
request.hmv();
request
}
"tiktok" => {
request.tiktok();
request
}
"koreanbj" => {
request.koreanbj();
request
}
"other" => {
request.other();
request
}
_ => request,
};
-        let old_items = match cache.get(&index) {
-            Some((time, items)) => {
-                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", url);
-                    return Ok(items.clone());
-                } else {
-                    items.clone()
-                }
-            }
-            None => {
-                vec![]
-            }
-        };
-        let mut requester = options.requester.clone().unwrap();
-        let response = requester.post(&url, &request, vec![("Content-Type".to_string(),"text/plain;charset=UTF-8".to_string())]).await.unwrap();
-        let videos = match response.json::<PmvhavenResponse>().await {
-            Ok(resp) => resp,
-            Err(e) => {
-                println!("Failed to parse PmvhavenResponse: {}", e);
-                return Ok(old_items);
-            }
-        };
-        let video_items: Vec<VideoItem> = videos.to_videoitems();
-        if !video_items.is_empty() {
-            cache.remove(&url);
-            cache.insert(url.clone(), video_items.clone());
-        } else {
-            return Ok(old_items);
-        }
-        return Ok(video_items);
-    }
+    fn build_channel(&self, clientversion: ClientVersion) -> Channel {
+        let _ = clientversion;
+        let categories = self
+            .categories
+            .read()
+            .map(|g| g.clone())
+            .unwrap_or_default();
+        Channel {
+            id: "pmvhaven".to_string(),
+            name: "PMVHaven".to_string(),
+            description: "Best PMV Videos".to_string(),
+            premium: false,
+            favicon: "https://www.google.com/s2/favicons?sz=64&domain=pmvhaven.com".to_string(),
+            status: "active".to_string(),
+            categories,
+            options: vec![
+                ChannelOption {
+                    id: "sort".into(),
+                    title: "Sort".into(),
+                    description: "Sort the Videos".into(),
+                    systemImage: "list.number".into(),
+                    colorName: "blue".into(),
+                    options: vec![
+                        FilterOption {
+                            id: "relevance".into(),
+                            title: "Relevance".into(),
+                        },
+                        FilterOption {
+                            id: "newest".into(),
+                            title: "Newest".into(),
+                        },
+                        FilterOption {
+                            id: "oldest".into(),
+                            title: "Oldest".into(),
+                        },
+                        FilterOption {
+                            id: "most viewed".into(),
+                            title: "Most Viewed".into(),
+                        },
+                        FilterOption {
+                            id: "most liked".into(),
+                            title: "Most Liked".into(),
+                        },
+                        FilterOption {
+                            id: "most disliked".into(),
+                            title: "Most Disliked".into(),
+                        },
+                    ],
+                    multiSelect: false,
+                },
+                ChannelOption {
+                    id: "duration".into(),
+                    title: "Duration".into(),
+                    description: "Length of the Videos".into(),
+                    systemImage: "timer".into(),
+                    colorName: "green".into(),
+                    options: vec![
+                        FilterOption {
+                            id: "any".into(),
+                            title: "Any".into(),
+                        },
+                        FilterOption {
+                            id: "<4 min".into(),
+                            title: "<4 min".into(),
+                        },
+                        FilterOption {
+                            id: "4-20 min".into(),
+                            title: "4-20 min".into(),
+                        },
+                        FilterOption {
+                            id: "20-60 min".into(),
+                            title: "20-60 min".into(),
+                        },
+                        FilterOption {
+                            id: ">1 hour".into(),
+                            title: ">1 hour".into(),
+                        },
+                    ],
+                    multiSelect: false,
+                },
+            ],
+            nsfw: true,
+            cacheDuration: None,
+        }
+    }
+    fn push_unique(target: &Arc<RwLock<Vec<String>>>, item: String) {
+        if let Ok(mut vec) = target.write() {
+            if !vec.iter().any(|x| x == &item) {
+                vec.push(item);
+            }
+        }
+    }
     async fn query(
@@ -312,41 +139,123 @@ impl PmvhavenProvider {
         query: &str,
         options: ServerOptions,
     ) -> Result<Vec<VideoItem>> {
-        let index = format!("pmvhaven:{}:{}", query, page);
-        let url = format!("{}/api/v2/search", self.url);
-        let request = PmvhavenSearch::new(query.to_string(), page as u32);
-        // Check our Video Cache. If the result is younger than 1 hour, we return it.
-        let old_items = match cache.get(&index) {
-            Some((time, items)) => {
-                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    return Ok(items.clone());
-                } else {
-                    let _ = cache.check().await;
-                    return Ok(items.clone());
-                }
-            }
-            None => {
-                vec![]
-            }
-        };
-        let mut requester = options.requester.clone().unwrap();
-        let response = requester.post(&url, &request, vec![("Content-Type".to_string(),"text/plain;charset=UTF-8".to_string())]).await.unwrap();
-        let videos = match response.json::<PmvhavenResponse>().await {
-            Ok(resp) => resp,
-            Err(e) => {
-                println!("Failed to parse PmvhavenResponse: {}", e);
-                return Ok(old_items);
-            }
-        };
-        let video_items: Vec<VideoItem> = videos.to_videoitems();
-        if !video_items.is_empty() {
-            cache.remove(&url);
-            cache.insert(url.clone(), video_items.clone());
-        } else {
-            return Ok(old_items);
-        }
-        return Ok(video_items);
-    }
+        let search = query.trim().to_string();
+        let sort = match options.sort.as_deref() {
+            Some("newest") => "&sort=-uploadDate",
+            Some("oldest") => "&sort=uploadDate",
+            Some("most viewed") => "&sort=-views",
+            Some("most liked") => "&sort=-likes",
+            Some("most disliked") => "&sort=-dislikes",
+            _ => "",
+        };
+        let duration = match options.duration.as_deref() {
+            Some("<4 min") => "&durationMax=240",
+            Some("4-20 min") => "&durationMin=240&durationMax=1200",
+            Some("20-60 min") => "&durationMin=1200&durationMax=3600",
+            Some(">1 hour") => "&durationMin=3600",
+            _ => "",
+        };
+        let endpoint = if search.is_empty() {
+            "api/videos"
+        } else {
+            "api/videos/search"
+        };
+        let mut url = format!(
+            "{}/{endpoint}?limit=100&page={page}{duration}{sort}",
+            self.url
+        );
+        if let Ok(stars) = self.stars.read() {
+            if let Some(star) = stars.iter().find(|s| s.eq_ignore_ascii_case(&search)) {
+                url.push_str(&format!("&stars={star}"));
+            }
+        }
+        if let Ok(cats) = self.categories.read() {
+            if let Some(cat) = cats.iter().find(|c| c.eq_ignore_ascii_case(&search)) {
+                url.push_str(&format!("&tagMode=OR&tags={cat}&expandTags=false"));
+            }
+        }
+        if !search.is_empty() {
+            url.push_str(&format!("&q={search}"));
+        }
+        if let Some((time, items)) = cache.get(&url) {
+            if time.elapsed().unwrap_or_default().as_secs() < 300 {
+                return Ok(items.clone());
+            }
+        }
+        let mut requester = match options.requester {
+            Some(r) => r,
+            None => return Ok(vec![]),
+        };
+        let text = requester.get(&url, None).await.unwrap_or_default();
+        let json = serde_json::from_str(&text).unwrap_or(serde_json::Value::Null);
+        let items = self.get_video_items_from_json(json).await;
+        if !items.is_empty() {
+            cache.remove(&url);
+            cache.insert(url, items.clone());
+        }
+        Ok(items)
+    }
+    async fn get_video_items_from_json(&self, json: serde_json::Value) -> Vec<VideoItem> {
+        let mut items = vec![];
+        if !json.get("success").and_then(|v| v.as_bool()).unwrap_or(false) {
+            return items;
+        }
+        let videos = json.get("data").and_then(|v| v.as_array()).cloned().unwrap_or_default();
+        for video in videos {
+            let title = decode(video.get("title").and_then(|v| v.as_str()).unwrap_or("").as_bytes())
+                .to_string()
+                .unwrap_or_default();
+            let id = video
+                .get("_id")
+                .and_then(|v| v.as_str())
+                .unwrap_or(&title)
+                .to_string();
+            let video_url = video.get("videoUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
+            let thumb = video.get("thumbnailUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
+            let preview = video.get("previewUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
+            let views = video.get("views").and_then(|v| v.as_u64()).unwrap_or(0);
+            let duration = parse_time_to_seconds(video.get("duration").and_then(|v| v.as_str()).unwrap_or("0")).unwrap_or(0);
+            let tags = video.get("tags").and_then(|v| v.as_array()).cloned().unwrap_or_default();
+            let stars = video.get("starsTags").and_then(|v| v.as_array()).cloned().unwrap_or_default();
+            for t in tags.iter() {
+                if let Some(s) = t.as_str() {
+                    let decoded = decode(s.as_bytes()).to_string().unwrap_or_default();
+                    Self::push_unique(&self.categories, decoded.clone());
+                }
+            }
+            for t in stars.iter() {
+                if let Some(s) = t.as_str() {
+                    let decoded = decode(s.as_bytes()).to_string().unwrap_or_default();
+                    Self::push_unique(&self.stars, decoded.clone());
+                }
+            }
+            items.push(
+                VideoItem::new(id, title, video_url.replace(' ', "%20"), "pmvhaven".into(), thumb, duration as u32)
+                    .views(views as u32)
+                    .preview(preview)
+            );
+        }
+        items
+    }
 }
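The rewritten `get_video_items_from_json` above walks the untyped `serde_json::Value` defensively instead of deserializing into the old `PmvhavenResponse` struct. A small self-contained sketch of that access pattern (the field names mirror the ones read above; the sample values are made up):

use serde_json::json;

fn main() {
    // Made-up stand-in for one entry of the provider's "data" array.
    let video = json!({
        "title": "Example Video",
        "_id": "abc123",
        "views": 1971,
        "tags": ["tag a", "tag b"]
    });
    // The same Option-chaining used in get_video_items_from_json: every step can
    // fail without panicking, and a default is supplied at the end.
    let title = video.get("title").and_then(|v| v.as_str()).unwrap_or("");
    let id = video.get("_id").and_then(|v| v.as_str()).unwrap_or(title);
    let views = video.get("views").and_then(|v| v.as_u64()).unwrap_or(0);
    let tags: Vec<&str> = video
        .get("tags")
        .and_then(|v| v.as_array())
        .map(|a| a.iter().filter_map(|t| t.as_str()).collect())
        .unwrap_or_default();
    println!("{id}: {title}, {views} views, tags {tags:?}");
}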
@@ -355,36 +264,39 @@ impl Provider for PmvhavenProvider {
     async fn get_videos(
         &self,
         cache: VideoCache,
-        pool: DbPool,
-        sort: String,
+        _pool: DbPool,
+        _sort: String,
         query: Option<String>,
         page: String,
-        per_page: String,
+        _per_page: String,
         options: ServerOptions,
     ) -> Vec<VideoItem> {
-        let _ = per_page;
-        let _ = pool; // Ignored in this implementation
-        let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
-            Some(q) => {
-                self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
-                    .await
-            }
-            None => {
-                self.get(
-                    cache,
-                    page.parse::<u8>().unwrap_or(1),
-                    sort,
-                    options,
-                )
-                .await
-            }
-        };
-        match videos {
+        let page = page.parse::<u8>().unwrap_or(1);
+        let query = query.unwrap_or_default();
+        match self.query(cache, page, &query, options).await {
             Ok(v) => v,
             Err(e) => {
-                println!("Error fetching videos: {}", e);
+                eprintln!("pmvhaven error: {e}");
+                let mut chain_str = String::new();
+                for (i, cause) in e.iter().enumerate() {
+                    let _ = writeln!(chain_str, "{}. {}", i + 1, cause);
+                }
+                send_discord_error_report(
+                    e.to_string(),
+                    Some(chain_str),
+                    Some("PMVHaven Provider"),
+                    Some("Failed to load videos from PMVHaven"),
+                    file!(),
+                    line!(),
+                    module_path!(),
+                ).await;
                 vec![]
             }
         }
     }
+    fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
+        Some(self.build_channel(clientversion))
+    }
 }
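The new error branch above flattens the `error_chain` cause chain into a single string with `std::fmt::Write` before sending it to Discord. The same pattern works for any standard error chain; a minimal sketch using only the standard library (`format_chain` is a hypothetical name, not part of the crate):

use std::error::Error;
use std::fmt::Write;

// Hypothetical helper mirroring the loop in get_videos: number each cause and
// append it to one String. error_chain's `e.iter()` yields causes much like
// walking `Error::source()` does here.
fn format_chain(err: &dyn Error) -> String {
    let mut chain = String::new();
    let mut current: Option<&dyn Error> = Some(err);
    let mut index = 0;
    while let Some(cause) = current {
        index += 1;
        // Writing into a String cannot fail, so the Result is ignored.
        let _ = writeln!(chain, "{}. {}", index, cause);
        current = cause.source();
    }
    chain
}

fn main() {
    let io_err = std::io::Error::new(std::io::ErrorKind::Other, "connection reset");
    println!("{}", format_chain(&io_err));
}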

View File

@@ -49,7 +49,6 @@ impl Porn00Provider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -62,7 +61,7 @@ impl Porn00Provider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -99,7 +98,7 @@ impl Porn00Provider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);

View File

@@ -42,7 +42,6 @@ impl PornhatProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -53,7 +52,7 @@ impl PornhatProvider {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -92,7 +91,7 @@ impl PornhatProvider {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);

View File

@@ -4,61 +4,74 @@ use crate::providers::Provider;
 use crate::util::cache::VideoCache;
 use crate::util::time::parse_time_to_seconds;
 use crate::videos::{ServerOptions, VideoItem};
 use error_chain::error_chain;
-use htmlentity::entity::{ICodedDataTrait, decode};
-use std::vec;
+use htmlentity::entity::{decode, ICodedDataTrait};
 use async_trait::async_trait;
+use std::vec;
 error_chain! {
     foreign_links {
         Io(std::io::Error);
         HttpRequest(wreq::Error);
     }
+    errors {
+        Parse(msg: String) {
+            description("parse error")
+            display("parse error: {}", msg)
+        }
+    }
 }
 #[derive(Debug, Clone)]
 pub struct PornhubProvider {
     url: String,
 }
 impl PornhubProvider {
     pub fn new() -> Self {
-        PornhubProvider {
+        Self {
             url: "https://www.pornhub.com".to_string(),
         }
     }
     async fn get(
         &self,
         cache: VideoCache,
         page: u8,
         sort: &str,
-        options:ServerOptions
+        options: ServerOptions,
     ) -> Result<Vec<VideoItem>> {
         let video_url = format!("{}/video?o={}&page={}", self.url, sort, page);
         let old_items = match cache.get(&video_url) {
-            Some((time, items)) => {
-                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
-                    return Ok(items.clone());
-                } else {
-                    items.clone()
-                }
-            }
-            None => {
-                vec![]
-            }
+            Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
+                return Ok(items.clone());
+            }
+            Some((_, items)) => items.clone(),
+            None => vec![],
         };
-        let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
-        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(),"<ul id=\"video");
-        if !video_items.is_empty() {
+        let mut requester = match options.requester.clone() {
+            Some(r) => r,
+            None => return Ok(old_items),
+        };
+        let text = match requester.get(&video_url, None).await {
+            Ok(t) => t,
+            Err(_) => return Ok(old_items),
+        };
+        let video_items = self.get_video_items_from_html(text, "<ul id=\"video");
+        if video_items.is_empty() {
+            Ok(old_items)
+        } else {
             cache.remove(&video_url);
             cache.insert(video_url.clone(), video_items.clone());
-        } else {
-            return Ok(old_items);
-        }
-        Ok(video_items)
-    }
+            Ok(video_items)
+        }
+    }
     async fn query(
         &self,
@@ -66,177 +79,179 @@ impl PornhubProvider {
         page: u8,
         query: &str,
         sort: &str,
-        options:ServerOptions
+        options: ServerOptions,
     ) -> Result<Vec<VideoItem>> {
         let mut split_string = "<ul id=\"video";
-        let search_string = query.to_lowercase().trim().replace(" ", "+");
-        let mut video_url = format!("{}/video/search?search={}&page={}", self.url, search_string, page);
-        if query.starts_with("@"){
-            let url_parts = query[1..].split(":").collect::<Vec<&str>>();
-            video_url = [self.url.to_string(), url_parts[0].to_string(), url_parts[1].replace(" ", "-").to_string(), "videos?page=".to_string()].join("/");
-            video_url += &page.to_string();
-            if query.contains("@model") || query.contains("@pornstar"){
+        let search_string = query.to_lowercase().trim().replace(' ', "+");
+        let mut video_url =
+            format!("{}/video/search?search={}&page={}", self.url, search_string, page);
+        if query.starts_with('@') {
+            let mut parts = query[1..].split(':');
+            let a = parts.next().unwrap_or("");
+            let b = parts.next().unwrap_or("");
+            video_url = format!("{}/{}/{}/videos?page={}", self.url, a, b.replace(' ', "-"), page);
+            if query.contains("@model") || query.contains("@pornstar") {
                 split_string = "mostRecentVideosSection";
             }
-            if query.contains("@channels"){
+            if query.contains("@channels") {
                 split_string = "<ul class=\"videos row-5-thumbs";
             }
         }
-        if query.contains("@channels"){
-            video_url += match sort {
-                "mr" => "",
-                "mv" => "&o=vi",
-                "tr" => "&o=ra",
-                _ => "",
-            }
-        } else{
-            video_url += match sort {
-                "mr" => "",
-                "mv" => "&o=mv",
-                "tr" => "&o=tr",
-                "lg" => "&o=lg",
-                _ => "&o=mv",
-            }
-        }
-        // Check our Video Cache. If the result is younger than 1 hour, we return it.
+        video_url.push_str(match (query.contains("@channels"), sort) {
+            (true, "mv") => "&o=vi",
+            (true, "tr") => "&o=ra",
+            (false, "mv") => "&o=mv",
+            (false, "tr") => "&o=tr",
+            (false, "lg") => "&o=lg",
+            _ => "",
+        });
         let old_items = match cache.get(&video_url) {
-            Some((time, items)) => {
-                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    return Ok(items.clone());
-                } else {
-                    let _ = cache.check().await;
-                    return Ok(items.clone());
-                }
-            }
-            None => {
-                vec![]
-            }
+            Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
+                return Ok(items.clone());
+            }
+            Some((_, items)) => items.clone(),
+            None => vec![],
         };
-        let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
-        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(),split_string);
-        if !video_items.is_empty() {
+        let mut requester = match options.requester.clone() {
+            Some(r) => r,
+            None => return Ok(old_items),
+        };
+        let text = match requester.get(&video_url, None).await {
+            Ok(t) => t,
+            Err(_) => return Ok(old_items),
+        };
+        let video_items = self.get_video_items_from_html(text, split_string);
+        if video_items.is_empty() {
+            Ok(old_items)
+        } else {
             cache.remove(&video_url);
             cache.insert(video_url.clone(), video_items.clone());
-        } else {
-            return Ok(old_items);
-        }
-        Ok(video_items)
-    }
+            Ok(video_items)
+        }
+    }
     fn get_video_items_from_html(&self, html: String, split_string: &str) -> Vec<VideoItem> {
-        if html.is_empty() {
-            println!("HTML is empty");
-            return vec![];
-        }
-        let mut items: Vec<VideoItem> = Vec::new();
-        let video_listing_content = html.split(split_string).collect::<Vec<&str>>()[1].split("Porn in German").collect::<Vec<&str>>()[0];
-        let raw_videos = video_listing_content
-            .split("class=\"pcVideoListItem ")
-            .collect::<Vec<&str>>()[1..]
-            .to_vec();
-        for video_segment in &raw_videos {
-            // let vid = video_segment.split("\n").collect::<Vec<&str>>();
-            // for (index, line) in vid.iter().enumerate() {
-            //     println!("Line {}: {}", index, line);
-            // }
-            if video_segment.contains("wrapVideoBlock"){
-                continue; // Skip if the segment is a wrapVideoBlock
-            }
-            let video_url: String;
-            if !video_segment.contains("<a href=\"") {
-                let url_part = video_segment.split("data-video-vkey=\"").collect::<Vec<&str>>()[1]
-                    .split("\"")
-                    .collect::<Vec<&str>>()[0];
-                video_url = format!("{}{}", self.url, url_part);
-            }
-            else{
-                let url_part = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
-                    .split("\"")
-                    .collect::<Vec<&str>>()[0];
-                if url_part.is_empty() || url_part == "javascript:void(0)" {
-                    continue;
-                }
-                video_url = format!("{}{}", self.url, url_part);
-            }
-            if video_url.starts_with("https://www.pornhub.comjavascript:void(0)") {
-                continue;
-            }
-            let mut title = video_segment.split("\" title=\"").collect::<Vec<&str>>()[1]
-                .split("\"")
-                .collect::<Vec<&str>>()[0]
-                .to_string();
-            // html decode
-            title = decode(title.as_bytes()).to_string().unwrap_or(title);
-            let id = video_segment.split("data-video-id=\"").collect::<Vec<&str>>()[1]
-                .split("\"")
-                .collect::<Vec<&str>>()[0]
-                .to_string();
-            let raw_duration = video_segment.split("duration").collect::<Vec<&str>>()[1].split(">").collect::<Vec<&str>>()[1]
-                .split("<")
-                .collect::<Vec<&str>>()[0]
-                .to_string();
-            let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
-            let view_part = match video_segment.split("iews\">").collect::<Vec<&str>>().len(){
-                2 => video_segment.split("iews\">").collect::<Vec<&str>>()[1],
-                3 => video_segment.split("iews\">").collect::<Vec<&str>>()[2],
-                _ => "<var>0<", // Skip if the format is unexpected
-            };
-            let views = parse_abbreviated_number(view_part
-                .split("<var>").collect::<Vec<&str>>()[1]
-                .split("<")
-                .collect::<Vec<&str>>()[0]).unwrap_or(0);
-            let thumb = video_segment.split("src=\"").collect::<Vec<&str>>()[1]
-                .split("\"")
-                .collect::<Vec<&str>>()[0]
-                .to_string();
-            let uploaderBlock;
-            let uploader_href;
-            let mut tag = String::new();
-            if video_segment.contains("videoUploaderBlock") {
-                uploaderBlock = video_segment.split("videoUploaderBlock").collect::<Vec<&str>>()[1]
-                    .to_string();
-                uploader_href = uploaderBlock.split("href=\"").collect::<Vec<&str>>()[1]
-                    .split("\"")
-                    .collect::<Vec<&str>>()[0]
-                    .split("/").collect::<Vec<&str>>();
-                tag = format!("@{}:{}", uploader_href[1], uploader_href[2].replace("-", " "));
-            }
-            else{
-                uploader_href = vec![];
-            }
-            let mut video_item = VideoItem::new(
-                id,
-                title,
-                video_url.to_string(),
-                "pornhub".to_string(),
-                thumb,
-                duration,
-            )
-            ;
-            if views > 0 {
-                video_item = video_item.views(views);
-            }
-            if !tag.is_empty() {
-                video_item = video_item.tags(vec![tag])
-                    .uploader(uploader_href[2].to_string());
-            }
-            items.push(video_item);
-        }
-        return items;
-    }
+        let content = match html.split(split_string).nth(1) {
+            Some(c) => c,
+            None => return vec![],
+        };
+        let content = content.split("Porn in German").next().unwrap_or("");
+        let mut items = Vec::new();
+        for seg in content
+            .split("class=\"pcVideoListItem ")
+            .skip(1)
+            .filter(|s| !s.contains("wrapVideoBlock"))
+        {
+            let url_part = seg
+                .split("<a href=\"")
+                .nth(1)
+                .or_else(|| seg.split("data-video-vkey=\"").nth(1))
+                .and_then(|s| s.split('"').next());
+            let video_url = match url_part {
+                Some(u) if !u.is_empty() && u != "javascript:void(0)" => format!("{}{}", self.url, u),
+                _ => continue,
+            };
+            let mut title = seg
+                .split("\" title=\"")
+                .nth(1)
+                .and_then(|s| s.split('"').next())
+                .unwrap_or("")
+                .to_string();
+            title = decode(title.as_bytes()).to_string().unwrap_or(title);
+            let id = match seg
+                .split("data-video-id=\"")
+                .nth(1)
+                .and_then(|s| s.split('"').next())
+            {
+                Some(id) => id.to_string(),
+                None => continue,
+            };
+            let raw_duration = seg
+                .split("duration")
+                .nth(1)
+                .and_then(|s| s.split('>').nth(1))
+                .and_then(|s| s.split('<').next())
+                .unwrap_or("0:00");
+            let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;
+            let views = seg
+                .split("iews\">")
+                .filter_map(|p| p.split("<var>").nth(1))
+                .next()
+                .and_then(|v| v.split('<').next())
+                .and_then(|v| parse_abbreviated_number(v))
+                .unwrap_or(0);
+            let thumb = seg
+                .split("src=\"")
+                .nth(1)
+                .and_then(|s| s.split('"').next())
+                .unwrap_or("")
+                .to_string();
+            let (tag, uploader) = if seg.contains("videoUploaderBlock") {
+                let href = seg
+                    .split("videoUploaderBlock")
+                    .nth(1)
+                    .and_then(|s| s.split("href=\"").nth(1))
+                    .and_then(|s| s.split('"').next())
+                    .unwrap_or("");
+                let parts: Vec<&str> = href.split('/').collect();
+                if parts.len() >= 3 {
+                    (
+                        Some(format!("@{}:{}", parts[1], parts[2].replace('-', " "))),
+                        Some(parts[2].to_string()),
+                    )
+                } else {
+                    (None, None)
+                }
+            } else {
+                (None, None)
+            };
+            let mut item = VideoItem::new(
+                id,
+                title,
+                video_url,
+                "pornhub".into(),
+                thumb,
+                duration,
+            );
+            if views > 0 {
+                item = item.views(views);
+            }
+            if let Some(t) = tag {
+                item = item.tags(vec![t]);
+            }
+            if let Some(u) = uploader {
+                item = item.uploader(u);
+            }
+            items.push(item);
+        }
+        items
+    }
 }
 #[async_trait]
@@ -251,29 +266,29 @@ impl Provider for PornhubProvider {
         per_page: String,
         options: ServerOptions,
     ) -> Vec<VideoItem> {
-        let _ = options;
-        let _ = per_page;
-        let _ = pool; // Ignored in this implementation
-        let mut sort = sort.to_lowercase();
-        if sort.contains("date"){
-            sort = "mr".to_string();
-        }
-        let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
-            Some(q) => {
-                self.query(cache, page.parse::<u8>().unwrap_or(1), &q, &sort, options)
-                    .await
-            }
-            None => {
-                self.get(cache, page.parse::<u8>().unwrap_or(1), &sort, options)
-                    .await
-            }
-        };
-        match videos {
-            Ok(v) => v,
-            Err(e) => {
-                println!("Error fetching videos: {}", e);
-                vec![]
-            }
-        }
+        let _ = pool;
+        let _ = per_page;
+        let page = page.parse::<u8>().unwrap_or(1);
+        let mut sort = match sort.as_str() {
+            "mv" => "mv",
+            "tr" => "tr",
+            "cm" => "cm",
+            "lg" => "lg",
+            _ => "mr",
+        };
+        if sort.contains("date") {
+            sort = "mr".into();
+        }
+        let res = match query {
+            Some(q) => self.query(cache, page, &q, &sort, options).await,
+            None => self.get(cache, page, &sort, options).await,
+        };
+        res.unwrap_or_else(|e| {
+            eprintln!("PornhubProvider error: {e}");
+            vec![]
+        })
     }
 }
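One detail of the rewritten query builder above: the old nested `if`/`match` over `sort` is collapsed into a single `match` on the tuple `(query.contains("@channels"), sort)`. A tiny standalone illustration of that pattern (the function and parameter names here are illustrative only):

fn sort_suffix(is_channel_query: bool, sort: &str) -> &'static str {
    // Same shape as the match in PornhubProvider::query: one arm per
    // (channel?, sort) combination, with a catch-all default.
    match (is_channel_query, sort) {
        (true, "mv") => "&o=vi",
        (true, "tr") => "&o=ra",
        (false, "mv") => "&o=mv",
        (false, "tr") => "&o=tr",
        (false, "lg") => "&o=lg",
        _ => "",
    }
}

fn main() {
    assert_eq!(sort_suffix(true, "mv"), "&o=vi");
    assert_eq!(sort_suffix(false, "mr"), "");
}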

View File

@@ -84,7 +84,6 @@ impl PornxpProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -96,7 +95,7 @@ impl PornxpProvider {
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -142,7 +141,7 @@ impl PornxpProvider {
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -266,7 +265,7 @@ impl Provider for PornxpProvider {
             }
         }
     }
-    fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
-        self.build_channel(clientversion)
+    fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
+        Some(self.build_channel(clientversion))
     }
 }

View File

@@ -63,8 +63,7 @@ impl PornzogProvider {
         };
         let mut requester = options.requester.clone().unwrap();
-        println!("Fetching URL: {}", video_url);
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -77,7 +76,6 @@ impl PornzogProvider {
     fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
         if html.is_empty() {
-            println!("HTML is empty");
             return vec![];
         }
         let mut items: Vec<VideoItem> = Vec::new();

View File

@@ -39,7 +39,6 @@ impl RedtubeProvider {
         let old_items = match cache.get(&video_url) {
             Some((time, items)) => {
                 if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                    println!("Cache hit for URL: {}", video_url);
                     return Ok(items.clone());
                 } else {
                     items.clone()
@@ -50,7 +49,7 @@ impl RedtubeProvider {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);
@@ -87,7 +86,7 @@ impl RedtubeProvider {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&video_url).await.unwrap();
+        let text = requester.get(&video_url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html_query(text.clone());
         if !video_items.is_empty() {
             cache.remove(&video_url);

View File

@@ -103,7 +103,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
             old_items = match cache.get(&index) {
                 Some((time, items)) => {
                     if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
-                        println!("Cache hit for URL: {}", url);
+                        // println!("Cache hit for URL: {}", url);
                         return Ok(items.clone());
                     } else {
                         items.clone()
@@ -115,7 +115,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
             };
         }
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&url).await.unwrap();
+        let text = requester.get(&url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&url);
@@ -163,7 +163,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
             }
         };
         let mut requester = options.requester.clone().unwrap();
-        let text = requester.get(&url).await.unwrap();
+        let text = requester.get(&url, None).await.unwrap();
         let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
         if !video_items.is_empty() {
             cache.remove(&url);
@@ -282,7 +282,7 @@ impl Provider for Rule34genProvider {
         }
     }
-    fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
-        self.build_channel(clientversion)
+    fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
+        Some(self.build_channel(clientversion))
    }
 }

View File

@@ -1,203 +1,241 @@
use crate::util::parse_abbreviated_number;
use crate::DbPool; use crate::DbPool;
use crate::providers::Provider; use crate::providers::Provider;
use crate::util::cache::VideoCache; use crate::util::cache::VideoCache;
use crate::util::discord::send_discord_error_report;
use crate::util::parse_abbreviated_number;
use crate::util::time::parse_time_to_seconds; use crate::util::time::parse_time_to_seconds;
use crate::videos::{ServerOptions, VideoItem}; use crate::videos::{ServerOptions, VideoItem};
use async_trait::async_trait;
use error_chain::error_chain; use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode}; use htmlentity::entity::{ICodedDataTrait, decode};
use std::vec;
use std::time::{SystemTime, UNIX_EPOCH}; use std::time::{SystemTime, UNIX_EPOCH};
use async_trait::async_trait; use std::vec;
error_chain! { error_chain! {
foreign_links { foreign_links {
Io(std::io::Error); Io(std::io::Error);
HttpRequest(wreq::Error); HttpRequest(wreq::Error);
} }
errors {
ParsingError(t: String) {
description("html parsing error")
display("HTML parsing error: '{}'", t)
}
}
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct Rule34videoProvider { pub struct Rule34videoProvider {
url: String, url: String,
} }
impl Rule34videoProvider { impl Rule34videoProvider {
pub fn new() -> Self { pub fn new() -> Self {
Rule34videoProvider { Rule34videoProvider {
url: "https://rule34video.com".to_string(), url: "https://rule34video.com".to_string(),
} }
} }
/// Helper to safely extract a string between two delimiters
fn extract_between<'a>(content: &'a str, start_pat: &str, end_pat: &str) -> Option<&'a str> {
let start_idx = content.find(start_pat)? + start_pat.len();
let sub = &content[start_idx..];
let end_idx = sub.find(end_pat)?;
Some(&sub[..end_idx])
}
async fn get( async fn get(
&self, &self,
cache: VideoCache, cache: VideoCache,
page: u8, page: u8,
sort: &str, sort: &str,
options: ServerOptions options: ServerOptions,
) -> Result<Vec<VideoItem>> { ) -> Result<Vec<VideoItem>> {
let now = SystemTime::now() let timestamp_millis = SystemTime::now()
.duration_since(UNIX_EPOCH) .duration_since(UNIX_EPOCH)
.expect("Time went backwards"); .map(|d| d.as_millis())
.unwrap_or(0);
let timestamp_millis = now.as_millis(); // u128 let expected_sorts = vec![
let expected_sorts = vec!["post_date", "video_viewed", "rating", "duration", "pseudo_random"]; "post_date",
let sort = if expected_sorts.contains(&sort) { "video_viewed",
"rating",
"duration",
"pseudo_random",
];
let sort_val = if expected_sorts.contains(&sort) {
sort sort
} else { } else {
"post_date" "post_date"
}; };
let index = format!("rule34video:{}:{}", page, sort); let index = format!("rule34video:{}:{}", page, sort_val);
let url = format!("{}/?mode=async&function=get_block&block_id=custom_list_videos_most_recent_videos&tag_ids=&sort_by={}&from={}&_={}", self.url, sort, page, timestamp_millis); if sort_val != "pseudo_random" {
if let Some((time, items)) = cache.get(&index) {
let mut old_items: Vec<VideoItem> = vec![]; if time.elapsed().unwrap_or_default().as_secs() < 300 {
if !(sort == "pseudo_random") {
old_items = match cache.get(&index) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", url);
return Ok(items.clone()); return Ok(items.clone());
} else {
items.clone()
} }
} }
None => {
vec![]
} }
};
} let mut requester = options.requester.clone().ok_or("Requester missing")?;
let mut requester = options.requester.clone().unwrap(); let url = format!(
let text = requester.get(&url).await.unwrap(); "{}/?mode=async&function=get_block&block_id=custom_list_videos_most_recent_videos&tag_ids=&sort_by={}&from={}&_={}",
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone()); self.url, sort_val, page, timestamp_millis
);
let text = requester.get(&url, None).await.unwrap_or_else(|e| {
eprintln!("Error fetching rule34video URL {}: {}", url, e);
let _ = send_discord_error_report(e.to_string(), None, Some(&url), None, file!(), line!(), module_path!());
"".to_string()
});
let video_items = self.get_video_items_from_html(text);
if !video_items.is_empty() { if !video_items.is_empty() {
cache.remove(&url); cache.insert(index, video_items.clone());
cache.insert(url.clone(), video_items.clone());
} else {
return Ok(old_items);
}
Ok(video_items) Ok(video_items)
} else {
// Return empty or old items if available
Ok(cache
.get(&index)
.map(|(_, items)| items)
.unwrap_or_default())
} }
}
    async fn query(
        &self,
        cache: VideoCache,
        page: u8,
        query: &str,
        sort: &str,
        options: ServerOptions,
    ) -> Result<Vec<VideoItem>> {
        let timestamp_millis = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .map(|d| d.as_millis())
            .unwrap_or(0);

        let expected_sorts = vec![
            "post_date",
            "video_viewed",
            "rating",
            "duration",
            "pseudo_random",
        ];
        let sort_val = if expected_sorts.contains(&sort) {
            sort
        } else {
            "post_date"
        };

        let index = format!("rule34video:{}:{}:{}", page, sort_val, query);

        if let Some((time, items)) = cache.get(&index) {
            if time.elapsed().unwrap_or_default().as_secs() < 300 {
                return Ok(items.clone());
            }
        }

        let mut requester = options.requester.clone().ok_or("Requester missing")?;
        let url = format!(
            "{}/search/{}/?mode=async&function=get_block&block_id=custom_list_videos_videos_list_search&tag_ids=&sort_by={}&from_videos={}&from_albums={}&_={}",
            self.url,
            query.replace(" ", "-"),
            sort_val,
            page,
            page,
            timestamp_millis
        );
        let text = requester.get(&url, None).await.unwrap_or_else(|e| {
            eprintln!("Error fetching rule34video URL {}: {}", url, e);
            let _ = send_discord_error_report(e.to_string(), None, Some(&url), None, file!(), line!(), module_path!());
            "".to_string()
        });

        let video_items = self.get_video_items_from_html(text);
        if !video_items.is_empty() {
            cache.insert(index, video_items.clone());
            Ok(video_items)
        } else {
            Ok(cache
                .get(&index)
                .map(|(_, items)| items)
                .unwrap_or_default())
        }
    }
    fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
        if html.is_empty() {
            return vec![];
        }

        // Safely isolate the video listing section
        let video_listing = match Self::extract_between(
            &html,
            "id=\"custom_list_videos",
            "<div class=\"pagination\"",
        ) {
            Some(content) => content,
            None => return vec![],
        };

        let mut items = Vec::new();

        // Skip the first split result as it's the preamble
        let raw_videos = video_listing
            .split("<div class=\"item thumb video_")
            .skip(1);

        for video_segment in raw_videos {
            if video_segment.contains("title=\"Advertisement\"") {
                continue;
            }

            // Title extraction
            let title_raw =
                Self::extract_between(video_segment, "<div class=\"thumb_title\">", "<")
                    .unwrap_or("Unknown");
            let title = decode(title_raw.as_bytes())
                .to_string()
                .unwrap_or_else(|_| title_raw.to_string());

            // ID extraction
            let id = Self::extract_between(video_segment, "https://rule34video.com/video/", "/")
                .unwrap_or("0")
                .to_string();

            // Duration extraction
            let raw_duration =
                Self::extract_between(video_segment, "<div class=\"time\">", "<").unwrap_or("0:00");
            let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;

            // Views extraction
            let views_segment = Self::extract_between(video_segment, "<div class=\"views\">", "<");
            let views_count_str = views_segment
                .and_then(|s| s.split("</svg>").nth(1))
                .unwrap_or("0");
            let views = parse_abbreviated_number(views_count_str.trim()).unwrap_or(0);

            // Thumbnail extraction
            let thumb = Self::extract_between(video_segment, "data-original=\"", "\"")
                .unwrap_or("")
                .to_string();

            // URL extraction
            let url =
                Self::extract_between(video_segment, "<a class=\"th js-open-popup\" href=\"", "\"")
                    .unwrap_or("")
                    .to_string();

            items.push(
                VideoItem::new(id, title, url, "Rule34video".to_string(), thumb, duration)
                    .views(views),
            );
        }
        items
    }
}
#[async_trait]
@@ -205,30 +243,24 @@ impl Provider for Rule34videoProvider {
    async fn get_videos(
        &self,
        cache: VideoCache,
        _pool: DbPool,
        sort: String,
        query: Option<String>,
        page: String,
        _per_page: String,
        options: ServerOptions,
    ) -> Vec<VideoItem> {
        let page_num = page.parse::<u8>().unwrap_or(1);

        let result = match query {
            Some(q) => self.query(cache, page_num, &q, &sort, options).await,
            None => self.get(cache, page_num, &sort, options).await,
        };

        match result {
            Ok(v) => v,
            Err(e) => {
                eprintln!("Error fetching videos: {}", e);
                vec![]
            }
        }


@@ -1,15 +1,17 @@
use crate::DbPool;
use crate::providers::Provider;
use crate::util::cache::VideoCache;
use crate::util::discord::format_error_chain;
use crate::util::discord::send_discord_error_report;
use crate::util::requester::Requester;
use crate::util::time::parse_time_to_seconds;
use crate::videos::ServerOptions;
use crate::videos::VideoItem;
use async_trait::async_trait;
use error_chain::error_chain;
use htmlentity::entity::{ICodedDataTrait, decode};
use scraper::{Html, Selector};
use std::vec;

error_chain! {
    foreign_links {
@@ -17,16 +19,14 @@ error_chain! {
        HttpRequest(wreq::Error);
        JsonError(serde_json::Error);
    }
    errors {
        Parse(msg: String) {
            description("html parse error")
            display("html parse error: {}", msg)
        }
    }
}

#[derive(Debug, Clone)]
pub struct SxyprnProvider {
    url: String,
@@ -81,11 +81,30 @@ impl SxyprnProvider {
            }
        };

        let text = requester.get(&url_str, None).await.unwrap();
        // Pass a reference to options if needed, or reconstruct as needed
        let video_items = match self
            .get_video_items_from_html(text.clone(), pool, requester)
            .await
        {
            Ok(items) => items,
            Err(e) => {
                println!("Error parsing video items: {}", e);
                send_discord_error_report(
                    e.to_string(),
                    Some(format_error_chain(&e)),
                    Some("Sxyprn Provider"),
                    Some(&format!("URL: {}", url_str)),
                    file!(),
                    line!(),
                    module_path!(),
                ).await;
                return Ok(old_items);
            }
        };
        // let video_items: Vec<VideoItem> = self
        //     .get_video_items_from_html(text.clone(), pool, requester)
        //     .await;
        if !video_items.is_empty() {
            cache.remove(&url_str);
            cache.insert(url_str.clone(), video_items.clone());
@@ -115,7 +134,10 @@ impl SxyprnProvider {
        let search_string = query.replace(" ", "-");
        let url_str = format!(
            "{}/{}.html?page={}&sm={}",
            self.url,
            search_string,
            ((page as u32) - 1) * 20,
            sort_string
        );
        // Check our Video Cache. If the result is younger than 1 hour, we return it.
        let old_items = match cache.get(&url_str) {
@@ -131,10 +153,31 @@ impl SxyprnProvider {
                vec![]
            }
        };

        let text = requester.get(&url_str, None).await.unwrap();
        let video_items = match self
            .get_video_items_from_html(text.clone(), pool, requester)
            .await
        {
            Ok(items) => items,
            Err(e) => {
                println!("Error parsing video items: {}", e); // 1. Convert the error to a string immediately
                send_discord_error_report(
                    e.to_string(),
                    Some(format_error_chain(&e)),
                    Some("Sxyprn Provider"),
                    Some(&format!("URL: {}", url_str)),
                    file!(),
                    line!(),
                    module_path!(),
                ).await;
                return Ok(old_items);
            }
        };
        // let video_items: Vec<VideoItem> = self
        //     .get_video_items_from_html(text.clone(), pool, requester)
        //     .await;
        if !video_items.is_empty() {
            cache.remove(&url_str);
            cache.insert(url_str.clone(), video_items.clone());
@@ -147,41 +190,50 @@ impl SxyprnProvider {
    async fn get_video_items_from_html(
        &self,
        html: String,
        _pool: DbPool,
        _requester: Requester,
    ) -> Result<Vec<VideoItem>> {
        if html.is_empty() {
            return Ok(vec![]);
        }

        // take content before "<script async"
        let before_script = html
            .split("<script async")
            .next()
            .ok_or_else(|| ErrorKind::Parse("missing '<script async' split point".into()))?;

        // split into video segments (skip the first chunk)
        let raw_videos: Vec<&str> = before_script.split("post_el_small'").skip(1).collect();
        if raw_videos.is_empty() {
            return Err(ErrorKind::Parse("no 'post_el_small\\'' segments found".into()).into());
        }

        let mut items = Vec::new();

        for video_segment in raw_videos {
            // url id
            let url = video_segment
                .split("/post/")
                .nth(1)
                .and_then(|s| s.split('\'').next())
                .ok_or_else(|| ErrorKind::Parse("failed to extract /post/ url".into()))?
                .to_string();
            let video_url = format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", url);

            // title parts
            let title_parts = video_segment
                .split("post_text")
                .nth(1)
                .and_then(|s| s.split("style=''>").nth(1))
                .and_then(|s| s.split("</div>").next())
                .ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?;

            let document = Html::parse_document(title_parts);
            let selector = Selector::parse("*")
                .map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?;

            let mut texts = Vec::new();
            for element in document.select(&selector) {
@@ -190,86 +242,96 @@ impl SxyprnProvider {
                    texts.push(text.trim().to_string());
                }
            }

            let mut title = texts.join(" ");
            title = decode(title.as_bytes())
                .to_string()
                .unwrap_or(title)
                .replace(" ", " ");
            title = title
                .replace('\n', "")
                .replace(" + ", " ")
                .replace(" ", " ")
                .trim()
                .to_string();
            if title.to_ascii_lowercase().starts_with("new ") {
                title = title[4..].to_string();
            }

            // id (DON'T index [6])
            let id = video_url
                .split('/')
                .last()
                .ok_or_else(|| ErrorKind::Parse("failed to extract id from video_url".into()))?
                .split('?')
                .next()
                .unwrap_or("")
                .to_string();

            // thumb
            let thumb_path = video_segment
                .split("<img class='mini_post_vid_thumb lazyload'")
                .nth(1)
                .and_then(|s| s.split("data-src='").nth(1))
                .and_then(|s| s.split('\'').next())
                .ok_or_else(|| ErrorKind::Parse("failed to extract thumb".into()))?;
            let thumb = format!("https:{thumb_path}");

            // preview
            let preview = if video_segment.contains("class='hvp_player'") {
                Some(format!(
                    "https:{}",
                    video_segment
                        .split("class='hvp_player'")
                        .nth(1)
                        .and_then(|s| s.split(" src='").nth(1))
                        .and_then(|s| s.split('\'').next())
                        .ok_or_else(|| ErrorKind::Parse("failed to extract preview src".into()))?
                ))
            } else {
                None
            };

            // views
            let views = video_segment
                .split("<strong>·</strong> ")
                .nth(1)
                .and_then(|s| s.split_whitespace().next())
                .ok_or_else(|| ErrorKind::Parse("failed to extract views".into()))?
                .to_string();

            // duration
            let raw_duration = video_segment
                .split("duration_small")
                .nth(1)
                .and_then(|s| s.split("title='").nth(1))
                .and_then(|s| s.split('\'').nth(1))
                .and_then(|s| s.split('>').nth(1))
                .and_then(|s| s.split('<').next())
                .ok_or_else(|| ErrorKind::Parse("failed to extract duration".into()))?
                .to_string();
            let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;

            // stream urls (your filter condition looks suspicious; leaving as-is)
            let stream_urls = video_segment
                .split("extlink_icon extlink")
                .filter_map(|part| {
                    part.split("href='")
                        .last()
                        .and_then(|s| s.split('\'').next())
                        .map(|u| u.to_string())
                })
                .filter(|url| url.starts_with("https://lulustream."))
                .collect::<Vec<String>>();

            let video_item_url = stream_urls.first().cloned().unwrap_or_else(|| {
                format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", id)
            });

            let mut video_item = VideoItem::new(
                id,
                title,
@@ -279,12 +341,15 @@ impl SxyprnProvider {
                duration,
            )
            .views(views.parse::<u32>().unwrap_or(0));

            if let Some(p) = preview {
                video_item = video_item.preview(p);
            }

            items.push(video_item);
        }

        Ok(items)
    }
}


@@ -368,7 +368,6 @@ impl TnaflixProvider {
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                } else {
                    items.clone()
@@ -380,7 +379,7 @@ impl TnaflixProvider {
        };
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -427,7 +426,7 @@ impl TnaflixProvider {
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -555,7 +554,7 @@ impl Provider for TnaflixProvider {
            }
        }
    }
    fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
        Some(self.build_channel(clientversion))
    }
}


@@ -94,7 +94,6 @@ impl XxdbxProvider {
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                } else {
                    items.clone()
@@ -106,7 +105,7 @@ impl XxdbxProvider {
        };
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -156,7 +155,7 @@ impl XxdbxProvider {
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -279,7 +278,7 @@ impl Provider for XxdbxProvider {
            }
        }
    }
    fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
        Some(self.build_channel(clientversion))
    }
}


@@ -51,7 +51,6 @@ impl XxthotsProvider {
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                } else {
                    items.clone()
@@ -62,7 +61,7 @@ impl XxthotsProvider {
            }
        };
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -99,7 +98,7 @@ impl XxthotsProvider {
            }
        };
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);


@@ -46,7 +46,6 @@ impl YoujizzProvider {
        let old_items = match cache.get(&video_url) {
            Some((time, items)) => {
                if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
                    return Ok(items.clone());
                } else {
                    items.clone()
@@ -59,7 +58,7 @@ impl YoujizzProvider {
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);
@@ -95,7 +94,7 @@ impl YoujizzProvider {
        let mut requester = options.requester.clone().unwrap();
        let text = requester.get(&video_url, None).await.unwrap();
        let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
        if !video_items.is_empty() {
            cache.remove(&video_url);

src/proxies/javtiful.rs Normal file

@@ -0,0 +1,64 @@
use ntex::web;
use wreq::Version;
use crate::util::requester::Requester;
#[derive(Debug, Clone)]
pub struct JavtifulProxy {
}
impl JavtifulProxy {
pub fn new() -> Self {
JavtifulProxy {
}
}
pub async fn get_video_url(
&self,
url: String,
requester: web::types::State<Requester>,
) -> String {
let mut requester = requester.get_ref().clone();
let url = "https://javtiful.com/".to_string() + &url;
let text = requester.get(&url, None).await.unwrap_or("".to_string());
if text.is_empty() {
return "".to_string();
}
let video_id = url
.split('/')
.nth(4)
.unwrap_or("")
.to_string();
let token = text.split("data-csrf-token=\"")
.nth(1)
.and_then(|s| s.split('"').next())
.unwrap_or("")
.to_string();
let form = wreq::multipart::Form::new()
.text("video_id", video_id.clone())
.text("pid_c", "".to_string())
.text("token", token.clone());
let resp = match requester
.post_multipart(
"https://javtiful.com/ajax/get_cdn",
form,
vec![("Referer".to_string(), url.to_string())],
Some(Version::HTTP_11),
)
.await
{
Ok(r) => r,
Err(_) => return "".to_string(),
};
let text = resp.text().await.unwrap_or_default();
let json: serde_json::Value = serde_json::from_str(&text).unwrap_or(serde_json::Value::Null);
let video_url = json.get("playlists")
.map(|v| v.to_string().replace("\"", ""))
.unwrap_or_default();
return video_url;
}
}


@@ -4,10 +4,12 @@ use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
pub mod sxyprn;
pub mod hanimecdn;
pub mod javtiful;

#[derive(Debug, Clone)]
pub enum AnyProxy {
    Sxyprn(SxyprnProxy),
    Javtiful(javtiful::JavtifulProxy),
}

pub trait Proxy {
@@ -26,12 +28,8 @@ impl Proxy for AnyProxy {
        requester: web::types::State<Requester>,
    ) -> String {
        match self {
            AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
            AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
        }
    }
}


@@ -40,30 +40,34 @@ impl SxyprnProxy {
    ) -> String {
        let mut requester = requester.get_ref().clone();
        let url = "https://sxyprn.com/".to_string() + &url;
        let text = requester.get(&url, None).await.unwrap_or("".to_string());
        if text.is_empty() {
            return "".to_string();
        }
        let data_string = text.split("data-vnfo='").collect::<Vec<&str>>()[1]
            .split("\":\"").collect::<Vec<&str>>()[1]
            .split("\"}").collect::<Vec<&str>>()[0].replace("\\", "");

        let mut tmp = data_string
            .split("/")
            .map(|s| s.to_string())
            .collect::<Vec<String>>();

        tmp[1] = format!("{}8/{}", tmp[1], boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())));

        //preda
        tmp[5] = format!(
            "{}",
            tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
        );

        let sxyprn_video_url = format!("https://sxyprn.com{}", tmp.join("/"));
        let response = requester.get_raw(&sxyprn_video_url).await;
        match response {
            Ok(resp) => {
                return format!("https:{}", resp.headers().get("Location").unwrap().to_str().unwrap_or("").to_string());
            },
            Err(e) => {
                println!("Error fetching video URL: {}", e);


@@ -1,5 +1,6 @@
use ntex::web::{self, HttpRequest};
use crate::proxies::javtiful::JavtifulProxy;
use crate::proxies::sxyprn::SxyprnProxy;
use crate::util::requester::Requester;
use crate::proxies::*;
@@ -7,24 +8,24 @@ use crate::proxies::*;
pub fn config(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("/sxyprn/{endpoint}*")
            .route(web::post().to(proxy2redirect))
            .route(web::get().to(proxy2redirect)),
    )
    .service(
        web::resource("/javtiful/{endpoint}*")
            .route(web::post().to(proxy2redirect))
            .route(web::get().to(proxy2redirect)),
    )
    .service(
        web::resource("/hanime-cdn/{endpoint}*")
            .route(web::post().to(crate::proxies::hanimecdn::get_image))
            .route(web::get().to(crate::proxies::hanimecdn::get_image)),
    )
    ;
}

async fn proxy2redirect(req: HttpRequest,
    requester: web::types::State<Requester>,) -> Result<impl web::Responder, web::Error> {
    let proxy = get_proxy(req.uri().to_string().split("/").collect::<Vec<&str>>()[2]).unwrap();
    let endpoint = req.match_info().query("endpoint").to_string();
@@ -33,13 +34,14 @@ async fn sxyprn(req: HttpRequest,
_ => "Error".to_string(), _ => "Error".to_string(),
}; };
Ok(web::HttpResponse::Found() Ok(web::HttpResponse::Found()
.header("Location", format!("https:{}", video_url)) .header("Location", video_url)
.finish()) .finish())
} }
fn get_proxy(proxy: &str) -> Option<AnyProxy> { fn get_proxy(proxy: &str) -> Option<AnyProxy> {
match proxy { match proxy {
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())), "sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
"javtiful" => Some(AnyProxy::Javtiful(JavtifulProxy::new())),
_ => None, _ => None,
} }
} }


@@ -1,22 +1,28 @@
use std::time::SystemTime;
use std::sync::{Arc, Mutex};
use std::time::Duration;

use crate::videos::VideoItem;

#[derive(Clone)]
pub struct VideoCache {
    cache: Arc<Mutex<std::collections::HashMap<String, (SystemTime, Vec<VideoItem>)>>>, // url -> time+Items
    max_size: usize,
}

impl VideoCache {
    pub fn new() -> Self {
        VideoCache {
            cache: Arc::new(Mutex::new(std::collections::HashMap::new())),
            max_size: 100,
        }
    }

    pub fn max_size(&mut self, size: usize) -> &mut Self {
        self.max_size = size;
        self
    }

    pub fn get(&self, key: &str) -> Option<(SystemTime, Vec<VideoItem>)> {
        let cache = self.cache.lock().ok()?;
        cache.get(key).cloned()
@@ -24,14 +30,22 @@ impl VideoCache {
    pub fn insert(&self, key: String, value: Vec<VideoItem>) {
        if let Ok(mut cache) = self.cache.lock() {
            if cache.len() >= self.max_size {
                // Simple eviction policy: remove a random entry
                if let Some(first_key) = cache.keys().next().cloned() {
                    cache.remove(&first_key);
                }
            }
            cache.insert(key.clone(), (SystemTime::now(), value.clone()));
        }
    }

    pub fn remove(&self, key: &str) {
        if let Ok(mut cache) = self.cache.lock() {
            cache.remove(key);
        }
    }

    pub fn entries(&self) -> Option<Vec<(String, (SystemTime, Vec<VideoItem>))>> {
        if let Ok(cache) = self.cache.lock() {
            // Return a cloned vector of the cache entries
@@ -40,21 +54,24 @@ impl VideoCache {
        None
    }

    pub async fn check(&self) -> Result<(), Box<dyn std::error::Error>> {
        let iter = match self.entries() {
            Some(iter) => iter,
            None => {
                return Err(Box::new(std::io::Error::new(
                    std::io::ErrorKind::Other,
                    "Could not get entries",
                )));
            }
        };
        for (key, (time, _items)) in iter {
            if let Ok(elapsed) = time.elapsed() {
                if elapsed > Duration::from_secs(60 * 60) {
                    self.remove(&key);
                }
            }
        }
        Ok(())
    }
}
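For orientation, a minimal usage sketch of the cache API above; the key layout and the VideoItem values are illustrative placeholders, not taken from the commits, and VideoItem::new is called with id, title, url, provider name, thumbnail and duration exactly as the providers do:

use crate::util::cache::VideoCache;
use crate::videos::VideoItem;

fn cache_usage_sketch() {
    // Raise the eviction limit from the default of 100 entries.
    let mut cache = VideoCache::new();
    cache.max_size(500);

    // Store a page of results under a caller-chosen key (placeholder values).
    let items = vec![VideoItem::new(
        "123".to_string(),                           // id (hypothetical)
        "Example title".to_string(),                 // title
        "https://example.com/video/123".to_string(), // url (placeholder)
        "ExampleProvider".to_string(),               // provider name
        "https://example.com/thumb.jpg".to_string(), // thumbnail
        120,                                         // duration in seconds
    )];
    cache.insert("exampleprovider:1:post_date".to_string(), items);

    // Lookups return the insertion time, so callers decide freshness themselves,
    // as the providers do with `time.elapsed() < 60 * 5`.
    if let Some((stored_at, cached)) = cache.get("exampleprovider:1:post_date") {
        let age = stored_at.elapsed().unwrap_or_default().as_secs();
        println!("{} cached items, {}s old", cached.len(), age);
    }
}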

src/util/discord.rs Normal file

@@ -0,0 +1,107 @@
use std::error::Error;
use std::fmt::Write as _;
use std::time::{SystemTime, UNIX_EPOCH};
use dashmap::DashMap;
use once_cell::sync::Lazy;
use serde_json::json;
use crate::util::requester;
// Global cache: Map<ErrorSignature, LastSentTimestamp>
static ERROR_CACHE: Lazy<DashMap<String, u64>> = Lazy::new(DashMap::new);
// const COOLDOWN_SECONDS: u64 = 3600; // 1 Hour cooldown
pub fn format_error_chain(err: &dyn Error) -> String {
let mut chain_str = String::new();
let mut current_err: Option<&dyn Error> = Some(err);
let mut index = 1;
while let Some(e) = current_err {
let _ = writeln!(chain_str, "{}. {}", index, e);
current_err = e.source();
index += 1;
}
chain_str
}
pub async fn send_discord_error_report(
error_msg: String,
error_chain: Option<String>,
context: Option<&str>,
extra_info: Option<&str>,
file: &str,
line: u32,
module: &str,
) {
let now = SystemTime::now()
.duration_since(UNIX_EPOCH)
.map(|d| d.as_secs())
.unwrap_or(0);
// --- Deduplication Logic ---
// Create a unique key based on error content and location
let error_signature = format!("{}-{}-{}", error_msg, file, line);
if let Some(_) = ERROR_CACHE.get(&error_signature) {
// if now - *last_sent < COOLDOWN_SECONDS {
// Error is still in cooldown, skip sending
return;
// }
}
// Update the cache with the current timestamp
ERROR_CACHE.insert(error_signature, now);
// ---------------------------
let webhook_url = match std::env::var("DISCORD_WEBHOOK") {
Ok(url) => url,
Err(_) => return,
};
const MAX_FIELD: usize = 1024;
let truncate = |s: &str| {
if s.len() > MAX_FIELD {
format!("{}", &s[..MAX_FIELD - 1])
} else {
s.to_string()
}
};
let payload = json!({
"embeds": [{
"title": "🚨 Rust Error Report",
"color": 0xE74C3C,
"fields": [
{
"name": "Error",
"value": format!("```{}```", truncate(&error_msg)),
"inline": false
},
{
"name": "Error Chain",
"value": truncate(&error_chain.unwrap_or_else(|| "No chain provided".to_string())),
"inline": false
},
{
"name": "Location",
"value": format!("`{}`:{}\n`{}`", file, line, module),
"inline": true
},
{
"name": "Context",
"value": truncate(context.unwrap_or("n/a")),
"inline": true
},
{
"name": "Extra Info",
"value": truncate(extra_info.unwrap_or("n/a")),
"inline": false
}
],
"footer": {
"text": format!("Unix time: {} | Cooldown active", now)
}
}]
});
let mut requester = requester::Requester::new();
let _ = requester.post_json(&webhook_url, &payload, vec![]).await;
}
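A minimal call-site sketch for the reporter above, mirroring how the providers invoke it; the context string and URL are placeholders:

use crate::util::discord::send_discord_error_report;

async fn report_sketch(err: &dyn std::error::Error) {
    let url = "https://example.com/failed/request"; // placeholder
    // file!(), line!() and module_path!() identify the call site; repeated
    // signatures are suppressed via ERROR_CACHE, and nothing is sent unless
    // the DISCORD_WEBHOOK environment variable is set.
    send_discord_error_report(
        err.to_string(),
        None,                           // no formatted chain in this sketch
        Some("Example context"),
        Some(&format!("URL: {}", url)),
        file!(),
        line!(),
        module_path!(),
    )
    .await;
}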


@@ -13,20 +13,20 @@ pub struct FlareSolverrRequest {
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlaresolverrCookie {
    pub name: String,
    pub value: String,
    pub domain: String,
    pub path: String,
    pub expires: f64,
    pub size: u64,
    pub httpOnly: bool,
    pub secure: bool,
    pub session: bool,
    pub sameSite: Option<String>,
    pub priority: String,
    pub sameParty: bool,
    pub sourceScheme: String,
    pub sourcePort: u32,
    pub partitionKey: Option<String>,
}
@@ -39,39 +39,26 @@ pub struct FlareSolverrSolution {
    pub cookies: Vec<FlaresolverrCookie>,
    pub userAgent: String,
}

#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrResponse {
    pub status: String,
    pub message: String,
    pub solution: FlareSolverrSolution,
    pub startTimestamp: u64,
    pub endTimestamp: u64,
    pub version: String,
}

pub struct Flaresolverr {
    url: String,
    proxy: bool,
}

impl Flaresolverr {
    pub fn new(url: String) -> Self {
        Self {
            url,
            proxy: false,
        }
    }
@@ -88,7 +75,7 @@ impl Flaresolverr {
            .emulation(Emulation::Firefox136)
            .build()?;

        let mut req = client
            .post(&self.url)
            .header("Content-Type", "application/json")
            .json(&json!({
@@ -99,14 +86,20 @@ impl Flaresolverr {
        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
                match Proxy::all(&proxy_url) {
                    Ok(proxy) => {
                        req = req.proxy(proxy);
                    }
                    Err(e) => {
                        eprintln!("Invalid proxy URL '{}': {}", proxy_url, e);
                    }
                }
            }
        }

        let response = req.send().await?;
        let body = response.json::<FlareSolverrResponse>().await?;
        Ok(body)
    }
}


@@ -2,6 +2,7 @@ pub mod time;
pub mod flaresolverr;
pub mod cache;
pub mod requester;
pub mod discord;

pub fn parse_abbreviated_number(s: &str) -> Option<u32> {
    let s = s.trim();
View File

@@ -1,4 +1,5 @@
use serde::Serialize;
use wreq::multipart::Form;
use std::env;
use wreq::Client;
use wreq::Proxy;
@@ -46,23 +47,6 @@ impl Requester {
        self.proxy = proxy;
    }

    pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {
        let client = Client::builder()
            .cert_verification(false)
@@ -83,7 +67,11 @@ impl Requester {
        request.send().await
    }

    pub async fn get_raw_with_headers(
        &mut self,
        url: &str,
        headers: Vec<(String, String)>,
    ) -> Result<Response, wreq::Error> {
        let client = Client::builder()
            .cert_verification(false)
            .emulation(Emulation::Firefox136)
@@ -106,18 +94,16 @@ impl Requester {
        request.send().await
    }

    pub async fn post_json<S>(
        &mut self,
        url: &str,
        data: &S,
        headers: Vec<(String, String)>,
    ) -> Result<Response, wreq::Error>
    where
        S: Serialize + ?Sized,
    {
        let mut request = self.client.post(url).version(Version::HTTP_11).json(data);

        // Set custom headers
        for (key, value) in headers.iter() {
@@ -134,8 +120,18 @@ impl Requester {
        request.send().await
    }

    pub async fn post(
        &mut self,
        url: &str,
        data: &str,
        headers: Vec<(&str, &str)>,
    ) -> Result<Response, wreq::Error> {
        let mut request = self.client.post(url).version(Version::HTTP_11).body(data.to_string());

        // Set custom headers
        for (key, value) in headers.iter() {
            request = request.header(key.to_string(), value.to_string());
        }

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
@@ -144,11 +140,69 @@ impl Requester {
            }
        }

        request.send().await
    }

    pub async fn post_multipart(
        &mut self,
        url: &str,
        form: Form,
        headers: Vec<(String, String)>,
        _http_version: Option<Version>,
    ) -> Result<Response, wreq::Error> {
        let http_version = match _http_version {
            Some(v) => v,
            None => Version::HTTP_11,
        };
        let mut request = self.client.post(url).multipart(form).version(http_version);

        // Set custom headers
        for (key, value) in headers.iter() {
            request = request.header(key, value);
        }

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
                let proxy = Proxy::all(&proxy_url).unwrap();
                request = request.proxy(proxy);
            }
        }

        request.send().await
    }

    pub async fn get(&mut self, url: &str, _http_version: Option<Version>) -> Result<String, AnyErr> {
        let http_version = match _http_version {
            Some(v) => v,
            None => Version::HTTP_11,
        };
        loop {
            let mut request = self.client.get(url).version(http_version);

            if self.proxy {
                if let Ok(proxy_url) = env::var("BURP_URL") {
                    let proxy = Proxy::all(&proxy_url).unwrap();
                    request = request.proxy(proxy);
                }
            }

            let response = request.send().await?;

            if response.status().is_success() || response.status().as_u16() == 404 {
                return Ok(response.text().await?);
            }
            if response.status().as_u16() == 429 {
                tokio::time::sleep(std::time::Duration::from_secs(1)).await;
                continue;
            } else {
                println!(
                    "Direct request to {} failed with status: {}",
                    url,
                    response.status()
                );
                break;
            }
        }

        // If direct request failed, try FlareSolverr. Map its error to a Send+Sync error immediately,
        // so no non-Send error value lives across later `.await`s.


@@ -48,6 +48,7 @@ pub struct ServerOptions {
    pub stars: Option<String>, //
    pub categories: Option<String>, //
    pub duration: Option<String>, //
    pub sort: Option<String>, //
}

#[derive(serde::Serialize, Debug)]
@@ -128,6 +129,9 @@ impl VideoItem {
        }
    }

    pub fn tags(mut self, tags: Vec<String>) -> Self {
        if tags.is_empty() {
            return self;
        }
        self.tags = Some(tags);
        self
    }
@@ -156,6 +160,9 @@ impl VideoItem {
        self
    }

    pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self {
        if formats.is_empty() {
            return self;
        }
        self.formats = Some(formats);
        self
    }
@@ -270,6 +277,15 @@ impl VideoFormat {
            headers.insert(key, value);
        }
    }

    pub fn http_header(&mut self, key: String, value: String) -> Self {
        if self.http_headers.is_none() {
            self.http_headers = Some(HashMap::new());
        }
        if let Some(headers) = &mut self.http_headers {
            headers.insert(key, value);
        }
        self.to_owned()
    }

    pub fn format_id(mut self, format_id: String) -> Self {
        self.format_id = Some(format_id);
        self