Compare commits
63 Commits
f91f06c45e
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
5a08d2afe7 | ||
|
|
5224a2eb47 | ||
|
|
e7fb0ed723 | ||
|
|
6a7bc68849 | ||
|
|
27e2bcdbba | ||
|
|
182eb8ac01 | ||
|
|
e2f3bc2ecb | ||
|
|
4f9c7835bf | ||
|
|
87b9d20240 | ||
|
|
708560d2e8 | ||
|
|
cacd45d893 | ||
|
|
602dbe50f0 | ||
|
|
cce6104df3 | ||
|
|
34992242b7 | ||
|
|
aaff7d00c6 | ||
|
|
eb49998593 | ||
|
|
cf04441a69 | ||
|
|
6fac9d6d45 | ||
|
|
2edb12a024 | ||
|
|
7f3ae83b1b | ||
|
|
0b3f1fdc1d | ||
|
|
792e246121 | ||
|
|
0fc3bed6a7 | ||
|
|
c0368b2876 | ||
|
|
4a7528c516 | ||
|
|
97eeccf2bd | ||
|
|
5ab2afa967 | ||
|
|
262b908692 | ||
|
|
89eecbe790 | ||
|
|
27bb3daec4 | ||
|
|
f1eb3c236b | ||
|
|
e7854ac1ac | ||
|
|
ca67eff142 | ||
|
|
0e347234b3 | ||
|
|
11c8c1a48f | ||
|
|
6536fb13b3 | ||
|
|
9789afb12b | ||
|
|
b986faa1d4 | ||
|
|
7124b388fa | ||
|
|
632931f515 | ||
|
|
9739560c03 | ||
|
|
80d874a004 | ||
|
|
64dc7455ee | ||
|
|
9e30eedc77 | ||
|
|
75e28608bd | ||
|
|
e22a3f2d6d | ||
|
|
07b812be64 | ||
|
|
61e38caed5 | ||
|
|
e5a6c8decc | ||
|
|
d856ade32b | ||
|
|
2de6a7d42b | ||
|
|
39e38249b7 | ||
|
|
e924c89573 | ||
|
|
3f57569511 | ||
|
|
23190ee05c | ||
|
|
12053ce6db | ||
|
|
5522f2e37d | ||
|
|
8f885c79d4 | ||
|
|
d7e7f70bd2 | ||
|
|
0e02a1b821 | ||
|
|
cafb990fd4 | ||
|
|
53ac33f856 | ||
|
|
ef57172fdd |
@@ -15,8 +15,8 @@ ntex = { version = "2.15.1", features = ["tokio"] }
|
||||
ntex-files = "2.0.0"
|
||||
serde = "1.0.228"
|
||||
serde_json = "1.0.145"
|
||||
tokio = { version = "1.47.1", features = ["full"] }
|
||||
wreq = { version = "5.3.0", features = ["full", "cookies"] }
|
||||
tokio = { version = "1.49", features = ["full"] }
|
||||
wreq = { version = "5.3.0", features = ["full", "cookies", "multipart"] }
|
||||
wreq-util = "2"
|
||||
percent-encoding = "2.3.2"
|
||||
capitalize = "0.3.4"
|
||||
@@ -27,6 +27,8 @@ once_cell = "1.21.3"
|
||||
rustc-hash = "2.1.1"
|
||||
async-trait = "0.1"
|
||||
regex = "1.12.2"
|
||||
titlecase = "3.6.0"
|
||||
dashmap = "6.1.0"
|
||||
|
||||
[lints.rust]
|
||||
unexpected_cfgs = "allow"
|
||||
|
||||
@@ -51,7 +51,6 @@ impl HentaimoonProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
|
||||
@@ -1,197 +0,0 @@
|
||||
use crate::util::parse_abbreviated_number;
|
||||
use crate::DbPool;
|
||||
use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::flaresolverr::{FlareSolverrRequest, Flaresolverr};
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use std::env;
|
||||
use std::vec;
|
||||
use wreq::{Client, Proxy};
|
||||
use wreq_util::Emulation;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NoodlemagazineProvider {
|
||||
url: String,
|
||||
}
|
||||
impl NoodlemagazineProvider {
|
||||
pub fn new() -> Self {
|
||||
NoodlemagazineProvider {
|
||||
url: "https://noodlemagazine.com".to_string(),
|
||||
}
|
||||
}
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
|
||||
let video_url = format!("{}/popular/recent?p={}", self.url, page-1);
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let search_string = query.replace(" ", "%20");
|
||||
let video_url = format!("{}/video/{}?p={}", self.url, search_string, page);
|
||||
// Check our Video Cache. If the result is younger than 1 hour, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
}
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
let raw_videos = html.split("- Made with <svg ").collect::<Vec<&str>>()[0]
|
||||
.split("<div class=\"item\">")
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.to_vec();
|
||||
for video_segment in &raw_videos {
|
||||
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
|
||||
// for (index, line) in vid.iter().enumerate() {
|
||||
// println!("Line {}: {}", index, line);
|
||||
// }
|
||||
let video_url: String = format!("{}{}",self.url, video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0].to_string());
|
||||
let mut title = video_segment.split("\"title\">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
// html decode
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title);
|
||||
let id = video_url.split("/").collect::<Vec<&str>>()[4].to_string();
|
||||
let raw_duration = video_segment.split("#clock-o").collect::<Vec<&str>>()[1]
|
||||
.split("</svg>").collect::<Vec<&str>>()[1]
|
||||
.split("<").collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
let thumb = video_segment.split("<img ").collect::<Vec<&str>>()[1]
|
||||
.split("data-src=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
|
||||
let views_part = video_segment.split("#eye").collect::<Vec<&str>>()[1]
|
||||
.split("</svg>").collect::<Vec<&str>>()[1]
|
||||
.split("<").collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
let views = parse_abbreviated_number(&views_part).unwrap_or(0) as u32;
|
||||
|
||||
let video_item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url.to_string(),
|
||||
"noodlemagazine".to_string(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.views(views)
|
||||
;
|
||||
items.push(video_item);
|
||||
}
|
||||
return items;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
impl Provider for NoodlemagazineProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = per_page;
|
||||
let _ = pool;
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => {
|
||||
self.query(cache, page.parse::<u8>().unwrap_or(1), &q,options)
|
||||
.await
|
||||
}
|
||||
None => {
|
||||
self.get(cache, page.parse::<u8>().unwrap_or(1), options)
|
||||
.await
|
||||
}
|
||||
};
|
||||
match videos {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -38,7 +38,7 @@ impl SpankbangProvider {
|
||||
let old_items = match cache.get(&url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
|
||||
println!("Cache hit for URL: {}", url);
|
||||
// println!("Cache hit for URL: {}", url);
|
||||
return Ok(items.clone());
|
||||
}
|
||||
else{
|
||||
@@ -123,7 +123,7 @@ impl SpankbangProvider {
|
||||
let old_items = match cache.get(&url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
|
||||
println!("Cache hit for URL: {}", url);
|
||||
// println!("Cache hit for URL: {}", url);
|
||||
return Ok(items.clone());
|
||||
}
|
||||
else{
|
||||
|
||||
@@ -14,17 +14,33 @@ services:
|
||||
environment:
|
||||
- RUST_LOG=info
|
||||
- BURP_URL=http://127.0.0.1:8081 # local burpsuite proxy for crawler analysis
|
||||
- PROXY=1 # 1 for enable, else disabled
|
||||
- PROXY=0 # 1 for enable, else disabled
|
||||
- DATABASE_URL=hottub.db # sqlite db to store hard to get videos for easy access
|
||||
- FLARE_URL=http://flaresolverr:8191/v1 # flaresolverr to get around cloudflare 403 codes
|
||||
- DOMAIN=hottub.spacemoehre.de # optional for the 302 forward on "/"
|
||||
- DOMAIN=hottub.spacemoehre.de # optional for the 302 forward on "/" to
|
||||
restart: unless-stopped
|
||||
working_dir: /app
|
||||
ports:
|
||||
- 80:18080
|
||||
- 6901:6901 # vnc port to access burpsuite
|
||||
- 8081:8080 # burpsuite port of http proxy
|
||||
- 8081:8080 # burpsuite port of http(s) proxy
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m" # Maximum size of each log file (e.g., 10MB)
|
||||
max-file: "3" # Maximum number of log files to keep
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "curl -s -o /dev/null -w '%{http_code}' http://127.0.0.1:18080/api/status | grep -q 200"]
|
||||
interval: 30s
|
||||
timeout: 5s
|
||||
retries: 3
|
||||
start_period: 1s
|
||||
ulimits:
|
||||
nofile:
|
||||
soft: 65536
|
||||
hard: 65536
|
||||
|
||||
# flaresolverr to bypass cloudflare protections
|
||||
flaresolverr:
|
||||
container_name: flaresolverr
|
||||
ports:
|
||||
@@ -39,9 +55,4 @@ services:
|
||||
max-size: "10m" # Maximum size of each log file (e.g., 10MB)
|
||||
max-file: "3" # Maximum number of log files to keep
|
||||
|
||||
restarter: # restarts the flaresolverr so its always ready for work
|
||||
image: docker:cli
|
||||
container_name: flaresolverr-restarter
|
||||
volumes: ["/var/run/docker.sock:/var/run/docker.sock"]
|
||||
command: ["/bin/sh", "-c", "while true; do sleep 26400; docker restart flaresolverr; done"]
|
||||
restart: unless-stopped
|
||||
|
||||
|
||||
196
src/api.rs
196
src/api.rs
@@ -3,19 +3,19 @@ use ntex::http::header;
|
||||
use ntex::web;
|
||||
use ntex::web::HttpRequest;
|
||||
use std::cmp::Ordering;
|
||||
use std::fs;
|
||||
use std::{fs, io};
|
||||
use tokio::task;
|
||||
|
||||
use crate::providers::all::AllProvider;
|
||||
use crate::providers::hanime::HanimeProvider;
|
||||
use crate::providers::okporn::OkpornProvider;
|
||||
use crate::providers::perverzija::PerverzijaProvider;
|
||||
use crate::providers::pmvhaven::PmvhavenProvider;
|
||||
use crate::providers::pornhub::PornhubProvider;
|
||||
use crate::providers::redtube::RedtubeProvider;
|
||||
use crate::providers::rule34video::Rule34videoProvider;
|
||||
// use crate::providers::spankbang::SpankbangProvider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::send_discord_error_report;
|
||||
use crate::util::requester::Requester;
|
||||
use crate::{DbPool, db, status::*, videos::*};
|
||||
use cute::c;
|
||||
@@ -105,7 +105,12 @@ pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
web::resource("/videos")
|
||||
// .route(web::get().to(videos_get))
|
||||
.route(web::post().to(videos_post)),
|
||||
);
|
||||
)
|
||||
.service(
|
||||
web::resource("/test")
|
||||
.route(web::get().to(test))
|
||||
)
|
||||
;
|
||||
}
|
||||
|
||||
async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
@@ -158,88 +163,16 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
id: "lg".to_string(),
|
||||
title: "Longest".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "cm".to_string(),
|
||||
title: "Newest".to_string(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
}],
|
||||
nsfw: true,
|
||||
cacheDuration: Some(1800),
|
||||
});
|
||||
if clientversion >= ClientVersion::new(22, 101, "22e".to_string()) {
|
||||
// pmvhaven
|
||||
status.add_channel(Channel {
|
||||
id: "pmvhaven".to_string(),
|
||||
name: "Pmvhaven".to_string(),
|
||||
description: "Explore a curated collection of captivating PMV".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=pmvhaven.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: vec![],
|
||||
options: vec![
|
||||
ChannelOption {
|
||||
id: "category".to_string(),
|
||||
title: "Category".to_string(),
|
||||
description: "Category of PMV Video get".to_string(), //"Sort the videos by Date or Name.".to_string(),
|
||||
systemImage: "folder".to_string(),
|
||||
colorName: "yellow".to_string(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "all".to_string(),
|
||||
title: "All".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "pmv".to_string(),
|
||||
title: "PMV".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "hmv".to_string(),
|
||||
title: "HMV".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "tiktok".to_string(),
|
||||
title: "Tiktok".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "koreanbj".to_string(),
|
||||
title: "KoreanBJ".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "hypno".to_string(),
|
||||
title: "Hypno".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "other".to_string(),
|
||||
title: "Other".to_string(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
},
|
||||
ChannelOption {
|
||||
id: "sort".to_string(),
|
||||
title: "Filter".to_string(),
|
||||
description: "Filter PMV Videos".to_string(),
|
||||
systemImage: "list.number".to_string(),
|
||||
colorName: "blue".to_string(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "Newest".to_string(),
|
||||
title: "Newest".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "Top Rated".to_string(),
|
||||
title: "Top Rated".to_string(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "Most Viewed".to_string(),
|
||||
title: "Most Viewed".to_string(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
},
|
||||
],
|
||||
nsfw: true,
|
||||
cacheDuration: Some(1800),
|
||||
});
|
||||
}
|
||||
if clientversion >= ClientVersion::new(22, 97, "22a".to_string()) {
|
||||
// perverzija
|
||||
status.add_channel(Channel {
|
||||
@@ -418,40 +351,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
cacheDuration: None,
|
||||
});
|
||||
|
||||
// status.add_channel(Channel {
|
||||
// id: "spankbang".to_string(),
|
||||
// name: "SpankBang".to_string(),
|
||||
// description: "Popular Porn Videos - SpankBang".to_string(),
|
||||
// premium: false,
|
||||
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=spankbang.com".to_string(),
|
||||
// status: "active".to_string(),
|
||||
// categories: vec![],
|
||||
// options: vec![ChannelOption {
|
||||
// id: "sort".to_string(),
|
||||
// title: "Sort".to_string(),
|
||||
// description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
|
||||
// systemImage: "list.number".to_string(),
|
||||
// colorName: "blue".to_string(),
|
||||
// options: vec![
|
||||
// FilterOption {
|
||||
// id: "trending_videos".to_string(),
|
||||
// title: "Trending".to_string(),
|
||||
// },
|
||||
// FilterOption {
|
||||
// id: "new_videos".to_string(),
|
||||
// title: "New".to_string(),
|
||||
// },
|
||||
// FilterOption {
|
||||
// id: "most_popular".to_string(),
|
||||
// title: "Popular".to_string(),
|
||||
// },
|
||||
// ],
|
||||
// multiSelect: false,
|
||||
// }],
|
||||
// nsfw: true,
|
||||
//cacheDuration: Some(1800),
|
||||
// });
|
||||
|
||||
// rule34video
|
||||
status.add_channel(Channel {
|
||||
id: "rule34video".to_string(),
|
||||
@@ -720,41 +619,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
cacheDuration: Some(1800),
|
||||
});
|
||||
|
||||
// // hentaimoon
|
||||
// status.add_channel(Channel {
|
||||
// id: "hentaimoon".to_string(),
|
||||
// name: "Hentai Moon".to_string(),
|
||||
// description: "Your Hentai Sputnik".to_string(),
|
||||
// premium: false,
|
||||
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=hentai-moon.com".to_string(),
|
||||
// status: "active".to_string(),
|
||||
// categories: vec![],
|
||||
// options: vec![ChannelOption {
|
||||
// id: "sort".to_string(),
|
||||
// title: "Sort".to_string(),
|
||||
// description: "Sort the Videos".to_string(), //"Sort the videos by Date or Name.".to_string(),
|
||||
// systemImage: "list.number".to_string(),
|
||||
// colorName: "blue".to_string(),
|
||||
// options: vec![
|
||||
// FilterOption {
|
||||
// id: "new".to_string(),
|
||||
// title: "New".to_string(),
|
||||
// },
|
||||
// FilterOption {
|
||||
// id: "popular".to_string(),
|
||||
// title: "Popular".to_string(),
|
||||
// },
|
||||
// FilterOption {
|
||||
// id: "top-rated".to_string(),
|
||||
// title: "Top Rated".to_string(),
|
||||
// },
|
||||
// ],
|
||||
// multiSelect: false,
|
||||
// }],
|
||||
// nsfw: true,
|
||||
// cacheDuration: Some(1800),
|
||||
// });
|
||||
|
||||
// xxthots
|
||||
status.add_channel(Channel {
|
||||
id: "xxthots".to_string(),
|
||||
@@ -925,20 +789,6 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
cacheDuration: None,
|
||||
});
|
||||
|
||||
// noodlemagazine
|
||||
// status.add_channel(Channel {
|
||||
// id: "noodlemagazine".to_string(),
|
||||
// name: "Noodlemagazine".to_string(),
|
||||
// description: "Discover the Best Adult Videos".to_string(),
|
||||
// premium: false,
|
||||
// favicon: "https://www.google.com/s2/favicons?sz=64&domain=noodlemagazine.com".to_string(),
|
||||
// status: "active".to_string(),
|
||||
// categories: vec![],
|
||||
// options: vec![],
|
||||
// nsfw: true,
|
||||
// cacheDuration: Some(1800),
|
||||
// });
|
||||
|
||||
//missav
|
||||
status.add_channel(Channel {
|
||||
id: "missav".to_string(),
|
||||
@@ -1138,7 +988,9 @@ async fn status(req: HttpRequest) -> Result<impl web::Responder, web::Error> {
|
||||
}
|
||||
|
||||
for provider in ALL_PROVIDERS.values() {
|
||||
status.add_channel(provider.get_channel(clientversion.clone()));
|
||||
if let Some(channel) = provider.get_channel(clientversion.clone()){
|
||||
status.add_channel(channel);
|
||||
}
|
||||
}
|
||||
status.iconUrl = format!("http://{}/favicon.ico", host).to_string();
|
||||
Ok(web::HttpResponse::Ok().json(&status))
|
||||
@@ -1241,6 +1093,7 @@ async fn videos_post(
|
||||
stars: Some(stars),
|
||||
categories: Some(categories),
|
||||
duration: Some(duration),
|
||||
sort: Some(sort.clone())
|
||||
};
|
||||
let video_items = provider
|
||||
.get_videos(
|
||||
@@ -1308,7 +1161,6 @@ pub fn get_provider(channel: &str) -> Option<DynProvider> {
|
||||
"perverzija" => Some(Arc::new(PerverzijaProvider::new())),
|
||||
"hanime" => Some(Arc::new(HanimeProvider::new())),
|
||||
"pornhub" => Some(Arc::new(PornhubProvider::new())),
|
||||
"pmvhaven" => Some(Arc::new(PmvhavenProvider::new())),
|
||||
"rule34video" => Some(Arc::new(Rule34videoProvider::new())),
|
||||
"redtube" => Some(Arc::new(RedtubeProvider::new())),
|
||||
"okporn" => Some(Arc::new(OkpornProvider::new())),
|
||||
@@ -1334,3 +1186,19 @@ pub fn get_provider(channel: &str) -> Option<DynProvider> {
|
||||
x => ALL_PROVIDERS.get(x).cloned(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn test() -> Result<impl web::Responder, web::Error> {
|
||||
// Simply await the function instead of blocking the thread
|
||||
let e = io::Error::new(io::ErrorKind::Other, "test error");
|
||||
let _ = send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some("chain_str".to_string()),
|
||||
Some("Context"),
|
||||
Some("xtra info"),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
|
||||
Ok(web::HttpResponse::Ok())
|
||||
}
|
||||
16
src/main.rs
16
src/main.rs
@@ -2,7 +2,7 @@
|
||||
#![allow(non_snake_case)]
|
||||
|
||||
|
||||
use std::env;
|
||||
use std::{env, thread};
|
||||
|
||||
use diesel::{r2d2::{self, ConnectionManager}, SqliteConnection};
|
||||
use dotenvy::dotenv;
|
||||
@@ -49,9 +49,19 @@ async fn main() -> std::io::Result<()> {
|
||||
let mut requester = util::requester::Requester::new();
|
||||
requester.set_proxy(env::var("PROXY").unwrap_or("0".to_string()) != "0".to_string());
|
||||
|
||||
let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new();
|
||||
let cache: util::cache::VideoCache = crate::util::cache::VideoCache::new().max_size(100_000).to_owned();
|
||||
|
||||
thread::spawn(move || {
|
||||
// Create a tiny runtime just for these async tasks
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
.expect("build tokio runtime");
|
||||
|
||||
providers::init_providers_now();
|
||||
rt.block_on(async move {
|
||||
providers::init_providers_now();
|
||||
});
|
||||
});
|
||||
|
||||
web::HttpServer::new(move || {
|
||||
web::App::new()
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use std::fs;
|
||||
use std::time::Duration;
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use futures::future::join_all;
|
||||
use futures::StreamExt;
|
||||
use futures::stream::FuturesUnordered;
|
||||
use crate::api::{get_provider, ClientVersion};
|
||||
use crate::providers::{DynProvider, Provider};
|
||||
use crate::status::Channel;
|
||||
@@ -41,7 +43,7 @@ impl Provider for AllProvider {
|
||||
per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let mut sites_str = options.clone().sites.unwrap();
|
||||
let mut sites_str = options.clone().sites.unwrap_or_default();
|
||||
if sites_str.is_empty() {
|
||||
let files = fs::read_dir("./src/providers").unwrap();
|
||||
let providers = files.map(|entry| entry.unwrap().file_name())
|
||||
@@ -51,35 +53,69 @@ impl Provider for AllProvider {
|
||||
.collect::<Vec<String>>();
|
||||
sites_str = providers.join(",");
|
||||
}
|
||||
|
||||
let providers: Vec<DynProvider> = sites_str
|
||||
.split(',')
|
||||
.filter(|s| !s.is_empty())
|
||||
.filter_map(|s| get_provider(s)) // assumes get_provider -> Option<DynProvider>
|
||||
.filter_map(|s| get_provider(s))
|
||||
.collect();
|
||||
|
||||
let futures = providers.iter().map(|provider| {
|
||||
provider.get_videos(
|
||||
cache.clone(),
|
||||
pool.clone(),
|
||||
sort.clone(),
|
||||
query.clone(),
|
||||
page.clone(),
|
||||
per_page.clone(),
|
||||
options.clone()
|
||||
)
|
||||
}).collect::<Vec<_>>();
|
||||
let results:Vec<Vec<VideoItem>> = join_all(futures).await;
|
||||
let video_items: Vec<VideoItem> = interleave(&results);
|
||||
|
||||
|
||||
return video_items;
|
||||
}
|
||||
|
||||
fn get_channel(&self,clientversion:ClientVersion) -> Channel {
|
||||
println!("Getting channel for placeholder with client version: {:?}",clientversion);
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
id:"placeholder".to_string(),name:"PLACEHOLDER".to_string(),description:"PLACEHOLDER FOR PARENT CLASS".to_string(),premium:false,favicon:"https://www.google.com/s2/favicons?sz=64&domain=missav.ws".to_string(),status:"active".to_string(),categories:vec![],options:vec![],nsfw:true,cacheDuration:None,
|
||||
let mut futures = FuturesUnordered::new();
|
||||
|
||||
for provider in providers {
|
||||
let cache = cache.clone();
|
||||
let pool = pool.clone();
|
||||
let sort = sort.clone();
|
||||
let query = query.clone();
|
||||
let page = page.clone();
|
||||
let per_page = per_page.clone();
|
||||
let options = options.clone();
|
||||
|
||||
// Spawn the task so it lives independently of this function
|
||||
futures.push(tokio::spawn(async move {
|
||||
provider.get_videos(cache, pool, sort, query, page, per_page, options).await
|
||||
}));
|
||||
}
|
||||
|
||||
let mut all_results = Vec::new();
|
||||
let timeout_timer = tokio::time::sleep(Duration::from_secs(10));
|
||||
tokio::pin!(timeout_timer);
|
||||
|
||||
// Collect what we can within 55 seconds
|
||||
loop {
|
||||
tokio::select! {
|
||||
Some(result) = futures.next() => {
|
||||
// Ignore errors (panics or task cancellations)
|
||||
if let Ok(videos) = result {
|
||||
all_results.push(videos);
|
||||
}
|
||||
},
|
||||
_ = &mut timeout_timer => {
|
||||
// 55 seconds passed. Stop waiting and return what we have.
|
||||
// The tasks remaining in 'futures' will continue running in the
|
||||
// background because they were 'tokio::spawn'ed.
|
||||
break;
|
||||
},
|
||||
else => break, // All tasks finished before the timeout
|
||||
}
|
||||
}
|
||||
|
||||
interleave(&all_results)
|
||||
}
|
||||
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
|
||||
let _ = clientversion;
|
||||
Some(Channel {
|
||||
id: "placeholder".to_string(),
|
||||
name: "PLACEHOLDER".to_string(),
|
||||
description: "PLACEHOLDER FOR PARENT CLASS".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://hottub.spacemoehre.de/favicon.ico".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: vec![],
|
||||
options: vec![],
|
||||
nsfw: true,
|
||||
cacheDuration: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3,7 +3,6 @@ use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::parse_abbreviated_number;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
use crate::{status::*, util};
|
||||
use async_trait::async_trait;
|
||||
@@ -18,6 +17,13 @@ error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
Json(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -27,24 +33,15 @@ pub struct BeegProvider {
|
||||
stars: Arc<RwLock<Vec<FilterOption>>>,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
}
|
||||
|
||||
impl BeegProvider {
|
||||
pub fn new() -> Self {
|
||||
let provider = BeegProvider {
|
||||
sites: Arc::new(RwLock::new(vec![FilterOption {
|
||||
id: "all".to_string(),
|
||||
title: "All".to_string(),
|
||||
}])),
|
||||
stars: Arc::new(RwLock::new(vec![FilterOption {
|
||||
id: "all".to_string(),
|
||||
title: "All".to_string(),
|
||||
}])),
|
||||
categories: Arc::new(RwLock::new(vec![FilterOption {
|
||||
id: "all".to_string(),
|
||||
title: "All".to_string(),
|
||||
}])),
|
||||
sites: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
|
||||
stars: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
|
||||
categories: Arc::new(RwLock::new(vec![FilterOption { id: "all".into(), title: "All".into() }])),
|
||||
};
|
||||
|
||||
// Kick off the background load but return immediately
|
||||
provider.spawn_initial_load();
|
||||
provider
|
||||
}
|
||||
@@ -55,160 +52,142 @@ impl BeegProvider {
|
||||
let stars = Arc::clone(&self.stars);
|
||||
|
||||
thread::spawn(move || {
|
||||
// Create a tiny runtime just for these async tasks
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()
|
||||
.expect("build tokio runtime");
|
||||
let rt = match tokio::runtime::Builder::new_current_thread().enable_all().build() {
|
||||
Ok(rt) => rt,
|
||||
Err(e) => {
|
||||
eprintln!("beeg runtime init failed: {}", e);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
rt.block_on(async move {
|
||||
// If you have a streaming sites loader, call it here too
|
||||
if let Err(e) = Self::load_sites(sites).await {
|
||||
eprintln!("beeg load_sites_into failed: {e}");
|
||||
eprintln!("beeg load_sites failed: {}", e);
|
||||
}
|
||||
|
||||
if let Err(e) = Self::load_categories(categories).await {
|
||||
eprintln!("beeg load_categories failed: {e}");
|
||||
eprintln!("beeg load_categories failed: {}", e);
|
||||
}
|
||||
|
||||
if let Err(e) = Self::load_stars(stars).await {
|
||||
eprintln!("beeg load_stars failed: {e}");
|
||||
eprintln!("beeg load_stars failed: {}", e);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async fn load_stars(stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
async fn fetch_tags() -> Result<Value> {
|
||||
let mut requester = util::requester::Requester::new();
|
||||
let text = requester
|
||||
.get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
|
||||
.await
|
||||
.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
|
||||
let stars_array = json.get("human").unwrap().as_array().unwrap();
|
||||
for s in stars_array {
|
||||
let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
|
||||
let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
|
||||
Self::push_unique(
|
||||
&stars,
|
||||
FilterOption {
|
||||
id: star_id,
|
||||
title: star_name,
|
||||
},
|
||||
);
|
||||
let text = match requester
|
||||
.get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index", None)
|
||||
.await {
|
||||
Ok(text) => text,
|
||||
Err(e) => {
|
||||
eprintln!("beeg fetch_tags failed: {}", e);
|
||||
return Err(ErrorKind::Parse("failed to fetch tags".into()).into());
|
||||
}
|
||||
};
|
||||
Ok(serde_json::from_str(&text)?)
|
||||
}
|
||||
|
||||
async fn load_stars(stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let json = Self::fetch_tags().await?;
|
||||
let arr = json
|
||||
.get("human")
|
||||
.and_then(|v| v.as_array().map(|v| v.as_slice()))
|
||||
.unwrap_or(&[]);
|
||||
for s in arr {
|
||||
if let (Some(name), Some(id)) = (
|
||||
s.get("tg_name").and_then(|v| v.as_str()),
|
||||
s.get("tg_slug").and_then(|v| v.as_str()),
|
||||
) {
|
||||
Self::push_unique(&stars, FilterOption { id: id.into(), title: name.into() });
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_categories(categories: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = util::requester::Requester::new();
|
||||
let text = requester
|
||||
.get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
|
||||
.await
|
||||
.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
|
||||
let stars_array = json.get("other").unwrap().as_array().unwrap();
|
||||
for s in stars_array {
|
||||
let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
|
||||
let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
|
||||
Self::push_unique(
|
||||
&categories,
|
||||
FilterOption {
|
||||
id: star_id.replace("{","").replace("}",""),
|
||||
title: star_name.replace("{","").replace("}",""),
|
||||
},
|
||||
);
|
||||
let json = Self::fetch_tags().await?;
|
||||
let arr = json
|
||||
.get("other")
|
||||
.and_then(|v| v.as_array().map(|v| v.as_slice()))
|
||||
.unwrap_or(&[]);
|
||||
for s in arr {
|
||||
if let (Some(name), Some(id)) = (
|
||||
s.get("tg_name").and_then(|v| v.as_str()),
|
||||
s.get("tg_slug").and_then(|v| v.as_str()),
|
||||
) {
|
||||
Self::push_unique(
|
||||
&categories,
|
||||
FilterOption {
|
||||
id: id.replace('{', "").replace('}', ""),
|
||||
title: name.replace('{', "").replace('}', ""),
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_sites(sites: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = util::requester::Requester::new();
|
||||
let text = requester
|
||||
.get("https://store.externulls.com/tag/facts/tags?get_original=true&slug=index")
|
||||
.await
|
||||
.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
|
||||
let stars_array = json.get("productions").unwrap().as_array().unwrap();
|
||||
for s in stars_array {
|
||||
let star_name = s.get("tg_name").unwrap().as_str().unwrap().to_string();
|
||||
let star_id = s.get("tg_slug").unwrap().as_str().unwrap().to_string();
|
||||
Self::push_unique(
|
||||
&sites,
|
||||
FilterOption {
|
||||
id: star_id,
|
||||
title: star_name,
|
||||
},
|
||||
);
|
||||
let json = Self::fetch_tags().await?;
|
||||
let arr = json
|
||||
.get("productions")
|
||||
.and_then(|v| v.as_array().map(|v| v.as_slice()))
|
||||
.unwrap_or(&[]);
|
||||
for s in arr {
|
||||
if let (Some(name), Some(id)) = (
|
||||
s.get("tg_name").and_then(|v| v.as_str()),
|
||||
s.get("tg_slug").and_then(|v| v.as_str()),
|
||||
) {
|
||||
Self::push_unique(&sites, FilterOption { id: id.into(), title: name.into() });
|
||||
}
|
||||
}
|
||||
return Ok(());
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Push one item with minimal lock time and dedup by id
|
||||
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x.id == item.id) {
|
||||
vec.push(item);
|
||||
// Optional: keep it sorted for nicer UX
|
||||
// vec.sort_by(|a,b| a.title.cmp(&b.title));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
let _ = clientversion;
|
||||
let sites: Vec<FilterOption> = self
|
||||
.sites
|
||||
.read()
|
||||
.map(|g| g.clone()) // or: .map(|g| g.to_vec())
|
||||
.unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
|
||||
|
||||
let categories: Vec<FilterOption> = self
|
||||
.categories
|
||||
.read()
|
||||
.map(|g| g.clone()) // or: .map(|g| g.to_vec())
|
||||
.unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
|
||||
|
||||
let stars: Vec<FilterOption> = self
|
||||
.stars
|
||||
.read()
|
||||
.map(|g| g.clone()) // or: .map(|g| g.to_vec())
|
||||
.unwrap_or_default(); // or: .unwrap_or_else(|_| Vec::new())
|
||||
|
||||
fn build_channel(&self, _: ClientVersion) -> Channel {
|
||||
Channel {
|
||||
id: "beeg".to_string(),
|
||||
name: "Beeg".to_string(),
|
||||
description: "Watch your favorite Porn on Beeg.com".to_string(),
|
||||
id: "beeg".into(),
|
||||
name: "Beeg".into(),
|
||||
description: "Watch your favorite Porn on Beeg.com".into(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=beeg.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=beeg.com".into(),
|
||||
status: "active".into(),
|
||||
categories: vec![],
|
||||
options: vec![
|
||||
ChannelOption {
|
||||
id: "sites".to_string(),
|
||||
title: "Sites".to_string(),
|
||||
description: "Filter for different Sites".to_string(),
|
||||
systemImage: "rectangle.stack".to_string(),
|
||||
colorName: "green".to_string(),
|
||||
options: sites,
|
||||
id: "sites".into(),
|
||||
title: "Sites".into(),
|
||||
description: "Filter for different Sites".into(),
|
||||
systemImage: "rectangle.stack".into(),
|
||||
colorName: "green".into(),
|
||||
options: self.sites.read().map(|v| v.clone()).unwrap_or_default(),
|
||||
multiSelect: false,
|
||||
},
|
||||
ChannelOption {
|
||||
id: "categories".to_string(),
|
||||
title: "Categories".to_string(),
|
||||
description: "Filter for different Networks".to_string(),
|
||||
systemImage: "list.dash".to_string(),
|
||||
colorName: "purple".to_string(),
|
||||
options: categories,
|
||||
id: "categories".into(),
|
||||
title: "Categories".into(),
|
||||
description: "Filter for different Networks".into(),
|
||||
systemImage: "list.dash".into(),
|
||||
colorName: "purple".into(),
|
||||
options: self.categories.read().map(|v| v.clone()).unwrap_or_default(),
|
||||
multiSelect: false,
|
||||
},
|
||||
ChannelOption {
|
||||
id: "stars".to_string(),
|
||||
title: "Stars".to_string(),
|
||||
description: "Filter for different Pornstars".to_string(),
|
||||
systemImage: "star.fill".to_string(),
|
||||
colorName: "yellow".to_string(),
|
||||
options: stars,
|
||||
id: "stars".into(),
|
||||
title: "Stars".into(),
|
||||
description: "Filter for different Pornstars".into(),
|
||||
systemImage: "star.fill".into(),
|
||||
colorName: "yellow".into(),
|
||||
options: self.stars.read().map(|v| v.clone()).unwrap_or_default(),
|
||||
multiSelect: false,
|
||||
},
|
||||
],
|
||||
@@ -252,7 +231,6 @@ impl BeegProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -263,7 +241,7 @@ impl BeegProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(json.clone());
|
||||
if !video_items.is_empty() {
|
||||
@@ -304,7 +282,7 @@ impl BeegProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let json: serde_json::Value = serde_json::from_str::<serde_json::Value>(&text).unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(json.clone());
|
||||
if !video_items.is_empty() {
|
||||
@@ -317,89 +295,60 @@ impl BeegProvider {
|
||||
}
|
||||
|
||||
fn get_video_items_from_html(&self, json: Value) -> Vec<VideoItem> {
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
let video_items = match json.as_array(){
|
||||
Some(array) => array,
|
||||
let mut items = Vec::new();
|
||||
let array = match json.as_array() {
|
||||
Some(a) => a,
|
||||
None => return items,
|
||||
};
|
||||
for video in video_items {
|
||||
// println!("video: {}\n\n\n", serde_json::to_string_pretty(&video).unwrap());
|
||||
let file = match video.get("file"){
|
||||
|
||||
for video in array {
|
||||
let file = match video.get("file") { Some(v) => v, None => continue };
|
||||
let hls = match file.get("hls_resources") { Some(v) => v, None => continue };
|
||||
let key = match hls.get("fl_cdn_multi").and_then(|v| v.as_str()) {
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
};
|
||||
let hls_resources = match file.get("hls_resources"){
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
};
|
||||
let video_key = match hls_resources.get("fl_cdn_multi"){
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
};
|
||||
let video_url = format!(
|
||||
"https://video.externulls.com/{}",
|
||||
video_key.to_string().replace("\"","")
|
||||
);
|
||||
let data = match file.get("data") {
|
||||
Some(v) => v,
|
||||
None => continue,
|
||||
};
|
||||
let title = match data[0].get("cd_value") {
|
||||
Some(v) => decode(v.as_str().unwrap_or("").as_bytes()).to_string().unwrap_or(v.to_string()),
|
||||
None => "".to_string(),
|
||||
};
|
||||
let id = match file.get("id"){
|
||||
Some(v) => v.as_i64().unwrap_or(0).to_string(),
|
||||
None => title.clone(),
|
||||
};
|
||||
let fc_facts = match video.get("fc_facts") {
|
||||
Some(v) => v[0].clone(),
|
||||
None => continue,
|
||||
};
|
||||
let duration = match file.get("fl_duration") {
|
||||
Some(v) => parse_time_to_seconds(v.as_str().unwrap_or("0")).unwrap_or(0),
|
||||
None => 0,
|
||||
};
|
||||
let tags = match video.get("tags") {
|
||||
Some(v) => {
|
||||
// v should be an array of tag objects
|
||||
v.as_array()
|
||||
.map(|arr| {
|
||||
arr.iter()
|
||||
.map(|tag| {
|
||||
tag.get("tg_name")
|
||||
.and_then(|name| name.as_str())
|
||||
.unwrap_or("")
|
||||
.to_string()
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
None => Vec::new(),
|
||||
};
|
||||
|
||||
let id = file.get("id").and_then(|v| v.as_i64()).unwrap_or(0).to_string();
|
||||
let title = file
|
||||
.get("data")
|
||||
.and_then(|v| v.get(0))
|
||||
.and_then(|v| v.get("cd_value"))
|
||||
.and_then(|v| v.as_str())
|
||||
.map(|s| decode(s.as_bytes()).to_string().unwrap_or_default())
|
||||
.unwrap_or_default();
|
||||
|
||||
let duration = file
|
||||
.get("fl_duration")
|
||||
.and_then(|v| v.as_u64())
|
||||
.unwrap_or(0);
|
||||
|
||||
let views = video
|
||||
.get("fc_facts")
|
||||
.and_then(|v| v.get(0))
|
||||
.and_then(|v| v.get("fc_st_views"))
|
||||
.and_then(|v| v.as_str())
|
||||
.and_then(|s| parse_abbreviated_number(s))
|
||||
.unwrap_or(0);
|
||||
|
||||
let thumb = format!("https://thumbs.externulls.com/videos/{}/0.webp?size=480x270", id);
|
||||
let views = match fc_facts.get("fc_st_views") {
|
||||
Some(v) => parse_abbreviated_number(v.as_str().unwrap_or("0")).unwrap_or(0),
|
||||
None => 0,
|
||||
};
|
||||
let mut video_item = VideoItem::new(
|
||||
|
||||
let mut item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url.to_string(),
|
||||
"beeg".to_string(),
|
||||
format!("https://video.externulls.com/{}", key),
|
||||
"beeg".into(),
|
||||
thumb,
|
||||
duration as u32,
|
||||
);
|
||||
|
||||
if views > 0 {
|
||||
video_item = video_item.views(views);
|
||||
item = item.views(views);
|
||||
}
|
||||
if !tags.is_empty() {
|
||||
video_item = video_item.tags(tags);
|
||||
}
|
||||
items.push(video_item);
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
return items;
|
||||
items
|
||||
}
|
||||
}
|
||||
|
||||
@@ -408,32 +357,26 @@ impl Provider for BeegProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
_pool: DbPool,
|
||||
_sort: String,
|
||||
_: DbPool,
|
||||
_: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
_per_page: String,
|
||||
_: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => {
|
||||
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
|
||||
.await
|
||||
}
|
||||
None => {
|
||||
self.get(cache, page.parse::<u8>().unwrap_or(1), options)
|
||||
.await
|
||||
}
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
let result = match query {
|
||||
Some(q) => self.query(cache, page, &q, options).await,
|
||||
None => self.get(cache, page, options).await,
|
||||
};
|
||||
match videos {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
result.unwrap_or_else(|e| {
|
||||
eprintln!("beeg provider error: {}", e);
|
||||
vec![]
|
||||
})
|
||||
}
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -43,7 +43,6 @@ impl FreshpornoProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -56,7 +55,7 @@ impl FreshpornoProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -93,7 +92,7 @@ impl FreshpornoProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -193,11 +192,11 @@ impl Provider for FreshpornoProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_channel(&self,clientversion:ClientVersion) -> Channel {
|
||||
fn get_channel(&self,clientversion:ClientVersion) -> Option<Channel> {
|
||||
println!("Getting channel for placeholder with client version: {:?}",clientversion);
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
Some(Channel {
|
||||
id:"placeholder".to_string(),name:"PLACEHOLDER".to_string(),description:"PLACEHOLDER FOR PARENT CLASS".to_string(),premium:false,favicon:"https://www.google.com/s2/favicons?sz=64&domain=missav.ws".to_string(),status:"active".to_string(),categories:vec![],options:vec![],nsfw:true,cacheDuration:None,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -153,7 +153,7 @@ impl HanimeProvider {
|
||||
let url = format!("https://h.freeanimehentai.net/api/v8/video?id={}&", hit.slug);
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url).await.unwrap();
|
||||
let text = requester.get(&url, None).await.unwrap();
|
||||
|
||||
let urls = text.split("\"servers\"").collect::<Vec<&str>>()[1];
|
||||
let mut url_vec = vec![];
|
||||
@@ -189,7 +189,7 @@ impl HanimeProvider {
|
||||
let old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 * 12 {
|
||||
println!("Cache hit for URL: {}", index);
|
||||
//println!("Cache hit for URL: {}", index);
|
||||
return Ok(items.clone());
|
||||
}
|
||||
else{
|
||||
@@ -208,7 +208,7 @@ impl HanimeProvider {
|
||||
.ordering(ordering);
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let response = requester.post("https://search.htv-services.com/search", &search, vec![]).await.unwrap();
|
||||
let response = requester.post_json("https://search.htv-services.com/search", &search, vec![]).await.unwrap();
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -44,7 +44,6 @@ impl HomoxxxProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
|
||||
483
src/providers/hqporner.rs
Normal file
483
src/providers/hqporner.rs
Normal file
@@ -0,0 +1,483 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::{format_error_chain, send_discord_error_report};
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use futures::future::join_all;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::thread::sleep;
|
||||
use std::{thread, vec};
|
||||
use titlecase::Titlecase;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
Json(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HqpornerProvider {
|
||||
url: String,
|
||||
stars: Arc<RwLock<Vec<FilterOption>>>,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
}
|
||||
|
||||
impl HqpornerProvider {
|
||||
pub fn new() -> Self {
|
||||
let provider = HqpornerProvider {
|
||||
url: "https://hqporner.com".to_string(),
|
||||
stars: Arc::new(RwLock::new(vec![])),
|
||||
categories: Arc::new(RwLock::new(vec![])),
|
||||
};
|
||||
provider.spawn_initial_load();
|
||||
provider
|
||||
}
|
||||
|
||||
fn spawn_initial_load(&self) {
|
||||
let url = self.url.clone();
|
||||
let stars = Arc::clone(&self.stars);
|
||||
let categories = Arc::clone(&self.categories);
|
||||
|
||||
thread::spawn(move || {
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build();
|
||||
|
||||
if let Ok(runtime) = rt {
|
||||
runtime.block_on(async move {
|
||||
if let Err(e) = Self::load_stars(&url, stars).await {
|
||||
eprintln!("load_stars failed: {e}");
|
||||
}
|
||||
if let Err(e) = Self::load_categories(&url, categories).await {
|
||||
eprintln!("load_categories failed: {e}");
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
async fn load_stars(base_url: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = Requester::new();
|
||||
let text = requester
|
||||
.get(&format!("{}/girls", base_url), None)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
|
||||
let stars_div = text
|
||||
.split("<span>Girls</span>")
|
||||
.last()
|
||||
.and_then(|s| s.split("</ul>").next())
|
||||
.ok_or_else(|| Error::from("Could not find stars div"))?;
|
||||
|
||||
for stars_element in stars_div.split("<li ").skip(1) {
|
||||
let star_id = stars_element
|
||||
.split("href=\"/actress/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
let star_name = stars_element
|
||||
.split("<a ")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
if let (Some(id), Some(name)) = (star_id, star_name) {
|
||||
Self::push_unique(&stars, FilterOption { id, title: name });
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_categories(
|
||||
base_url: &str,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
) -> Result<()> {
|
||||
let mut requester = Requester::new();
|
||||
let text = requester
|
||||
.get(&format!("{}/categories", base_url), None)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
|
||||
let categories_div = text
|
||||
.split("<span>Categories</span>")
|
||||
.last()
|
||||
.and_then(|s| s.split("</ul>").next())
|
||||
.ok_or_else(|| Error::from("Could not find categories div"))?;
|
||||
|
||||
for categories_element in categories_div.split("<li ").skip(1) {
|
||||
let category_id = categories_element
|
||||
.split("href=\"/category/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.map(|s| s.to_string());
|
||||
|
||||
let category_name = categories_element
|
||||
.split("<a ")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.map(|s| s.titlecase());
|
||||
|
||||
if let (Some(id), Some(name)) = (category_id, category_name) {
|
||||
Self::push_unique(&categories, FilterOption { id, title: name });
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
|
||||
Channel {
|
||||
id: "hqporner".to_string(),
|
||||
name: "HQPorner".to_string(),
|
||||
description: "HD Porn Videos Tube".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=hqporner.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: self
|
||||
.categories
|
||||
.read()
|
||||
.map(|c| c.iter().map(|o| o.title.clone()).collect())
|
||||
.unwrap_or_default(),
|
||||
options: vec![],
|
||||
nsfw: true,
|
||||
cacheDuration: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x.id == item.id) {
|
||||
vec.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
_sort: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let video_url = format!("{}/hdporn/{}", self.url, page);
|
||||
if let Some((time, items)) = cache.get(&video_url) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 300 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let mut requester = options.requester.clone().ok_or("No requester")?;
|
||||
let text = requester
|
||||
.get(&video_url, None)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
|
||||
let video_items = self.get_video_items_from_html(text, &mut requester).await;
|
||||
if !video_items.is_empty() {
|
||||
cache.insert(video_url, video_items.clone());
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let search_string = query.trim().to_lowercase();
|
||||
let mut video_url = format!("{}/?q={}&p={}", self.url, search_string, page);
|
||||
|
||||
if let Ok(stars) = self.stars.read() {
|
||||
if let Some(star) = stars
|
||||
.iter()
|
||||
.find(|s| s.title.to_lowercase() == search_string)
|
||||
{
|
||||
video_url = format!("{}/actress/{}/{}", self.url, star.id, page);
|
||||
}
|
||||
}
|
||||
if let Ok(cats) = self.categories.read() {
|
||||
if let Some(cat) = cats
|
||||
.iter()
|
||||
.find(|c| c.title.to_lowercase() == search_string)
|
||||
{
|
||||
video_url = format!("{}/category/{}/{}", self.url, cat.id, page);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((time, items)) = cache.get(&video_url) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 300 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let mut requester = options.requester.clone().ok_or("No requester")?;
|
||||
let text = requester
|
||||
.get(&video_url, None)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
|
||||
let video_items = self.get_video_items_from_html(text, &mut requester).await;
|
||||
if !video_items.is_empty() {
|
||||
cache.insert(video_url, video_items.clone());
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(
|
||||
&self,
|
||||
html: String,
|
||||
requester: &mut Requester,
|
||||
) -> Vec<VideoItem> {
|
||||
if html.is_empty() || html.contains("404 Not Found") {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let raw_videos: Vec<String> = html
|
||||
.split("id=\"footer\"")
|
||||
.next()
|
||||
.and_then(|s| s.split("<section class=\"box features\">").nth(2))
|
||||
.map(|s| {
|
||||
s.split("<section class=\"box feature\">")
|
||||
.skip(1)
|
||||
.map(|v| v.to_string())
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
let futures = raw_videos
|
||||
.into_iter()
|
||||
.map(|el| self.get_video_item(el, requester.clone()));
|
||||
|
||||
join_all(futures)
|
||||
.await
|
||||
.into_iter()
|
||||
.inspect(|r| {
|
||||
if let Err(e) = r {
|
||||
let msg = e.to_string();
|
||||
let chain = format_error_chain(e);
|
||||
tokio::spawn(async move {
|
||||
let _ = send_discord_error_report(
|
||||
msg,
|
||||
Some(chain),
|
||||
Some("Hqporner Provider"),
|
||||
None,
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
)
|
||||
.await;
|
||||
});
|
||||
}
|
||||
})
|
||||
.filter_map(Result::ok)
|
||||
.filter(|item| !item.formats.clone().unwrap().is_empty())
|
||||
.collect()
|
||||
}
|
||||
|
||||
async fn get_video_item(&self, seg: String, mut requester: Requester) -> Result<VideoItem> {
|
||||
let video_url = format!(
|
||||
"{}{}",
|
||||
self.url,
|
||||
seg.split("<a href=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("url \n{seg}").into()))?
|
||||
);
|
||||
let title_raw = seg
|
||||
.split("<h3 class=\"meta-data-title\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("title \n{seg}").into()))?;
|
||||
let title = decode(title_raw.as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or_else(|_| title_raw.to_string())
|
||||
.titlecase();
|
||||
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.nth(4)
|
||||
.and_then(|s| s.split('.').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("id \n{seg}").into()))?
|
||||
.to_string();
|
||||
let thumb = format!(
|
||||
"https:{}",
|
||||
seg.split("onmouseleave='defaultImage(\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("thumb \n{seg}").into()))?
|
||||
);
|
||||
let raw_duration = seg
|
||||
.split("<span class=\"icon fa-clock-o meta-data\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("s<").next())
|
||||
.map(|s| s.replace("m ", ":"))
|
||||
.unwrap_or_default();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
let (tags, formats) = self.extract_media(&video_url, &mut requester).await?;
|
||||
|
||||
Ok(
|
||||
VideoItem::new(id, title, video_url, "hqporner".into(), thumb, duration)
|
||||
.formats(formats)
|
||||
.tags(tags),
|
||||
)
|
||||
}
|
||||
|
||||
async fn extract_media(
|
||||
&self,
|
||||
url: &str,
|
||||
requester: &mut Requester,
|
||||
) -> Result<(Vec<String>, Vec<VideoFormat>)> {
|
||||
let mut formats = vec![];
|
||||
let mut tags = vec![];
|
||||
let resp = requester
|
||||
.get_raw_with_headers(
|
||||
url,
|
||||
vec![("Referer".to_string(), "https://hqporner.com/".into())],
|
||||
)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
let text = resp
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Text conversion failed: {}", e)))?;
|
||||
|
||||
if text.contains("Why do I see it?") {
|
||||
return Ok((tags, formats));
|
||||
}
|
||||
|
||||
// Extract Stars & Tags
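// Performer names found here are remembered via push_unique(&self.stars, ...) so that a
// later query() matching an actress name can be rewritten to the /actress/{id}/{page} listing URL.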
|
||||
if let Some(stars_block) = text
|
||||
.split("icon fa-star-o")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("</li>").next())
|
||||
{
|
||||
for star_el in stars_block.split("href=\"/actress/").skip(1) {
|
||||
let id = star_el.split('"').next().unwrap_or("").to_string();
|
||||
let name = star_el
|
||||
.split("\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
if !name.is_empty() {
|
||||
tags.push(name.clone());
|
||||
Self::push_unique(&self.stars, FilterOption { id, title: name });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Player / Video Extraction
|
||||
let player_url = format!(
|
||||
"https:{}",
|
||||
text.split("url: '/blocks/altplayer.php?i=")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('\'').next())
|
||||
.ok_or("No player link")?
|
||||
);
|
||||
let mut r = requester
|
||||
.get_raw_with_headers(
|
||||
&player_url,
|
||||
vec![("Referer".to_string(), "https://hqporner.com/".into())],
|
||||
).await;
|
||||
|
||||
if let Err(_e) = &r {
|
||||
// Use the async sleep so the retry back-off does not block the executor thread.
tokio::time::sleep(std::time::Duration::from_secs(1)).await;
|
||||
r = requester
|
||||
.get_raw_with_headers(
|
||||
&player_url,
|
||||
vec![("Referer".to_string(), "https://hqporner.com/".into())],
|
||||
).await;
|
||||
}
|
||||
let text2 = r
|
||||
.map_err(|e| Error::from(format!("Request failed: {}", e)))?
|
||||
.text()
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("Text conversion failed: {}", e)))?;
|
||||
|
||||
// Check for error response
|
||||
if text2.starts_with("ERR:"){
|
||||
return Ok((tags, formats));
|
||||
}
|
||||
|
||||
let video_element = text2
|
||||
.split("<video ")
|
||||
.nth(2)
|
||||
.and_then(|s| s.split("</video>").next())
|
||||
.ok_or(format!("No video element\n{player_url}\n{text2}"))?;
|
||||
for source in video_element.split("<source ").skip(1) {
|
||||
let title = source
|
||||
.split("title=\\\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("\\\"").next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let quality = title.split(' ').next().unwrap_or("HD").to_string();
|
||||
let media_url = format!(
|
||||
"https:{}",
|
||||
source
|
||||
.split("src=\\\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("\\\"").next())
|
||||
.unwrap_or("")
|
||||
);
|
||||
|
||||
formats.push(
|
||||
VideoFormat::new(media_url, quality, "mp4".into())
|
||||
.format_id(title.clone())
|
||||
.format_note(title),
|
||||
);
|
||||
}
|
||||
|
||||
Ok((tags, formats))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for HqpornerProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
_pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let page_num = page.parse::<u8>().unwrap_or(1);
|
||||
let res = match query {
|
||||
Some(q) => self.query(cache, page_num, &q, options).await,
|
||||
None => self.get(cache, page_num, &sort, options).await,
|
||||
};
|
||||
res.unwrap_or_else(|e| {
eprintln!("Hqporner error: {e}");
// send_discord_error_report is async; spawn it so the report actually runs instead of the future being dropped unawaited.
let msg = e.to_string();
let chain = format_error_chain(&e);
tokio::spawn(async move {
let _ = send_discord_error_report(msg, Some(chain), None, None, file!(), line!(), module_path!()).await;
});
vec![]
})
|
||||
}
|
||||
|
||||
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(v))
|
||||
}
|
||||
}
|
||||
433
src/providers/hypnotube.rs
Normal file
@@ -0,0 +1,433 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::{format_error_chain, send_discord_error_report};
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::{thread, vec};
|
||||
use titlecase::Titlecase;
|
||||
use wreq::Version;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
Json(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct HypnotubeProvider {
|
||||
url: String,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
}
|
||||
|
||||
impl HypnotubeProvider {
|
||||
pub fn new() -> Self {
|
||||
let provider = Self {
|
||||
url: "https://hypnotube.com".to_string(),
|
||||
categories: Arc::new(RwLock::new(vec![])),
|
||||
};
|
||||
provider.spawn_initial_load();
|
||||
provider
|
||||
}
|
||||
|
||||
fn spawn_initial_load(&self) {
|
||||
let url = self.url.clone();
|
||||
let categories = Arc::clone(&self.categories);
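// Category discovery runs on a dedicated OS thread with its own single-threaded tokio
// runtime, so constructing the provider returns immediately instead of blocking on the network.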
|
||||
|
||||
thread::spawn(move || {
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(rt) => rt,
Err(e) => {
// Without a runtime on this thread the async Discord report cannot be awaited,
// so the failure is only logged locally before the thread exits.
eprintln!("tokio runtime failed: {e}");
return;
}
};
|
||||
|
||||
rt.block_on(async {
|
||||
if let Err(e) = Self::load_categories(&url, Arc::clone(&categories)).await {
|
||||
eprintln!("load_categories failed: {e}");
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("HypnoTube Provider"),
|
||||
Some("Failed to load categories during initial load"),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
)
|
||||
.await;
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = Requester::new();
|
||||
let text = requester
|
||||
.get(&format!("{base}/channels/"), Some(Version::HTTP_11))
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("{}", e)))?;
|
||||
|
||||
let block = text
|
||||
.split(" title END ")
|
||||
.last()
|
||||
.ok_or_else(|| ErrorKind::Parse("categories block".into()))?
|
||||
.split(" main END ")
|
||||
.next()
|
||||
.unwrap_or("");
|
||||
|
||||
for el in block.split("<!-- item -->").skip(1) {
|
||||
let id = el
|
||||
.split("<a href=\"https://hypnotube.com/channels/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("/\"").next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("category id: {el}").into()))?
|
||||
.to_string();
|
||||
|
||||
let title = el
|
||||
.split("title=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("\"").next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("category title: {el}").into()))?
|
||||
.titlecase();
|
||||
|
||||
Self::push_unique(&cats, FilterOption { id, title });
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
id: "hypnotube".to_string(),
|
||||
name: "Hypnotube".to_string(),
|
||||
description: "free video hypno tube for the sissy hypnosis porn fetish".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=hypnotube.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: self
|
||||
.categories
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|c| c.title.clone())
|
||||
.collect(),
|
||||
options: vec![ChannelOption {
|
||||
id: "sort".to_string(),
|
||||
title: "Sort".to_string(),
|
||||
description: "Sort the Videos".to_string(),
|
||||
systemImage: "list.number".to_string(),
|
||||
colorName: "blue".to_string(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "most recent".into(),
|
||||
title: "Most Recent".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most viewed".into(),
|
||||
title: "Most Viewed".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "top rated".into(),
|
||||
title: "Top Rated".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "longest".into(),
|
||||
title: "Longest".into(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
}],
|
||||
nsfw: true,
|
||||
cacheDuration: Some(1800),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x.id == item.id) {
|
||||
vec.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: &str,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let sort_string = match sort {
|
||||
"top rated" => "top-rated",
|
||||
"most viewed" => "most-viewed",
|
||||
"longest" => "longest",
|
||||
_ => "videos",
|
||||
};
|
||||
let video_url = format!("{}/{}/page{}.html", self.url, sort_string, page);
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return items.clone();
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester
|
||||
.get(&video_url, Some(Version::HTTP_11))
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
if text.contains("Sorry, no results were found.") {
|
||||
return vec![];
|
||||
}
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone()).await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return old_items;
|
||||
}
|
||||
video_items
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let sort_string = match options.sort.as_deref().unwrap_or("") {
|
||||
"top rated" => "rating",
|
||||
"most viewed" => "views",
|
||||
"longest" => "longest",
|
||||
_ => "newest",
|
||||
};
|
||||
let video_url = format!(
|
||||
"{}/search/videos/{}/{}/page{}.html",
|
||||
self.url,
|
||||
query.trim().replace(" ", "%20"),
|
||||
sort_string,
|
||||
page
|
||||
);
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return items.clone();
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return items.clone();
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = match requester
|
||||
.post(
|
||||
format!("{}/searchgate.php", self.url).as_str(),
|
||||
format!("q={}&type=videos", query.replace(" ", "+")).as_str(),
|
||||
vec![("Content-Type", "application/x-www-form-urlencoded")],
|
||||
)
|
||||
.await
|
||||
.unwrap()
|
||||
.text()
|
||||
.await
|
||||
{
|
||||
Ok(t) => t,
|
||||
Err(e) => {
|
||||
eprint!("Hypnotube search POST request failed: {}", e);
|
||||
return vec![];
|
||||
}
|
||||
};
|
||||
// println!("Hypnotube search POST response status: {}", p.text().await.unwrap_or_default());
|
||||
// let text = requester.get(&video_url, Some(Version::HTTP_11)).await.unwrap();
|
||||
if text.contains("Sorry, no results were found.") {
|
||||
return vec![];
|
||||
}
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone()).await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return old_items;
|
||||
}
|
||||
video_items
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
|
||||
if html.is_empty() || html.contains("404 Not Found") {
|
||||
eprint!("Hypnotube returned empty or 404 html");
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let block = match html
|
||||
.split("pagination-col col pagination")
|
||||
.next()
|
||||
.and_then(|s| s.split(" title END ").last())
|
||||
{
|
||||
Some(b) => b,
|
||||
None => {
|
||||
eprint!("Hypnotube Provider: Failed to get block from html");
|
||||
let e = Error::from(ErrorKind::Parse("html".into()));
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Hypnotube Provider"),
|
||||
Some(&format!("Failed to get block from html:\n```{html}\n```")),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
)
|
||||
.await;
|
||||
return vec![];
|
||||
}
|
||||
};
|
||||
let mut items = vec![];
|
||||
for seg in block.split("<!-- item -->").skip(1) {
|
||||
let video_url = match seg
|
||||
.split(" href=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
{
|
||||
Some(url) => url.to_string(),
|
||||
None => {
|
||||
eprint!("Hypnotube Provider: Failed to parse video url from segment");
|
||||
let e = Error::from(ErrorKind::Parse("video url".into()));
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Hypnotube Provider"),
|
||||
Some(&format!(
|
||||
"Failed to parse video url from segment:\n```{seg}\n```"
|
||||
)),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
)
|
||||
.await;
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let mut title = seg
|
||||
.split(" title=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or_default()
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
title = decode(title.clone().as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or(title)
|
||||
.titlecase();
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.nth(4)
|
||||
.and_then(|s| s.split('.').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video id".into()))
|
||||
.unwrap_or_else(|_| title.as_str());
|
||||
let thumb = seg
|
||||
.split("<img ")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("src=\"").nth(1))
|
||||
.and_then(|s| s.split("\"").next())
|
||||
.ok_or_else(|| ErrorKind::Parse("thumb block".into()))
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let raw_duration = seg
|
||||
.split("<span class=\"time\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
let views = seg
|
||||
.split("<span class=\"icon i-eye\"></span>")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("span class=\"sub-desc\">").nth(1))
|
||||
.and_then(|s| s.split("<").next())
|
||||
.unwrap_or("0")
|
||||
.replace(",", "")
|
||||
.parse::<u32>()
|
||||
.unwrap_or(0);
|
||||
let video_item = VideoItem::new(
|
||||
id.to_owned(),
|
||||
title,
|
||||
video_url,
|
||||
"hypnotube".into(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.views(views);
|
||||
items.push(video_item);
|
||||
}
|
||||
items
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for HypnotubeProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
_pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
|
||||
let res = match query {
|
||||
Some(q) => self.to_owned().query(cache, page, &q, options).await,
|
||||
None => self.get(cache, page, &sort, options).await,
|
||||
};
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(v))
|
||||
}
|
||||
}
|
||||
413
src/providers/javtiful.rs
Normal file
@@ -0,0 +1,413 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::{format_error_chain, send_discord_error_report};
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use futures::future::join_all;
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::vec;
|
||||
use titlecase::Titlecase;
|
||||
use wreq::Version;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
Json(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct JavtifulProvider {
|
||||
url: String,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
}
|
||||
|
||||
impl JavtifulProvider {
|
||||
pub fn new() -> Self {
|
||||
let provider = Self {
|
||||
url: "https://javtiful.com".to_string(),
|
||||
categories: Arc::new(RwLock::new(vec![])),
|
||||
};
|
||||
provider
|
||||
}
|
||||
|
||||
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
id: "javtiful".to_string(),
|
||||
name: "Javtiful".to_string(),
|
||||
description: "Watch Porn!".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=javtiful.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: self
|
||||
.categories
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|c| c.title.clone())
|
||||
.collect(),
|
||||
options: vec![ChannelOption {
|
||||
id: "sort".to_string(),
|
||||
title: "Sort".to_string(),
|
||||
description: "Sort the Videos".to_string(),
|
||||
systemImage: "list.number".to_string(),
|
||||
colorName: "blue".to_string(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "newest".into(),
|
||||
title: "Newest".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "top rated".into(),
|
||||
title: "Top Rated".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most viewed".into(),
|
||||
title: "Most Viewed".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "top favorites".into(),
|
||||
title: "Top Favorites".into(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
}],
|
||||
nsfw: true,
|
||||
cacheDuration: Some(1800),
|
||||
}
|
||||
}
|
||||
|
||||
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x.id == item.id) {
|
||||
vec.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let sort_string = match sort {
|
||||
"top rated" => "/sort=top_rated",
|
||||
"most viewed" => "/sort=most_viewed",
|
||||
_ => "",
|
||||
};
|
||||
let video_url = format!(
|
||||
"{}/videos{}?page={}",
|
||||
self.url, sort_string, page
|
||||
);
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url, Some(Version::HTTP_2)).await.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
if page > 1 && !text.contains(&format!("<li class=\"page-item active\"><span class=\"page-link\">{}</span>", page)) {
|
||||
return Ok(vec![]);
|
||||
}
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), &mut requester)
|
||||
.await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let sort_string = match options.sort.as_deref().unwrap_or("") {
|
||||
"top rated" => "/sort=top_rated",
|
||||
"most viewed" => "/sort=most_viewed",
|
||||
_ => "",
|
||||
};
|
||||
let video_url = format!(
|
||||
"{}/search/videos{}?search_query={}&page={}",
|
||||
self.url, sort_string, query.replace(" ","+"), page
|
||||
);
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url, Some(Version::HTTP_2)).await.map_err(|e| Error::from(format!("Request failed: {}", e)))?;
|
||||
if page > 1 && !text.contains(&format!("<li class=\"page-item active\"><span class=\"page-link\">{}</span>", page)) {
|
||||
return Ok(vec![]);
|
||||
}
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), &mut requester)
|
||||
.await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(
|
||||
&self,
|
||||
html: String,
|
||||
requester: &mut Requester,
|
||||
) -> Vec<VideoItem> {
|
||||
if html.is_empty() || html.contains("404 Not Found") {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let block = match html
|
||||
.split("pagination ")
|
||||
.next()
|
||||
.and_then(|s| s.split("row row-cols-1 row-cols-sm-2 row-cols-lg-3 row-cols-xl-4").nth(1))
|
||||
{
|
||||
Some(b) => b,
|
||||
None => {
|
||||
eprint!("Javtiful Provider: Failed to get block from html");
|
||||
let e = Error::from(ErrorKind::Parse("html".into()));
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Javtiful Provider"),
|
||||
Some(&format!("Failed to get block from html:\n```{html}\n```")),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
return vec![]
|
||||
},
|
||||
};
|
||||
|
||||
let futures = block
|
||||
.split("card ")
|
||||
.skip(1)
|
||||
.filter(|seg| !seg.contains("SPONSOR"))
|
||||
.map(|el| self.get_video_item(el.to_string(), requester.clone()));
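// Each card segment needs a follow-up request to its detail page (extract_media), so the
// lookups are built as futures here and awaited concurrently via join_all below.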
|
||||
|
||||
join_all(futures)
|
||||
.await
|
||||
.into_iter()
|
||||
.inspect(|r| {
|
||||
if let Err(e) = r {
|
||||
eprint!("Javtiful Provider: Failed to get video item:{}\n", e);
|
||||
// Prepare data to move into the background task
|
||||
let msg = e.to_string();
|
||||
let chain = format_error_chain(&e);
|
||||
|
||||
// Spawn the report into the background - NO .await here
|
||||
tokio::spawn(async move {
|
||||
let _ = send_discord_error_report(
|
||||
msg,
|
||||
Some(chain),
|
||||
Some("Javtiful Provider"),
|
||||
Some("Failed to get video item"),
|
||||
file!(), // Note: these might report the utility line
|
||||
line!(), // better to hardcode or pass from outside
|
||||
module_path!(),
|
||||
).await;
|
||||
});
|
||||
}
|
||||
})
|
||||
.filter_map(Result::ok)
|
||||
.collect()
|
||||
}
|
||||
|
||||
async fn get_video_item(
|
||||
&self,
|
||||
seg: String,
|
||||
mut requester: Requester,
|
||||
) -> Result<VideoItem> {
|
||||
let video_url = seg
|
||||
.split(" href=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video url\n\n{seg}".into()))?
|
||||
.to_string();
|
||||
|
||||
let mut title = seg
|
||||
.split(" alt=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("video title\n\n{seg}").into()))?
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title).titlecase();
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.nth(5)
|
||||
.and_then(|s| s.split('.').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video id\n\n{seg}".into()))?
|
||||
.to_string();
|
||||
let thumb_block = seg
|
||||
.split("<img ")
|
||||
.nth(1)
|
||||
.ok_or_else(|| ErrorKind::Parse("thumb block\n\n{seg}".into()))?;
|
||||
|
||||
let thumb = thumb_block
|
||||
.split("data-src=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let mut preview = seg
|
||||
.split("data-trailer=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let raw_duration = seg
|
||||
.split("label-duration\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
let (tags, formats, views) =
|
||||
self.extract_media(&video_url, &mut requester).await?;
|
||||
|
||||
if preview.is_empty() {
|
||||
preview = format!("https://trailers.jav.si/preview/{id}.mp4");
|
||||
}
|
||||
let video_item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url,
|
||||
"javtiful".into(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.formats(formats)
|
||||
.tags(tags)
|
||||
.preview(preview)
|
||||
.views(views);
|
||||
Ok(video_item)
|
||||
|
||||
}
|
||||
|
||||
async fn extract_media(
|
||||
&self,
|
||||
url: &str,
|
||||
requester: &mut Requester,
|
||||
) -> Result<(Vec<String>, Vec<VideoFormat>, u32)> {
|
||||
let text = requester
|
||||
.get(url, Some(Version::HTTP_2))
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("{}", e)))?;
|
||||
let tags = text.split("related-actress").next()
|
||||
.and_then(|s| s.split("video-comments").next())
|
||||
.and_then(|s| s.split(">Tags<").nth(1))
|
||||
.map(|tag_block| {
|
||||
tag_block
|
||||
.split("<a ")
|
||||
.skip(1)
|
||||
.filter_map(|tag_el| {
|
||||
tag_el
|
||||
.split('>')
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.map(|s| decode(s.as_bytes()).to_string().unwrap_or(s.to_string()).titlecase())
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
.unwrap_or_else(|| vec![]);
|
||||
for tag in &tags {
|
||||
Self::push_unique(&self.categories, FilterOption {
|
||||
id: tag.to_ascii_lowercase().replace(" ","+"),
|
||||
title: tag.to_string(),
|
||||
});
|
||||
}
|
||||
let views = text.split(" Views ")
|
||||
.next()
|
||||
.and_then(|s| s.split(" ").last())
|
||||
.and_then(|s| s.replace(".","")
|
||||
.parse::<u32>().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
let quality="1080p".to_string();
|
||||
let video_url = url.replace("javtiful.com","hottub.spacemoehre.de/proxy/javtiful");
|
||||
Ok((
|
||||
tags,
|
||||
vec![VideoFormat::new(video_url, quality, "video/mp4".into())],
|
||||
views,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for JavtifulProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
_pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
|
||||
let res = match query {
|
||||
Some(q) => self.to_owned().query(cache, page, &q, options).await,
|
||||
None => self.get(cache, page, &sort, options).await,
|
||||
};
|
||||
|
||||
res.unwrap_or_else(|e| {
|
||||
eprintln!("javtiful error: {e}");
|
||||
vec![]
|
||||
})
|
||||
}
|
||||
|
||||
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(v))
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,31 @@
|
||||
use std::vec;
|
||||
use async_trait::async_trait;
|
||||
use diesel::r2d2;
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use futures::future::join_all;
|
||||
use wreq::Version;
|
||||
use crate::db;
|
||||
use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::{format_error_chain, send_discord_error_report};
|
||||
use crate::videos::ServerOptions;
|
||||
use crate::videos::{VideoItem};
|
||||
use crate::DbPool;
|
||||
use crate::util::requester::Requester;
|
||||
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
JsonError(serde_json::Error);
|
||||
Pool(r2d2::Error); // Assuming r2d2 or similar for pool
|
||||
}
|
||||
errors {
|
||||
ParsingError(t: String) {
|
||||
description("parsing error")
|
||||
display("Parsing error: '{}'", t)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -24,259 +33,189 @@ error_chain! {
|
||||
pub struct MissavProvider {
|
||||
url: String,
|
||||
}
|
||||
|
||||
impl MissavProvider {
|
||||
pub fn new() -> Self {
|
||||
MissavProvider {
|
||||
url: "https://missav.ws".to_string()
|
||||
}
|
||||
}
|
||||
async fn get(&self, cache:VideoCache, pool:DbPool, page: u8, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
|
||||
|
||||
// Extract needed fields from options at the start
|
||||
let language = options.language.clone().unwrap();
|
||||
let filter = options.filter.clone().unwrap();
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
async fn get(&self, cache: VideoCache, pool: DbPool, page: u8, mut sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
|
||||
// Use ok_or to avoid unwrapping options
|
||||
let language = options.language.as_ref().ok_or("Missing language")?;
|
||||
let filter = options.filter.as_ref().ok_or("Missing filter")?;
|
||||
let mut requester = options.requester.clone().ok_or("Missing requester")?;
|
||||
|
||||
let url_str = format!("{}/{}/{}?page={}&sort={}", self.url, language, filter, page, sort);
|
||||
if !sort.is_empty() {
|
||||
sort = format!("&sort={}", sort);
|
||||
}
|
||||
let url_str = format!("{}/{}/{}?page={}{}", self.url, language, filter, page, sort);
|
||||
|
||||
let old_items = match cache.get(&url_str) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
else{
|
||||
items.clone()
|
||||
}
|
||||
if let Some((time, items)) = cache.get(&url_str) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 3600 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
// Pass a reference to options if needed, or reconstruct as needed
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool, requester).await;
|
||||
let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
|
||||
eprintln!("Error fetching Missav URL {}: {}", url_str, e);
|
||||
let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
|
||||
"".to_string()
|
||||
});
|
||||
let video_items = self.get_video_items_from_html(text, pool, requester).await;
|
||||
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url_str);
|
||||
cache.insert(url_str.clone(), video_items.clone());
|
||||
} else{
|
||||
return Ok(old_items);
|
||||
cache.insert(url_str, video_items.clone());
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(&self, cache: VideoCache, pool:DbPool, page: u8, query: &str, sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
|
||||
// Extract needed fields from options at the start
|
||||
let language = options.language.clone().unwrap();
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
async fn query(&self, cache: VideoCache, pool: DbPool, page: u8, query: &str, mut sort: String, options: ServerOptions) -> Result<Vec<VideoItem>> {
|
||||
let language = options.language.as_ref().ok_or("Missing language")?;
|
||||
let mut requester = options.requester.clone().ok_or("Missing requester")?;
|
||||
|
||||
let search_string = query.replace(" ", "%20");
|
||||
let url_str = format!(
|
||||
"{}/{}/search/{}?page={}&sort={}",
|
||||
self.url, language, search_string, page, sort
|
||||
);
|
||||
// Check our Video Cache. If the result is younger than 1 hour, we return it.
|
||||
let old_items = match cache.get(&url_str) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
else{
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone())
|
||||
}
|
||||
if !sort.is_empty() {
|
||||
sort = format!("&sort={}", sort);
|
||||
}
|
||||
let url_str = format!("{}/{}/search/{}?page={}{}", self.url, language, search_string, page, sort);
|
||||
|
||||
if let Some((time, items)) = cache.get(&url_str) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 3600 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool, requester).await;
|
||||
}
|
||||
|
||||
let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
|
||||
eprintln!("Error fetching Missav URL {}: {}", url_str, e);
|
||||
let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
|
||||
"".to_string()
|
||||
});
|
||||
let video_items = self.get_video_items_from_html(text, pool, requester).await;
|
||||
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url_str);
|
||||
cache.insert(url_str.clone(), video_items.clone());
|
||||
} else{
|
||||
return Ok(old_items);
|
||||
cache.insert(url_str, video_items.clone());
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(&self, html: String, pool: DbPool, requester: Requester) -> Vec<VideoItem> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
}
|
||||
let raw_videos = html
|
||||
.split("@mouseenter=\"setPreview(\'")
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.to_vec();
|
||||
let mut urls: Vec<String> = vec![];
|
||||
for video_segment in &raw_videos {
|
||||
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
|
||||
// for (index, line) in vid.iter().enumerate() {
|
||||
// println!("Line {}: {}", index, line.to_string().trim());
|
||||
// }
|
||||
|
||||
let url_str = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
urls.push(url_str.clone());
|
||||
|
||||
}
|
||||
let futures = urls.into_iter().map(|el| self.get_video_item(el.clone(), pool.clone(), requester.clone()));
|
||||
let results: Vec<Result<VideoItem>> = join_all(futures).await;
|
||||
let video_items: Vec<VideoItem> = results
|
||||
.into_iter()
|
||||
.filter_map(Result::ok)
|
||||
.collect();
|
||||
if html.is_empty() { return vec![]; }
|
||||
|
||||
return video_items;
|
||||
let segments: Vec<&str> = html.split("@mouseenter=\"setPreview(\'").collect();
|
||||
if segments.len() < 2 { return vec![]; }
|
||||
|
||||
let mut urls = vec![];
|
||||
for video_segment in &segments[1..] {
|
||||
// Safer parsing: find start and end of href
|
||||
if let Some(start) = video_segment.find("<a href=\"") {
|
||||
let rest = &video_segment[start + 9..];
|
||||
if let Some(end) = rest.find('\"') {
|
||||
urls.push(rest[..end].to_string());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let futures = urls.into_iter().map(|url| self.get_video_item(url, pool.clone(), requester.clone()));
|
||||
join_all(futures).await.into_iter().filter_map(Result::ok).collect()
|
||||
}
|
||||
|
||||
async fn get_video_item(&self, url_str: String, pool: DbPool, mut requester: Requester) -> Result<VideoItem> {
|
||||
let mut conn = pool.get().expect("couldn't get db connection from pool");
|
||||
let db_result = db::get_video(&mut conn,url_str.clone());
|
||||
match db_result {
|
||||
Ok(Some(entry)) => {
|
||||
let video_item: VideoItem = serde_json::from_str(entry.as_str()).unwrap();
|
||||
return Ok(video_item)
|
||||
}
|
||||
Ok(None) => {
|
||||
}
|
||||
Err(e) => {
|
||||
println!("Error fetching video from database: {}", e);
|
||||
// 1. Database Check
|
||||
{
|
||||
let mut conn = pool.get().map_err(|e| Error::from(format!("Pool error: {}", e)))?;
|
||||
if let Ok(Some(entry)) = db::get_video(&mut conn, url_str.clone()) {
|
||||
if let Ok(video_item) = serde_json::from_str::<VideoItem>(entry.as_str()) {
|
||||
return Ok(video_item);
|
||||
}
|
||||
}
|
||||
}
|
||||
drop(conn);
|
||||
let vid = requester.get(&url_str).await.unwrap();
|
||||
let mut title = vid.split("<meta property=\"og:title\" content=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0].trim()
|
||||
.to_string();
|
||||
|
||||
// 2. Fetch Page
|
||||
let vid = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap_or_else(|e| {
|
||||
eprintln!("Error fetching Missav URL {}: {}", url_str, e);
|
||||
let _ = send_discord_error_report(e.to_string(), None, Some(&url_str), None, file!(), line!(), module_path!());
|
||||
"".to_string()
|
||||
});
|
||||
|
||||
// Helper closure to extract content between two strings
|
||||
let extract = |html: &str, start_tag: &str, end_tag: &str| -> Option<String> {
|
||||
let start = html.find(start_tag)? + start_tag.len();
|
||||
let rest = &html[start..];
|
||||
let end = rest.find(end_tag)?;
|
||||
Some(rest[..end].to_string())
|
||||
};
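// e.g. extract(&vid, "<meta property=\"og:image\" content=\"", "\"") yields the text between
// the two markers, or None when either marker is missing.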
|
||||
|
||||
let mut title = extract(&vid, "<meta property=\"og:title\" content=\"", "\"")
|
||||
.ok_or_else(|| ErrorKind::ParsingError(format!("title\n{:?}", vid)))?;
|
||||
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title);
|
||||
if url_str.contains("uncensored") {
|
||||
title = format!("[Uncensored] {}", title);
|
||||
}
|
||||
let thumb = vid.split("<meta property=\"og:image\" content=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
|
||||
let raw_duration = vid.split("<meta property=\"og:video:duration\" content=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let duration = raw_duration.parse::<u32>().unwrap_or(0);
|
||||
let thumb = extract(&vid, "<meta property=\"og:image\" content=\"", "\"")
|
||||
.unwrap_or_default();
|
||||
|
||||
let id = url_str.split("/").collect::<Vec<&str>>().last().unwrap()
|
||||
.to_string();
|
||||
let duration = extract(&vid, "<meta property=\"og:video:duration\" content=\"", "\"")
|
||||
.and_then(|d| d.parse::<u32>().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
let id = url_str.split('/').last().ok_or("No ID found")?.to_string();
|
||||
|
||||
// 3. Extract Tags (Generic approach to avoid repetitive code)
|
||||
let mut tags = vec![];
|
||||
if vid.contains("<span>Actress:</span>"){
|
||||
for actress_snippet in vid.split("<span>Actress:</span>").collect::<Vec<&str>>()[1]
|
||||
.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
|
||||
let tag = actress_snippet.split("<").collect::<Vec<&str>>()[0].trim()
|
||||
.to_string();
|
||||
if !tag.is_empty(){
|
||||
tags.push(format!("@actress:{}", tag));
|
||||
for (label, prefix) in [("Actress:", "@actress"), ("Actor:", "@actor"), ("Maker:", "@maker"), ("Genre:", "@genre")] {
|
||||
let marker = format!("<span>{}</span>", label);
|
||||
if let Some(section) = extract(&vid, &marker, "</div>") {
|
||||
for part in section.split("class=\"text-nord13 font-medium\">").skip(1) {
|
||||
if let Some(val) = part.split('<').next() {
|
||||
let clean = val.trim();
|
||||
if !clean.is_empty() {
|
||||
tags.push(format!("{}:{}", prefix, clean));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if vid.contains("<span>Actor:</span>"){
|
||||
for actor_snippet in vid.split("<span>Actor:</span>").collect::<Vec<&str>>()[1]
|
||||
.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
|
||||
let tag = actor_snippet.split("<").collect::<Vec<&str>>()[0].trim()
|
||||
.to_string();
|
||||
if !tag.is_empty(){
|
||||
tags.push(format!("@actor:{}", tag));
|
||||
}
|
||||
}
|
||||
|
||||
// 4. Extract Video URL (The m3u8 logic)
|
||||
let video_url = (|| {
|
||||
let parts_str = vid.split("m3u8").nth(1)?.split("https").next()?;
|
||||
let mut parts: Vec<&str> = parts_str.split('|').collect();
|
||||
parts.reverse();
|
||||
if parts.len() < 8 { return None; }
|
||||
Some(format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
|
||||
parts[1], parts[2], parts[3], parts[4], parts[5], parts[6], parts[7]))
|
||||
})().ok_or_else(|| ErrorKind::ParsingError(format!("video_url\n{:?}", vid).to_string()))?;
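// Assumption: the player script packs the playlist host and path segments as a reversed,
// pipe-separated list right after the literal "m3u8"; if that packing changes, the
// index-based reconstruction above fails and surfaces as a ParsingError.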
|
||||
|
||||
let video_item = VideoItem::new(id, title, video_url, "missav".to_string(), thumb, duration)
|
||||
.tags(tags)
|
||||
.preview(format!("https://fourhoi.com/{}/preview.mp4", url_str.split('/').last().unwrap_or_default()));
|
||||
|
||||
// 5. Cache to DB
|
||||
if let Ok(mut conn) = pool.get() {
|
||||
let _ = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item).unwrap_or_default());
|
||||
}
|
||||
|
||||
if vid.contains("<span>Maker:</span>"){
|
||||
for maker_snippet in vid.split("<span>Maker:</span>").collect::<Vec<&str>>()[1]
|
||||
.split("</div>").collect::<Vec<&str>>()[0]
|
||||
.split("class=\"text-nord13 font-medium\">"){
|
||||
let tag = maker_snippet.split("<").collect::<Vec<&str>>()[0].trim()
|
||||
.to_string();
|
||||
if !tag.is_empty(){
|
||||
tags.push(format!("@maker:{}", tag));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if vid.contains("<span>Genre:</span>"){
|
||||
for tag_snippet in vid.split("<span>Genre:</span>").collect::<Vec<&str>>()[1]
|
||||
.split("</div>").collect::<Vec<&str>>()[0].split("class=\"text-nord13 font-medium\">"){
|
||||
let tag = tag_snippet.split("<").collect::<Vec<&str>>()[0].trim()
|
||||
.to_string();
|
||||
if !tag.is_empty(){
|
||||
tags.push(format!("@genre:{}", tag));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let preview = format!("https://fourhoi.com/{}/preview.mp4",id.clone());
|
||||
|
||||
let mut video_url_parts = vid.split("m3u8").collect::<Vec<&str>>()[1]
|
||||
.split("https").collect::<Vec<&str>>()[0]
|
||||
.split("|").collect::<Vec<&str>>();
|
||||
video_url_parts.reverse();
|
||||
let video_url = format!("https://{}.{}/{}-{}-{}-{}-{}/playlist.m3u8",
|
||||
video_url_parts[1],
|
||||
video_url_parts[2],
|
||||
video_url_parts[3],
|
||||
video_url_parts[4],
|
||||
video_url_parts[5],
|
||||
video_url_parts[6],
|
||||
video_url_parts[7]
|
||||
);
|
||||
let video_item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url.clone(),
|
||||
"missav".to_string(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.tags(tags)
|
||||
.preview(preview)
|
||||
;
|
||||
|
||||
let mut conn = pool.get().expect("couldn't get db connection from pool");
|
||||
let insert_result = db::insert_video(&mut conn, &url_str, &serde_json::to_string(&video_item)?);
|
||||
match insert_result{
|
||||
Ok(_) => (),
|
||||
Err(e) => {println!("{:?}", e); }
|
||||
}
|
||||
drop(conn);
|
||||
|
||||
return Ok(video_item);
|
||||
Ok(video_item)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for MissavProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = per_page;
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => self.query(cache, pool, page.parse::<u8>().unwrap_or(1), &q, sort, options).await,
|
||||
None => self.get(cache, pool, page.parse::<u8>().unwrap_or(1), sort, options).await,
|
||||
async fn get_videos(&self, cache: VideoCache, pool: DbPool, sort: String, query: Option<String>, page: String, _per_page: String, options: ServerOptions) -> Vec<VideoItem> {
|
||||
let page_num = page.parse::<u8>().unwrap_or(1);
|
||||
let result = match query {
|
||||
Some(q) => self.query(cache, pool, page_num, &q, sort, options).await,
|
||||
None => self.get(cache, pool, page_num, sort, options).await,
|
||||
};
|
||||
match videos {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
result.unwrap_or_else(|e| {
eprintln!("Error fetching videos: {}", e);
// Spawn the async Discord report so it actually runs; calling it without .await just drops the future.
let msg = e.to_string();
let chain = format_error_chain(&e);
tokio::spawn(async move {
let _ = send_discord_error_report(msg, Some(chain), None, None, file!(), line!(), module_path!()).await;
});
vec![]
})
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -36,6 +36,11 @@ pub mod tnaflix;
|
||||
pub mod pornxp;
|
||||
pub mod rule34gen;
|
||||
pub mod xxdbx;
|
||||
pub mod hqporner;
|
||||
pub mod noodlemagazine;
|
||||
pub mod pimpbunny;
|
||||
pub mod javtiful;
|
||||
pub mod hypnotube;
|
||||
|
||||
// convenient alias
|
||||
pub type DynProvider = Arc<dyn Provider>;
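// Arc<dyn Provider> lets ALL_PROVIDERS hold every provider type behind one registry and
// hand out cheaply clonable handles to the same shared instance.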
|
||||
@@ -48,6 +53,12 @@ pub static ALL_PROVIDERS: Lazy<HashMap<&'static str, DynProvider>> = Lazy::new(|
|
||||
m.insert("pornxp", Arc::new(pornxp::PornxpProvider::new()) as DynProvider);
|
||||
m.insert("rule34gen", Arc::new(rule34gen::Rule34genProvider::new()) as DynProvider);
|
||||
m.insert("xxdbx", Arc::new(xxdbx::XxdbxProvider::new()) as DynProvider);
|
||||
m.insert("hqporner", Arc::new(hqporner::HqpornerProvider::new()) as DynProvider);
|
||||
m.insert("pmvhaven", Arc::new(pmvhaven::PmvhavenProvider::new()) as DynProvider);
|
||||
m.insert("noodlemagazine", Arc::new(noodlemagazine::NoodlemagazineProvider::new()) as DynProvider);
|
||||
m.insert("pimpbunny", Arc::new(pimpbunny::PimpbunnyProvider::new()) as DynProvider);
|
||||
m.insert("javtiful", Arc::new(javtiful::JavtifulProvider::new()) as DynProvider);
|
||||
m.insert("hypnotube", Arc::new(hypnotube::HypnotubeProvider::new()) as DynProvider);
|
||||
// add more here as you migrate them
|
||||
m
|
||||
});
|
||||
@@ -71,13 +82,13 @@ pub trait Provider: Send + Sync {
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem>;
|
||||
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
|
||||
println!(
|
||||
"Getting channel for placeholder with client version: {:?}",
|
||||
clientversion
|
||||
);
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
Some(Channel {
|
||||
id: "placeholder".to_string(),
|
||||
name: "PLACEHOLDER".to_string(),
|
||||
description: "PLACEHOLDER FOR PARENT CLASS".to_string(),
|
||||
@@ -88,6 +99,6 @@ pub trait Provider: Send + Sync {
|
||||
options: vec![],
|
||||
nsfw: true,
|
||||
cacheDuration: None,
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
308
src/providers/noodlemagazine.rs
Normal file
@@ -0,0 +1,308 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::parse_abbreviated_number;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use futures::future::join_all;
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use wreq::Version;
|
||||
use titlecase::Titlecase;
|
||||
use std::vec;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct NoodlemagazineProvider {
|
||||
url: String,
|
||||
}
|
||||
|
||||
impl NoodlemagazineProvider {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
url: "https://noodlemagazine.com".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn build_channel(&self, _clientversion: ClientVersion) -> Channel {
|
||||
Channel {
|
||||
id: "noodlemagazine".into(),
|
||||
name: "Noodlemagazine".into(),
|
||||
description: "The Best Search Engine of HD Videos".into(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=noodlemagazine.com".into(),
|
||||
status: "active".into(),
|
||||
categories: vec![],
|
||||
options: vec![],
|
||||
nsfw: true,
|
||||
cacheDuration: Some(1800),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
_sort: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let video_url = format!(
|
||||
"{}/popular/recent?sort_by=views&sort_order=desc&p={}",
|
||||
self.url,
|
||||
page.saturating_sub(1)
|
||||
);
|
||||
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((t, i)) if t.elapsed().unwrap_or_default().as_secs() < 300 => return Ok(i.clone()),
|
||||
Some((_, i)) => i.clone(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let mut requester = match options.requester.clone() {
|
||||
Some(r) => r,
|
||||
None => return Ok(old_items),
|
||||
};
|
||||
|
||||
let text = requester
|
||||
.get(&video_url, Some(Version::HTTP_2))
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
let items = self.get_video_items_from_html(text, requester).await;
|
||||
|
||||
if items.is_empty() {
|
||||
Ok(old_items)
|
||||
} else {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url, items.clone());
|
||||
Ok(items)
|
||||
}
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let q = query.trim().replace(' ', "%20");
|
||||
let video_url = format!("{}/video/{}?p={}", self.url, q, page.saturating_sub(1));
|
||||
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((t, i)) if t.elapsed().unwrap_or_default().as_secs() < 300 => return Ok(i.clone()),
|
||||
Some((_, i)) => i.clone(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let mut requester = match options.requester.clone() {
|
||||
Some(r) => r,
|
||||
None => return Ok(old_items),
|
||||
};
|
||||
|
||||
let text = requester
|
||||
.get(&video_url, Some(Version::HTTP_2))
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
|
||||
let items = self.get_video_items_from_html(text, requester).await;
|
||||
|
||||
if items.is_empty() {
|
||||
Ok(old_items)
|
||||
} else {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url, items.clone());
|
||||
Ok(items)
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(
|
||||
&self,
|
||||
html: String,
|
||||
requester: Requester,
|
||||
) -> Vec<VideoItem> {
|
||||
if html.is_empty() || html.contains("404 Not Found") {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let section = match html.split(">Show more</div>").next() {
|
||||
Some(s) => s,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let list = match section
|
||||
.split("<div class=\"list_videos\" id=\"list_videos\">")
|
||||
.nth(1)
|
||||
{
|
||||
Some(l) => l,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let raw_videos = list
|
||||
.split("<div class=\"item\">")
|
||||
.skip(1)
|
||||
.map(|s| s.to_string());
|
||||
|
||||
let futures = raw_videos.map(|v| self.get_video_item(v, requester.clone()));
|
||||
let results = join_all(futures).await;
|
||||
|
||||
results.into_iter().filter_map(Result::ok).collect()
|
||||
}
|
||||
|
||||
async fn get_video_item(
|
||||
&self,
|
||||
video_segment: String,
|
||||
requester: Requester,
|
||||
) -> Result<VideoItem> {
|
||||
let href = video_segment
|
||||
.split("<a href=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| Error::from("missing href"))?;
|
||||
|
||||
let video_url = format!("{}{}", self.url, href);
|
||||
|
||||
let mut title = video_segment
|
||||
.split("<div class=\"title\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("")
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
title = decode(title.as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or(title)
|
||||
.titlecase();
|
||||
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.nth(4)
|
||||
.and_then(|s| s.split('.').next())
|
||||
.ok_or_else(|| Error::from("missing id"))?
|
||||
.to_string();
|
||||
|
||||
let thumb = video_segment
|
||||
.split("data-src=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let raw_duration = video_segment
|
||||
.split("#clock-o\"></use></svg>")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("0:00");
|
||||
|
||||
let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
let views = video_segment
|
||||
.split("#eye\"></use></svg>")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.and_then(|v| parse_abbreviated_number(v.trim()))
|
||||
.unwrap_or(0);
|
||||
|
||||
let formats = self
|
||||
.extract_media(&video_url, requester)
|
||||
.await
|
||||
.ok_or_else(|| Error::from("media extraction failed"))?;
|
||||
|
||||
Ok(
|
||||
VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url,
|
||||
"noodlemagazine".into(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.views(views)
|
||||
.formats(formats),
|
||||
)
|
||||
}
|
||||
|
||||
async fn extract_media(
&self,
video_url: &String,
mut requester: Requester,
) -> Option<Vec<VideoFormat>> {
let text = requester
.get(video_url, Some(Version::HTTP_2))
.await
.unwrap_or_default();

let json_str = text
.split("window.playlist = ")
.nth(1)?
.split(';')
.next()?;

let json: serde_json::Value = serde_json::from_str(json_str).ok()?;

let sources = json["sources"].as_array()?;

let mut formats = vec![];

for s in sources {
let file = s["file"].as_str()?.to_string();
let label = s["label"].as_str().unwrap_or("unknown").to_string();

formats.push(
VideoFormat::new(file, label.clone(), "video/mp4".into())
.format_id(label.clone())
.format_note(label.clone())
.http_header("Referer".into(), video_url.clone()),
);
}

Some(formats.into_iter().rev().collect())
}
}

#[async_trait]
impl Provider for NoodlemagazineProvider {
async fn get_videos(
&self,
cache: VideoCache,
pool: DbPool,
sort: String,
query: Option<String>,
page: String,
per_page: String,
options: ServerOptions,
) -> Vec<VideoItem> {
let _ = pool;
let _ = per_page;

let page = page.parse::<u8>().unwrap_or(1);

let res = match query {
Some(q) => self.query(cache, page, &q, options).await,
None => self.get(cache, page, &sort, options).await,
};

res.unwrap_or_else(|e| {
eprintln!("Noodlemagazine error: {e}");
vec![]
})
}

fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
Some(self.build_channel(clientversion))
}
}

@@ -44,7 +44,6 @@ impl OkpornProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()

@@ -45,7 +45,6 @@ impl OkxxxProvider {
let old_items = match cache.get(&video_url) {
Some((time, items)) => {
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
println!("Cache hit for URL: {}", video_url);
return Ok(items.clone());
} else {
items.clone()

@@ -84,24 +84,31 @@ impl OmgxxxProvider {
|
||||
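// Requester::get now takes an optional HTTP version as its second argument
// (None presumably meaning the client default), which is why the call sites
// below gain a trailing `None` or `Some(Version::HTTP_2)`.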
let mut requester = util::requester::Requester::new();
|
||||
for page in 1..10 {
|
||||
let text = requester
|
||||
.get(format!("{}/models/total-videos/{}/?gender_id=0", &base_url, page).as_str())
|
||||
.get(
|
||||
format!("{}/models/total-videos/{}/?gender_id=0", &base_url, page).as_str(),
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
if text.contains("404 Not Found") || text.is_empty() {
|
||||
break;
|
||||
}
|
||||
let stars_div = text
|
||||
.split("id=\"list_models_models_list_items\"")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("class=\"pagination\"")
|
||||
.split("<div class=\"list-models\">")
|
||||
.collect::<Vec<&str>>()
|
||||
.last()
|
||||
.unwrap()
|
||||
.split("custom_list_models_models_list_pagination")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
for stars_element in stars_div.split("<a ").collect::<Vec<&str>>()[1..].to_vec() {
|
||||
let star_url = stars_element.split("href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
let star_id = star_url.split("/").collect::<Vec<&str>>()[4].to_string();
|
||||
let star_name = stars_element.split("title=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
let star_name = stars_element
|
||||
.split("<strong class=\"title\">")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
Self::push_unique(
|
||||
@@ -122,7 +129,7 @@ impl OmgxxxProvider {
|
||||
loop {
|
||||
page += 1;
|
||||
let text = requester
|
||||
.get(format!("{}/sites/{}/", &base_url, page).as_str())
|
||||
.get(format!("{}/sites/{}/", &base_url, page).as_str(), None)
|
||||
.await
|
||||
.unwrap();
|
||||
if text.contains("404 Not Found") || text.is_empty() {
|
||||
@@ -158,7 +165,7 @@ impl OmgxxxProvider {
|
||||
|
||||
async fn load_networks(base_url: &str, networks: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = util::requester::Requester::new();
|
||||
let text = requester.get(&base_url).await.unwrap();
|
||||
let text = requester.get(&base_url, None).await.unwrap();
|
||||
let networks_div = text.split("class=\"sites__list\"").collect::<Vec<&str>>()[1]
|
||||
.split("</div>")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
@@ -333,7 +340,6 @@ impl OmgxxxProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -345,7 +351,7 @@ impl OmgxxxProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -364,18 +370,35 @@ impl OmgxxxProvider {
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let mut search_type = "search";
|
||||
let mut search_string = query.to_string().to_lowercase().trim().replace(" ", "-");
|
||||
if query.starts_with("@") {
|
||||
search_type = query.split(":").collect::<Vec<&str>>()[0].trim_start_matches("@");
|
||||
search_string = search_string.split(":").collect::<Vec<&str>>()[1].to_string();
|
||||
let mut search_string = query.to_string().to_ascii_lowercase().trim().to_string();
|
||||
match self
|
||||
.stars
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.find(|s| s.title.to_ascii_lowercase() == search_string)
|
||||
{
|
||||
Some(star) => {
|
||||
search_type = "models";
|
||||
search_string = star.id.clone();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let video_url = format!(
|
||||
"{}/{}/{}/{}/",
|
||||
self.url,
|
||||
search_type,
|
||||
search_string,
|
||||
page
|
||||
);
|
||||
match self
|
||||
.sites
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.find(|s| s.title.to_ascii_lowercase() == search_string)
|
||||
{
|
||||
Some(site) => {
|
||||
search_type = "sites";
|
||||
search_string = site.id.clone();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
let mut video_url = format!("{}/{}/{}/{}/", self.url, search_type, search_string, page);
|
||||
video_url = video_url.replace(" ", "+");
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
@@ -393,7 +416,7 @@ impl OmgxxxProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -517,17 +540,36 @@ impl OmgxxxProvider {
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
format!(
|
||||
"@models:{}",
|
||||
s.split("/").collect::<Vec<&str>>()[4].to_string()
|
||||
)
|
||||
Self::push_unique(
|
||||
&self.stars,
|
||||
FilterOption {
|
||||
id: s.split("/").collect::<Vec<&str>>()[4].to_string(),
|
||||
title: s.split(">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.trim()
|
||||
.to_string(),
|
||||
},
|
||||
);
|
||||
s.split(">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.trim()
|
||||
.to_string()
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
.to_vec(),
|
||||
false => vec![],
|
||||
};
|
||||
if !site_id.is_empty() {
|
||||
tags.push(format!("@sites:{}", site_id));
|
||||
Self::push_unique(
|
||||
&self.sites,
|
||||
FilterOption {
|
||||
id: site_id,
|
||||
title: site_name.to_string(),
|
||||
},
|
||||
);
|
||||
tags.push(site_name.to_string());
|
||||
}
|
||||
|
||||
let video_item = VideoItem::new(
|
||||
@@ -579,7 +621,7 @@ impl Provider for OmgxxxProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,7 +51,7 @@ impl ParadisehillProvider {
|
||||
}
|
||||
};
|
||||
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let text = requester.get(&url_str, None).await.unwrap();
|
||||
// Pass a reference to options if needed, or reconstruct as needed
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), requester)
|
||||
@@ -93,7 +93,7 @@ impl ParadisehillProvider {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let text = requester.get(&url_str, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), requester)
|
||||
.await;
|
||||
@@ -143,7 +143,7 @@ impl ParadisehillProvider {
|
||||
}
|
||||
|
||||
async fn get_video_item(&self, url_str: String, mut requester: Requester) -> Result<VideoItem> {
|
||||
let vid = requester.get(&url_str).await.unwrap();
|
||||
let vid = requester.get(&url_str, None).await.unwrap();
|
||||
let mut title = vid
|
||||
.split("<meta property=\"og:title\" content=\"")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
|
||||
@@ -45,7 +45,6 @@ impl PerfectgirlsProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
|
||||
@@ -11,6 +11,7 @@ use futures::future::join_all;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use serde::Deserialize;
|
||||
use serde::Serialize;
|
||||
use wreq::Version;
|
||||
use std::vec;
|
||||
use wreq::Client;
|
||||
use wreq_util::Emulation;
|
||||
@@ -59,7 +60,7 @@ impl PerverzijaProvider {
|
||||
let old_items = match cache.get(&url_str) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 60 {
|
||||
println!("Cache hit for URL: {}", url_str);
|
||||
//println!("Cache hit for URL: {}", url_str);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -71,7 +72,7 @@ impl PerverzijaProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(), pool);
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url_str);
|
||||
@@ -122,7 +123,7 @@ impl PerverzijaProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let text = requester.get(&url_str, Some(Version::HTTP_2)).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = match query_parse {
|
||||
true => {
|
||||
self.get_video_items_from_html_query(text.clone(), pool)
|
||||
|
||||
537
src/providers/pimpbunny.rs
Normal file
@@ -0,0 +1,537 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::{format_error_chain, send_discord_error_report};
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoFormat, VideoItem};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use futures::future::join_all;
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::{thread, vec};
|
||||
use titlecase::Titlecase;
|
||||
use wreq::Version;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
Json(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PimpbunnyProvider {
|
||||
url: String,
|
||||
stars: Arc<RwLock<Vec<FilterOption>>>,
|
||||
categories: Arc<RwLock<Vec<FilterOption>>>,
|
||||
}
|
||||
|
||||
impl PimpbunnyProvider {
|
||||
pub fn new() -> Self {
|
||||
let provider = Self {
|
||||
url: "https://pimpbunny.com".to_string(),
|
||||
stars: Arc::new(RwLock::new(vec![])),
|
||||
categories: Arc::new(RwLock::new(vec![])),
|
||||
};
|
||||
provider.spawn_initial_load();
|
||||
provider
|
||||
}
|
||||
|
||||
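// Stars and categories are filled in asynchronously by spawn_initial_load,
// so the channel built here may report an empty category list until the
// background load has completed.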
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
let _ = clientversion;
|
||||
Channel {
|
||||
id: "pimpbunny".to_string(),
|
||||
name: "Pimpbunny".to_string(),
|
||||
description: "Watch Porn!".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=pimpbunny.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories: self
|
||||
.categories
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.map(|c| c.title.clone())
|
||||
.collect(),
|
||||
options: vec![ChannelOption {
|
||||
id: "sort".to_string(),
|
||||
title: "Sort".to_string(),
|
||||
description: "Sort the Videos".to_string(),
|
||||
systemImage: "list.number".to_string(),
|
||||
colorName: "blue".to_string(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "featured".into(),
|
||||
title: "Featured".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most recent".into(),
|
||||
title: "Most Recent".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most viewed".into(),
|
||||
title: "Most Viewed".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "best rated".into(),
|
||||
title: "Best Rated".into(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
}],
|
||||
nsfw: true,
|
||||
cacheDuration: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn spawn_initial_load(&self) {
|
||||
let url = self.url.clone();
|
||||
let stars = Arc::clone(&self.stars);
|
||||
let categories = Arc::clone(&self.categories);
|
||||
|
||||
// NOTE: thread::spawn never polls the future an async closure returns, so a
// plain closure is used here to make sure the runtime below actually runs.
thread::spawn(move || {
let rt = match tokio::runtime::Builder::new_current_thread()
.enable_all()
.build()
{
Ok(rt) => rt,
Err(e) => {
// Without a runtime there is nothing to drive the async Discord
// report, so a runtime-creation failure is only logged locally.
eprintln!("tokio runtime failed: {e}");
return;
}
};

rt.block_on(async {
|
||||
if let Err(e) = Self::load_stars(&url, Arc::clone(&stars)).await {
|
||||
eprintln!("load_stars failed: {e}");
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Pimpbunny Provider"),
|
||||
Some("Failed to load stars during initial load"),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
}
|
||||
if let Err(e) = Self::load_categories(&url, Arc::clone(&categories)).await {
|
||||
eprintln!("load_categories failed: {e}");
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Pimpbunny Provider"),
|
||||
Some("Failed to load categories during initial load"),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn push_unique(target: &Arc<RwLock<Vec<FilterOption>>>, item: FilterOption) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x.id == item.id) {
|
||||
vec.push(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
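// load_stars scrapes the first page of the models listing: it isolates the
// block between the models-list marker and the page description, skips
// promoted cards, and records each model's URL slug (used as the filter id)
// together with its display name.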
async fn load_stars(base: &str, stars: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = Requester::new();
|
||||
let text = requester
|
||||
.get(
|
||||
&format!("{base}/onlyfans-models/?models_per_page=20"),
|
||||
Some(Version::HTTP_2),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("{}", e)))?;
|
||||
|
||||
let block = text
|
||||
.split("vt_list_models_with_advertising_custom_models_list_items")
|
||||
.last()
|
||||
.ok_or_else(|| ErrorKind::Parse("missing stars block".into()))?
|
||||
.split("pb-page-description")
|
||||
.next()
|
||||
.unwrap_or("");
|
||||
|
||||
for el in block.split("<div class=\"col\">").skip(1) {
|
||||
if el.contains("pb-promoted-link") || !el.contains("href=\"https://pimpbunny.com/onlyfans-models/") {
|
||||
continue;
|
||||
}
|
||||
|
||||
let id = el
|
||||
.split("href=\"https://pimpbunny.com/onlyfans-models/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("/\"").next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("star id: {el}").into()))?
|
||||
.to_string();
|
||||
|
||||
let title = el
|
||||
.split("ui-card-title")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("star title: {el}").into()))?
|
||||
.to_string();
|
||||
|
||||
Self::push_unique(&stars, FilterOption { id, title });
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn load_categories(base: &str, cats: Arc<RwLock<Vec<FilterOption>>>) -> Result<()> {
|
||||
let mut requester = Requester::new();
|
||||
let text = requester
|
||||
.get(
|
||||
&format!("{base}/categories/?items_per_page=120"),
|
||||
Some(Version::HTTP_2),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("{}", e)))?;
|
||||
|
||||
let block = text
|
||||
.split("list_categories_categories_list_items")
|
||||
.last()
|
||||
.ok_or_else(|| ErrorKind::Parse("categories block".into()))?
|
||||
.split("pb-pagination-wrapper")
|
||||
.next()
|
||||
.unwrap_or("");
|
||||
|
||||
for el in block.split("<div class=\"col\">").skip(1) {
|
||||
let id = el
|
||||
.split("href=\"https://pimpbunny.com/categories/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("/\"").next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("category id: {el}").into()))?
|
||||
.to_string();
|
||||
|
||||
let title = el
|
||||
.split("ui-heading-h3")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('<').next())
|
||||
.ok_or_else(|| ErrorKind::Parse(format!("category title: {el}").into()))?
|
||||
.titlecase();
|
||||
|
||||
Self::push_unique(&cats, FilterOption { id, title });
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let sort_string = match sort {
|
||||
"best rated" => "&sort_by=rating",
|
||||
"most viewed" => "&sort_by=video_viewed",
|
||||
_ => "&sort_by=post_date",
|
||||
};
|
||||
let video_url = format!(
|
||||
"{}/videos/{}/?videos_per_page=20{}",
|
||||
self.url, page, sort_string
|
||||
);
|
||||
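// Cache-or-refresh pattern shared by the providers: entries younger than
// five minutes are returned directly, otherwise the stale items are kept as
// a fallback in case the fresh scrape comes back empty.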
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url, Some(Version::HTTP_11)).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), &mut requester)
|
||||
.await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let search_string = query.trim().to_string();
|
||||
|
||||
let mut video_url = format!(
|
||||
"{}/search/{}/?mode=async&function=get_block&block_id=list_videos_videos_list_search_result&videos_per_page=20&from_videos={}",
|
||||
self.url, search_string.replace(" ","-"), page
|
||||
);
|
||||
|
||||
let sort_string = match options.sort.as_deref().unwrap_or("") {
|
||||
"best rated" => "&sort_by=rating",
|
||||
"most viewed" => "&sort_by=video_viewed",
|
||||
_ => "&sort_by=post_date",
|
||||
};
|
||||
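// A search string that exactly matches a known model or category name
// (case-insensitive) is rewritten into the corresponding listing URL, reusing
// the ids collected by spawn_initial_load at startup.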
if let Some(star) = self
|
||||
.stars
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.find(|s| s.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
|
||||
{
|
||||
video_url = format!(
|
||||
"{}/onlyfans-models/{}/{}/?videos_per_page=20{}",
|
||||
self.url, star.id, page, sort_string
|
||||
);
|
||||
}
|
||||
if let Some(cat) = self
|
||||
.categories
|
||||
.read()
|
||||
.unwrap()
|
||||
.iter()
|
||||
.find(|c| c.title.to_ascii_lowercase() == search_string.to_ascii_lowercase())
|
||||
{
|
||||
video_url = format!(
|
||||
"{}/categories/{}/{}/?videos_per_page=20{}",
|
||||
self.url, cat.id, page, sort_string
|
||||
);
|
||||
}
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url, Some(Version::HTTP_2)).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self
|
||||
.get_video_items_from_html(text.clone(), &mut requester)
|
||||
.await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn get_video_items_from_html(
|
||||
&self,
|
||||
html: String,
|
||||
requester: &mut Requester,
|
||||
) -> Vec<VideoItem> {
|
||||
if html.is_empty() || html.contains("404 Not Found") {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let block = match html
|
||||
.split("-pagination-wrapper")
|
||||
.next()
|
||||
.and_then(|s| s.split("video_list").nth(2))
|
||||
{
|
||||
Some(b) => b,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let futures = block
|
||||
.split("<div class=\"col\">")
|
||||
.skip(1)
|
||||
.map(|el| self.get_video_item(el.to_string(), requester.clone()));
|
||||
|
||||
join_all(futures)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(Result::ok)
|
||||
.collect()
|
||||
}
|
||||
|
||||
async fn get_video_item(
|
||||
&self,
|
||||
seg: String,
|
||||
mut requester: Requester,
|
||||
) -> Result<VideoItem> {
|
||||
let video_url = seg
|
||||
.split(" href=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video url".into()))?
|
||||
.to_string();
|
||||
|
||||
let mut title = seg
|
||||
.split("card-title")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video title".into()))?
|
||||
.trim()
|
||||
.to_string();
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title).titlecase();
|
||||
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.nth(4)
|
||||
.and_then(|s| s.split('.').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("video id".into()))?
|
||||
.to_string();
|
||||
|
||||
let thumb_block = seg
|
||||
.split("card-thumbnail")
|
||||
.nth(1)
|
||||
.ok_or_else(|| ErrorKind::Parse("thumb block".into()))?;
|
||||
|
||||
let mut thumb = thumb_block
|
||||
.split("src=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
if thumb.starts_with("data:image") {
|
||||
thumb = thumb_block
|
||||
.split("data-webp=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
}
|
||||
|
||||
let preview = thumb_block
|
||||
.split("data-preview=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let (tags, formats, views, duration) =
|
||||
self.extract_media(&video_url, &mut requester).await?;
|
||||
|
||||
Ok(VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url,
|
||||
"pimpbunny".into(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.formats(formats)
|
||||
.tags(tags)
|
||||
.preview(preview)
|
||||
.views(views))
|
||||
}
|
||||
|
||||
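// extract_media leans on the structured data each watch page ships in its
// <script type="application/ld+json"> block: contentUrl, the view counter
// and an ISO-8601 duration. Only one MP4 format is exposed, and its quality
// label is taken from the `_<quality>.mp4` suffix of the content URL.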
async fn extract_media(
|
||||
&self,
|
||||
url: &str,
|
||||
requester: &mut Requester,
|
||||
) -> Result<(Vec<String>, Vec<VideoFormat>, u32, u32)> {
|
||||
let text = requester
|
||||
.get(url, Some(Version::HTTP_2))
|
||||
.await
|
||||
.map_err(|e| Error::from(format!("{}", e)))?;
|
||||
|
||||
let json_str = text
|
||||
.split("application/ld+json\">")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("</script>").next())
|
||||
.ok_or_else(|| ErrorKind::Parse("ld+json".into()))?;
|
||||
|
||||
let json: serde_json::Value = serde_json::from_str(json_str)?;
|
||||
|
||||
let video_url = json["contentUrl"].as_str().unwrap_or("").to_string();
|
||||
let quality = video_url
|
||||
.split('_')
|
||||
.last()
|
||||
.and_then(|s| s.split('.').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let views = json["interactionStatistic"]
|
||||
.as_array()
|
||||
.and_then(|a| a.first())
|
||||
.and_then(|v| v["userInteractionCount"].as_str())
|
||||
.and_then(|v| v.parse().ok())
|
||||
.unwrap_or(0);
|
||||
|
||||
let duration = json["duration"]
|
||||
.as_str()
|
||||
.map(|d| parse_time_to_seconds(&d.replace(['P','T','H','M','S'], "")).unwrap_or(0))
|
||||
.unwrap_or(0) as u32;
|
||||
|
||||
Ok((
|
||||
vec![],
|
||||
vec![VideoFormat::new(video_url, quality, "video/mp4".into())],
|
||||
views,
|
||||
duration,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl Provider for PimpbunnyProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
_pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
|
||||
let res = match query {
|
||||
Some(q) => self.to_owned().query(cache, page, &q, options).await,
|
||||
None => self.get(cache, page, &sort, options).await,
|
||||
};
|
||||
|
||||
res.unwrap_or_else(|e| {
|
||||
eprintln!("pimpbunny error: {e}");
|
||||
vec![]
|
||||
})
|
||||
}
|
||||
|
||||
fn get_channel(&self, v: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(v))
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,17 @@
|
||||
use crate::DbPool;
|
||||
use crate::api::ClientVersion;
|
||||
use crate::providers::Provider;
|
||||
use crate::status::*;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::send_discord_error_report;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
use async_trait::async_trait;
|
||||
use cute::c;
|
||||
use error_chain::error_chain;
|
||||
// use percent_encoding::{AsciiSet, CONTROLS, utf8_percent_encode};
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use std::sync::{Arc, RwLock};
|
||||
use std::vec;
|
||||
use std::fmt::Write;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
@@ -15,294 +20,116 @@ error_chain! {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct PmvhavenRequest {
|
||||
all: bool, //true,
|
||||
pmv: bool, //false,
|
||||
hmv: bool, //false,
|
||||
hypno: bool, //false,
|
||||
tiktok: bool, //false,
|
||||
koreanbj: bool, //false,
|
||||
other: bool, // false,
|
||||
explicitContent: Option<bool>, //null,
|
||||
sameSexContent: Option<bool>, //null,
|
||||
transContent: Option<String>, //null
|
||||
seizureWarning: Option<bool>, //null,
|
||||
tags: Vec<String>, //[],
|
||||
music: Vec<String>, //[],
|
||||
stars: Vec<String>, //[],
|
||||
creators: Vec<String>, //[],
|
||||
range: Vec<u32>, //[0,40],
|
||||
activeTime: String, //"All time",
|
||||
activeQuality: String, //"Quality",
|
||||
aspectRatio: String, //"Aspect Ratio",
|
||||
activeView: String, //"Newest",
|
||||
index: u32, //2,
|
||||
showSubscriptionsOnly: bool, //false,
|
||||
query: String, //"no",
|
||||
profile: Option<String>, //null
|
||||
}
|
||||
|
||||
impl PmvhavenRequest {
|
||||
pub fn new(page: u32) -> Self {
|
||||
PmvhavenRequest {
|
||||
all: true,
|
||||
pmv: false,
|
||||
hmv: false,
|
||||
hypno: false,
|
||||
tiktok: false,
|
||||
koreanbj: false,
|
||||
other: false,
|
||||
explicitContent: None,
|
||||
sameSexContent: None,
|
||||
transContent: None,
|
||||
seizureWarning: None,
|
||||
tags: vec![],
|
||||
music: vec![],
|
||||
stars: vec![],
|
||||
creators: vec![],
|
||||
range: vec![0, 40],
|
||||
activeTime: "All time".to_string(),
|
||||
activeQuality: "Quality".to_string(),
|
||||
aspectRatio: "Aspect Ratio".to_string(),
|
||||
activeView: "Newest".to_string(),
|
||||
index: page,
|
||||
showSubscriptionsOnly: false,
|
||||
query: "no".to_string(),
|
||||
profile: None,
|
||||
}
|
||||
}
|
||||
fn hypno(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = false;
|
||||
self.hmv = false;
|
||||
self.tiktok = false;
|
||||
self.koreanbj = false;
|
||||
self.other = false;
|
||||
self.hypno = true;
|
||||
self
|
||||
}
|
||||
fn pmv(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = true;
|
||||
self.hmv = false;
|
||||
self.tiktok = false;
|
||||
self.koreanbj = false;
|
||||
self.other = false;
|
||||
self.hypno = false;
|
||||
self
|
||||
}
|
||||
fn hmv(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = false;
|
||||
self.hmv = true;
|
||||
self.tiktok = false;
|
||||
self.koreanbj = false;
|
||||
self.other = false;
|
||||
self.hypno = false;
|
||||
self
|
||||
}
|
||||
fn tiktok(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = false;
|
||||
self.hmv = false;
|
||||
self.tiktok = true;
|
||||
self.koreanbj = false;
|
||||
self.other = false;
|
||||
self.hypno = false;
|
||||
self
|
||||
}
|
||||
fn koreanbj(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = false;
|
||||
self.hmv = false;
|
||||
self.tiktok = false;
|
||||
self.koreanbj = true;
|
||||
self.other = false;
|
||||
self.hypno = false;
|
||||
self
|
||||
}
|
||||
fn other(&mut self) -> &mut Self {
|
||||
self.all = false;
|
||||
self.pmv = false;
|
||||
self.hmv = false;
|
||||
self.tiktok = false;
|
||||
self.koreanbj = false;
|
||||
self.other = true;
|
||||
self.hypno = false;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Serialize)]
|
||||
struct PmvhavenSearch {
|
||||
mode: String, //"DefaultMoreSearch",
|
||||
data: String, //"pmv",
|
||||
index: u32,
|
||||
}
|
||||
|
||||
impl PmvhavenSearch {
|
||||
fn new(search: String, page: u32) -> PmvhavenSearch {
|
||||
PmvhavenSearch {
|
||||
mode: "DefaultMoreSearch".to_string(),
|
||||
data: search,
|
||||
index: page,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct PmvhavenVideo {
|
||||
title: String, //JAV Addiction Therapy",
|
||||
_uploader: Option<String>, //itonlygetsworse",
|
||||
duration: f32, //259.093333,
|
||||
_width: Option<String>, //3840",
|
||||
_height: Option<String>, //2160",
|
||||
_ratio: Option<u32>, //50,
|
||||
thumbnails: Vec<Option<String>>, //[
|
||||
// "placeholder",
|
||||
// "https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/thumbnail/JAV Addiction Therapy_686f24e96f7124f3dfbe90ab.png",
|
||||
// "https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/thumbnail/webp320_686f24e96f7124f3dfbe90ab.webp"
|
||||
// ],
|
||||
views: u32, //1971,
|
||||
_url: Option<String>, //https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/JAV Addiction Therapy_686f24e96f7124f3dfbe90ab.mp4",
|
||||
previewUrlCompressed: Option<String>, //https://storage.pmvhaven.com/686f24e96f7124f3dfbe90ab/videoPreview/comus_686f24e96f7124f3dfbe90ab.mp4",
|
||||
_seizureWarning: Option<bool>, //false,
|
||||
_isoDate: Option<String>, //2025-07-10T02:52:26.000Z",
|
||||
_gayContent: Option<bool>, //false,
|
||||
_transContent: Option<bool>, //false,
|
||||
creator: Option<String>, //itonlygetsworse",
|
||||
_id: String, //686f2aeade2062f93d72931f",
|
||||
_totalRaters: Option<u32>, //42,
|
||||
_rating: Option<u32>, //164
|
||||
}
|
||||
|
||||
impl PmvhavenVideo {
|
||||
fn to_videoitem(self) -> VideoItem {
|
||||
// let encoded_title = percent_encode_emojis(&self.title);
|
||||
let thumbnail = self.thumbnails[self.thumbnails.len() - 1]
|
||||
.clone()
|
||||
.unwrap_or("".to_string());
|
||||
// let video_id = thumbnail.split("_").collect::<Vec<&str>>().last().unwrap_or(&"").to_string().split('.').next().unwrap_or("").to_string();
|
||||
let mut item = VideoItem::new(
|
||||
self._id.clone(),
|
||||
self.title.clone(),
|
||||
format!(
|
||||
"https://pmvhaven.com/video/{}_{}",
|
||||
self.title.replace(" ", "-"),
|
||||
self._id
|
||||
),
|
||||
"pmvhaven".to_string(),
|
||||
thumbnail,
|
||||
self.duration as u32,
|
||||
)
|
||||
.views(self.views);
|
||||
item = match self.creator {
|
||||
Some(c) => item.uploader(c),
|
||||
_ => item,
|
||||
};
|
||||
item = match self.previewUrlCompressed {
|
||||
Some(u) => item.preview(u),
|
||||
_ => item,
|
||||
};
|
||||
|
||||
return item;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(serde::Deserialize)]
|
||||
struct PmvhavenResponse {
|
||||
data: Vec<PmvhavenVideo>,
|
||||
_count: Option<u32>,
|
||||
}
|
||||
|
||||
impl PmvhavenResponse {
|
||||
fn to_videoitems(self) -> Vec<VideoItem> {
|
||||
return c![video.to_videoitem(), for video in self.data];
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PmvhavenProvider {
|
||||
url: String,
|
||||
stars: Arc<RwLock<Vec<String>>>,
|
||||
categories: Arc<RwLock<Vec<String>>>,
|
||||
}
|
||||
|
||||
impl PmvhavenProvider {
|
||||
pub fn new() -> Self {
|
||||
PmvhavenProvider {
|
||||
Self {
|
||||
url: "https://pmvhaven.com".to_string(),
|
||||
stars: Arc::new(RwLock::new(vec![])),
|
||||
categories: Arc::new(RwLock::new(vec![])),
|
||||
}
|
||||
}
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: String,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let category = options.category.unwrap_or("".to_string());
|
||||
let index = format!("pmvhaven:{}:{}", page, category);
|
||||
let url = format!("{}/api/getmorevideos", self.url);
|
||||
let mut request = PmvhavenRequest::new(page as u32);
|
||||
request.activeView = sort;
|
||||
request = match category.as_str() {
|
||||
"hypno" => {
|
||||
request.hypno();
|
||||
request
|
||||
}
|
||||
"pmv" => {
|
||||
request.pmv();
|
||||
request
|
||||
}
|
||||
"hmv" => {
|
||||
request.hmv();
|
||||
request
|
||||
}
|
||||
"tiktok" => {
|
||||
request.tiktok();
|
||||
request
|
||||
}
|
||||
"koreanbj" => {
|
||||
request.koreanbj();
|
||||
request
|
||||
}
|
||||
"other" => {
|
||||
request.other();
|
||||
request
|
||||
}
|
||||
_ => request,
|
||||
};
|
||||
|
||||
let old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
let _ = clientversion;
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let response = requester.post(&url, &request, vec![("Content-Type".to_string(),"text/plain;charset=UTF-8".to_string())]).await.unwrap();
|
||||
let videos = match response.json::<PmvhavenResponse>().await {
|
||||
Ok(resp) => resp,
|
||||
Err(e) => {
|
||||
println!("Failed to parse PmvhavenResponse: {}", e);
|
||||
return Ok(old_items);
|
||||
}
|
||||
};
|
||||
let video_items: Vec<VideoItem> = videos.to_videoitems();
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
cache.insert(url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
let categories = self
|
||||
.categories
|
||||
.read()
|
||||
.map(|g| g.clone())
|
||||
.unwrap_or_default();
|
||||
|
||||
Channel {
|
||||
id: "pmvhaven".to_string(),
|
||||
name: "PMVHaven".to_string(),
|
||||
description: "Best PMV Videos".to_string(),
|
||||
premium: false,
|
||||
favicon: "https://www.google.com/s2/favicons?sz=64&domain=pmvhaven.com".to_string(),
|
||||
status: "active".to_string(),
|
||||
categories,
|
||||
options: vec![
|
||||
ChannelOption {
|
||||
id: "sort".into(),
|
||||
title: "Sort".into(),
|
||||
description: "Sort the Videos".into(),
|
||||
systemImage: "list.number".into(),
|
||||
colorName: "blue".into(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "relevance".into(),
|
||||
title: "Relevance".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "newest".into(),
|
||||
title: "Newest".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "oldest".into(),
|
||||
title: "Oldest".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most viewed".into(),
|
||||
title: "Most Viewed".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most liked".into(),
|
||||
title: "Most Liked".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "most disliked".into(),
|
||||
title: "Most Disliked".into(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
},
|
||||
ChannelOption {
|
||||
id: "duration".into(),
|
||||
title: "Duration".into(),
|
||||
description: "Length of the Videos".into(),
|
||||
systemImage: "timer".into(),
|
||||
colorName: "green".into(),
|
||||
options: vec![
|
||||
FilterOption {
|
||||
id: "any".into(),
|
||||
title: "Any".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "<4 min".into(),
|
||||
title: "<4 min".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "4-20 min".into(),
|
||||
title: "4-20 min".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: "20-60 min".into(),
|
||||
title: "20-60 min".into(),
|
||||
},
|
||||
FilterOption {
|
||||
id: ">1 hour".into(),
|
||||
title: ">1 hour".into(),
|
||||
},
|
||||
],
|
||||
multiSelect: false,
|
||||
},
|
||||
],
|
||||
nsfw: true,
|
||||
cacheDuration: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn push_unique(target: &Arc<RwLock<Vec<String>>>, item: String) {
|
||||
if let Ok(mut vec) = target.write() {
|
||||
if !vec.iter().any(|x| x == &item) {
|
||||
vec.push(item);
|
||||
}
|
||||
}
|
||||
return Ok(video_items);
|
||||
}
|
||||
|
||||
async fn query(
|
||||
@@ -312,41 +139,123 @@ impl PmvhavenProvider {
|
||||
query: &str,
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let index = format!("pmvhaven:{}:{}", query, page);
|
||||
let url = format!("{}/api/v2/search", self.url);
|
||||
let request = PmvhavenSearch::new(query.to_string(), page as u32);
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
let search = query.trim().to_string();
|
||||
|
||||
let sort = match options.sort.as_deref() {
|
||||
Some("newest") => "&sort=-uploadDate",
|
||||
Some("oldest") => "&sort=uploadDate",
|
||||
Some("most viewed") => "&sort=-views",
|
||||
Some("most liked") => "&sort=-likes",
|
||||
Some("most disliked") => "&sort=-dislikes",
|
||||
_ => "",
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let response = requester.post(&url, &request, vec![("Content-Type".to_string(),"text/plain;charset=UTF-8".to_string())]).await.unwrap();
|
||||
let videos = match response.json::<PmvhavenResponse>().await {
|
||||
Ok(resp) => resp,
|
||||
Err(e) => {
|
||||
println!("Failed to parse PmvhavenResponse: {}", e);
|
||||
return Ok(old_items);
|
||||
}
|
||||
let duration = match options.duration.as_deref() {
|
||||
Some("<4 min") => "&durationMax=240",
|
||||
Some("4-20 min") => "&durationMin=240&durationMax=1200",
|
||||
Some("20-60 min") => "&durationMin=1200&durationMax=3600",
|
||||
Some(">1 hour") => "&durationMin=3600",
|
||||
_ => "",
|
||||
};
|
||||
let video_items: Vec<VideoItem> = videos.to_videoitems();
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
cache.insert(url.clone(), video_items.clone());
|
||||
|
||||
let endpoint = if search.is_empty() {
|
||||
"api/videos"
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
"api/videos/search"
|
||||
};
|
||||
|
||||
let mut url = format!(
|
||||
"{}/{endpoint}?limit=100&page={page}{duration}{sort}",
|
||||
self.url
|
||||
);
|
||||
|
||||
if let Ok(stars) = self.stars.read() {
|
||||
if let Some(star) = stars.iter().find(|s| s.eq_ignore_ascii_case(&search)) {
|
||||
url.push_str(&format!("&stars={star}"));
|
||||
}
|
||||
}
|
||||
return Ok(video_items);
|
||||
|
||||
if let Ok(cats) = self.categories.read() {
|
||||
if let Some(cat) = cats.iter().find(|c| c.eq_ignore_ascii_case(&search)) {
|
||||
url.push_str(&format!("&tagMode=OR&tags={cat}&expandTags=false"));
|
||||
}
|
||||
}
|
||||
|
||||
if !search.is_empty() {
|
||||
url.push_str(&format!("&q={search}"));
|
||||
}
|
||||
if let Some((time, items)) = cache.get(&url) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 300 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let mut requester = match options.requester {
|
||||
Some(r) => r,
|
||||
None => return Ok(vec![]),
|
||||
};
|
||||
|
||||
let text = requester.get(&url, None).await.unwrap_or_default();
|
||||
let json = serde_json::from_str(&text).unwrap_or(serde_json::Value::Null);
|
||||
let items = self.get_video_items_from_json(json).await;
|
||||
|
||||
if !items.is_empty() {
|
||||
cache.remove(&url);
|
||||
cache.insert(url, items.clone());
|
||||
}
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
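// The API responds with { success, data: [...] }; besides the basic video
// fields, each entry's `tags` and `starsTags` arrays are folded back into the
// provider's category and star lists so later searches can match them by name.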
async fn get_video_items_from_json(&self, json: serde_json::Value) -> Vec<VideoItem> {
|
||||
let mut items = vec![];
|
||||
|
||||
if !json.get("success").and_then(|v| v.as_bool()).unwrap_or(false) {
|
||||
return items;
|
||||
}
|
||||
|
||||
let videos = json.get("data").and_then(|v| v.as_array()).cloned().unwrap_or_default();
|
||||
|
||||
for video in videos {
|
||||
let title = decode(video.get("title").and_then(|v| v.as_str()).unwrap_or("").as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or_default();
|
||||
|
||||
let id = video
|
||||
.get("_id")
|
||||
.and_then(|v| v.as_str())
|
||||
.unwrap_or(&title)
|
||||
.to_string();
|
||||
|
||||
let video_url = video.get("videoUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
|
||||
let thumb = video.get("thumbnailUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
|
||||
let preview = video.get("previewUrl").and_then(|v| v.as_str()).unwrap_or("").to_string();
|
||||
|
||||
let views = video.get("views").and_then(|v| v.as_u64()).unwrap_or(0);
|
||||
let duration = parse_time_to_seconds(video.get("duration").and_then(|v| v.as_str()).unwrap_or("0")).unwrap_or(0);
|
||||
|
||||
let tags = video.get("tags").and_then(|v| v.as_array()).cloned().unwrap_or_default();
|
||||
let stars = video.get("starsTags").and_then(|v| v.as_array()).cloned().unwrap_or_default();
|
||||
for t in tags.iter() {
|
||||
if let Some(s) = t.as_str() {
|
||||
let decoded = decode(s.as_bytes()).to_string().unwrap_or_default();
|
||||
Self::push_unique(&self.categories, decoded.clone());
|
||||
}
|
||||
}
|
||||
for t in stars.iter() {
|
||||
if let Some(s) = t.as_str() {
|
||||
let decoded = decode(s.as_bytes()).to_string().unwrap_or_default();
|
||||
Self::push_unique(&self.stars, decoded.clone());
|
||||
}
|
||||
}
|
||||
|
||||
items.push(
|
||||
VideoItem::new(id, title, video_url.replace(' ', "%20"), "pmvhaven".into(), thumb, duration as u32)
|
||||
.views(views as u32)
|
||||
.preview(preview)
|
||||
);
|
||||
}
|
||||
|
||||
items
|
||||
}
|
||||
}
|
||||
|
||||
@@ -355,36 +264,39 @@ impl Provider for PmvhavenProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
pool: DbPool,
|
||||
sort: String,
|
||||
_pool: DbPool,
|
||||
_sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
per_page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = per_page;
|
||||
let _ = pool; // Ignored in this implementation
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => {
|
||||
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, options)
|
||||
.await
|
||||
}
|
||||
None => {
|
||||
self.get(
|
||||
cache,
|
||||
page.parse::<u8>().unwrap_or(1),
|
||||
sort,
|
||||
options,
|
||||
)
|
||||
.await
|
||||
}
|
||||
};
|
||||
match videos {
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
let query = query.unwrap_or_default();
|
||||
|
||||
match self.query(cache, page, &query, options).await {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
eprintln!("pmvhaven error: {e}");
|
||||
let mut chain_str = String::new();
|
||||
for (i, cause) in e.iter().enumerate() {
|
||||
let _ = writeln!(chain_str, "{}. {}", i + 1, cause);
|
||||
}
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(chain_str),
|
||||
Some("PMVHaven Provider"),
|
||||
Some("Failed to load videos from PMVHaven"),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,7 +49,6 @@ impl Porn00Provider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -62,7 +61,7 @@ impl Porn00Provider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -99,7 +98,7 @@ impl Porn00Provider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
|
||||
@@ -42,7 +42,6 @@ impl PornhatProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -53,7 +52,7 @@ impl PornhatProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -92,7 +91,7 @@ impl PornhatProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
|
||||
@@ -4,239 +4,254 @@ use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use std::vec;
|
||||
use htmlentity::entity::{decode, ICodedDataTrait};
|
||||
use async_trait::async_trait;
|
||||
use std::vec;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("parse error")
|
||||
display("parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct PornhubProvider {
|
||||
url: String,
|
||||
}
|
||||
|
||||
impl PornhubProvider {
|
||||
pub fn new() -> Self {
|
||||
PornhubProvider {
|
||||
Self {
|
||||
url: "https://www.pornhub.com".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: &str,
|
||||
options:ServerOptions
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let video_url = format!("{}/video?o={}&page={}", self.url, sort, page);
|
||||
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
Some((_, items)) => items.clone(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(),"<ul id=\"video");
|
||||
if !video_items.is_empty() {
|
||||
let mut requester = match options.requester.clone() {
|
||||
Some(r) => r,
|
||||
None => return Ok(old_items),
|
||||
};
|
||||
|
||||
let text = match requester.get(&video_url, None).await {
|
||||
Ok(t) => t,
|
||||
Err(_) => return Ok(old_items),
|
||||
};
|
||||
|
||||
let video_items = self.get_video_items_from_html(text, "<ul id=\"video");
|
||||
|
||||
if video_items.is_empty() {
|
||||
Ok(old_items)
|
||||
} else {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
Ok(video_items)
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
sort: &str,
|
||||
options:ServerOptions
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let mut split_string = "<ul id=\"video";
|
||||
let search_string = query.to_lowercase().trim().replace(" ", "+");
|
||||
let mut video_url = format!("{}/video/search?search={}&page={}", self.url, search_string, page);
|
||||
if query.starts_with("@"){
|
||||
let url_parts = query[1..].split(":").collect::<Vec<&str>>();
|
||||
video_url = [self.url.to_string(), url_parts[0].to_string(), url_parts[1].replace(" ", "-").to_string(), "videos?page=".to_string()].join("/");
|
||||
video_url += &page.to_string();
|
||||
if query.contains("@model") || query.contains("@pornstar"){
|
||||
let search_string = query.to_lowercase().trim().replace(' ', "+");
|
||||
|
||||
let mut video_url =
|
||||
format!("{}/video/search?search={}&page={}", self.url, search_string, page);
|
||||
|
||||
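// Queries prefixed with "@<section>:<name>" (e.g. "@pornstar:..." or
// "@channels:...") are routed to the corresponding profile page instead of
// the generic search endpoint; the HTML marker used to locate the video grid
// is switched accordingly.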
if query.starts_with('@') {
|
||||
let mut parts = query[1..].split(':');
|
||||
let a = parts.next().unwrap_or("");
|
||||
let b = parts.next().unwrap_or("");
|
||||
video_url = format!("{}/{}/{}/videos?page={}", self.url, a, b.replace(' ', "-"), page);
|
||||
|
||||
if query.contains("@model") || query.contains("@pornstar") {
|
||||
split_string = "mostRecentVideosSection";
|
||||
}
|
||||
if query.contains("@channels"){
|
||||
if query.contains("@channels") {
|
||||
split_string = "<ul class=\"videos row-5-thumbs";
|
||||
}
|
||||
}
|
||||
|
||||
if query.contains("@channels"){
|
||||
video_url += match sort {
|
||||
"mr" => "",
|
||||
"mv" => "&o=vi",
|
||||
"tr" => "&o=ra",
|
||||
_ => "",
|
||||
}
|
||||
} else{
|
||||
video_url += match sort {
|
||||
"mr" => "",
|
||||
"mv" => "&o=mv",
|
||||
"tr" => "&o=tr",
|
||||
"lg" => "&o=lg",
|
||||
_ => "&o=mv",
|
||||
}
|
||||
}
|
||||
video_url.push_str(match (query.contains("@channels"), sort) {
|
||||
(true, "mv") => "&o=vi",
|
||||
(true, "tr") => "&o=ra",
|
||||
(false, "mv") => "&o=mv",
|
||||
(false, "tr") => "&o=tr",
|
||||
(false, "lg") => "&o=lg",
|
||||
_ => "",
|
||||
});
|
||||
|
||||
// Check our Video Cache. If the result is younger than 5 minutes, we return it.
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
Some((time, items)) if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 => {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
Some((_, items)) => items.clone(),
|
||||
None => vec![],
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone(),split_string);
|
||||
if !video_items.is_empty() {
|
||||
let mut requester = match options.requester.clone() {
|
||||
Some(r) => r,
|
||||
None => return Ok(old_items),
|
||||
};
|
||||
|
||||
let text = match requester.get(&video_url, None).await {
|
||||
Ok(t) => t,
|
||||
Err(_) => return Ok(old_items),
|
||||
};
|
||||
|
||||
let video_items = self.get_video_items_from_html(text, split_string);
|
||||
|
||||
if video_items.is_empty() {
|
||||
Ok(old_items)
|
||||
} else {
|
||||
cache.remove(&video_url);
|
||||
cache.insert(video_url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
Ok(video_items)
|
||||
}
|
||||
Ok(video_items)
|
||||
|
||||
}
|
||||
|
||||
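// Parses one listing page. `split_string` selects which grid to read (search
// results, a model's "most recent" section, or a channel grid); every
// "pcVideoListItem" card is then mined for URL, title, id, duration, views
// and thumbnail, and cards without a usable link or video id are skipped.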
fn get_video_items_from_html(&self, html: String, split_string: &str) -> Vec<VideoItem> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
}
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
let video_listing_content = html.split(split_string).collect::<Vec<&str>>()[1].split("Porn in German").collect::<Vec<&str>>()[0];
|
||||
let raw_videos = video_listing_content
|
||||
let content = match html.split(split_string).nth(1) {
|
||||
Some(c) => c,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let content = content.split("Porn in German").next().unwrap_or("");
|
||||
|
||||
let mut items = Vec::new();
|
||||
|
||||
for seg in content
|
||||
.split("class=\"pcVideoListItem ")
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.to_vec();
|
||||
for video_segment in &raw_videos {
|
||||
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
|
||||
// for (index, line) in vid.iter().enumerate() {
|
||||
// println!("Line {}: {}", index, line);
|
||||
// }
|
||||
if video_segment.contains("wrapVideoBlock"){
|
||||
continue; // Skip if the segment is a wrapVideoBlock
|
||||
}
|
||||
let video_url: String;
|
||||
if !video_segment.contains("<a href=\"") {
|
||||
let url_part = video_segment.split("data-video-vkey=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
video_url = format!("{}{}", self.url, url_part);
|
||||
}
|
||||
else{
|
||||
let url_part = video_segment.split("<a href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
if url_part.is_empty() || url_part == "javascript:void(0)" {
|
||||
continue;
|
||||
}
|
||||
video_url = format!("{}{}", self.url, url_part);
|
||||
}
|
||||
if video_url.starts_with("https://www.pornhub.comjavascript:void(0)") {
|
||||
continue;
|
||||
}
|
||||
let mut title = video_segment.split("\" title=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
// html decode
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title);
|
||||
let id = video_segment.split("data-video-id=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let raw_duration = video_segment.split("duration").collect::<Vec<&str>>()[1].split(">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
let view_part = match video_segment.split("iews\">").collect::<Vec<&str>>().len(){
|
||||
2 => video_segment.split("iews\">").collect::<Vec<&str>>()[1],
|
||||
3 => video_segment.split("iews\">").collect::<Vec<&str>>()[2],
|
||||
_ => "<var>0<", // Skip if the format is unexpected
|
||||
.skip(1)
|
||||
.filter(|s| !s.contains("wrapVideoBlock"))
|
||||
{
|
||||
let url_part = seg
|
||||
.split("<a href=\"")
|
||||
.nth(1)
|
||||
.or_else(|| seg.split("data-video-vkey=\"").nth(1))
|
||||
.and_then(|s| s.split('"').next());
|
||||
|
||||
let video_url = match url_part {
|
||||
Some(u) if !u.is_empty() && u != "javascript:void(0)" => format!("{}{}", self.url, u),
|
||||
_ => continue,
|
||||
};
|
||||
let views = parse_abbreviated_number(view_part
|
||||
.split("<var>").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]).unwrap_or(0);
|
||||
|
||||
let thumb = video_segment.split("src=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
let mut title = seg
|
||||
.split("\" title=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let uploaderBlock;
|
||||
let uploader_href;
|
||||
let mut tag = String::new();
|
||||
if video_segment.contains("videoUploaderBlock") {
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title);
|
||||
|
||||
uploaderBlock = video_segment.split("videoUploaderBlock").collect::<Vec<&str>>()[1]
|
||||
.to_string();
|
||||
uploader_href = uploaderBlock.split("href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.split("/").collect::<Vec<&str>>();
|
||||
tag = format!("@{}:{}", uploader_href[1], uploader_href[2].replace("-", " "));
|
||||
let id = match seg
|
||||
.split("data-video-id=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
{
|
||||
Some(id) => id.to_string(),
|
||||
None => continue,
|
||||
};
|
||||
|
||||
}
|
||||
else{
|
||||
uploader_href = vec![];
|
||||
}
|
||||
let raw_duration = seg
|
||||
.split("duration")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.unwrap_or("0:00");
|
||||
|
||||
let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
let mut video_item = VideoItem::new(
|
||||
let views = seg
|
||||
.split("iews\">")
|
||||
.filter_map(|p| p.split("<var>").nth(1))
|
||||
.next()
|
||||
.and_then(|v| v.split('<').next())
|
||||
.and_then(|v| parse_abbreviated_number(v))
|
||||
.unwrap_or(0);
|
||||
|
||||
let thumb = seg
|
||||
.split("src=\"")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let (tag, uploader) = if seg.contains("videoUploaderBlock") {
|
||||
let href = seg
|
||||
.split("videoUploaderBlock")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("href=\"").nth(1))
|
||||
.and_then(|s| s.split('"').next())
|
||||
.unwrap_or("");
|
||||
|
||||
let parts: Vec<&str> = href.split('/').collect();
|
||||
if parts.len() >= 3 {
|
||||
(
|
||||
Some(format!("@{}:{}", parts[1], parts[2].replace('-', " "))),
|
||||
Some(parts[2].to_string()),
|
||||
)
|
||||
} else {
|
||||
(None, None)
|
||||
}
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
let mut item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
video_url.to_string(),
|
||||
"pornhub".to_string(),
|
||||
video_url,
|
||||
"pornhub".into(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
;
|
||||
);
|
||||
|
||||
if views > 0 {
|
||||
video_item = video_item.views(views);
|
||||
item = item.views(views);
|
||||
}
|
||||
if !tag.is_empty() {
|
||||
video_item = video_item.tags(vec![tag])
|
||||
.uploader(uploader_href[2].to_string());
|
||||
if let Some(t) = tag {
|
||||
item = item.tags(vec![t]);
|
||||
}
|
||||
items.push(video_item);
|
||||
if let Some(u) = uploader {
|
||||
item = item.uploader(u);
|
||||
}
|
||||
|
||||
items.push(item);
|
||||
}
|
||||
return items;
|
||||
|
||||
items
|
||||
}
|
||||
|
||||
|
||||
}
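The view counts parsed above are run through parse_abbreviated_number from crate::util (only its signature appears later in this diff). The following is a plausible stand-in for how such a helper handles abbreviated counts, an assumption rather than the crate's actual implementation:

// Hypothetical illustration: "1.2K" -> 1200, "3M" -> 3_000_000, "987" -> 987.
fn parse_abbreviated_number(s: &str) -> Option<u32> {
    let s = s.trim();
    let (num, mult) = match s.chars().last()? {
        'k' | 'K' => (&s[..s.len() - 1], 1_000.0),
        'm' | 'M' => (&s[..s.len() - 1], 1_000_000.0),
        _ => (s, 1.0),
    };
    let value: f32 = num.trim().replace(',', "").parse().ok()?;
    Some((value * mult) as u32)
}

fn main() {
    assert_eq!(parse_abbreviated_number("1.2K"), Some(1200));
    assert_eq!(parse_abbreviated_number("987"), Some(987));
}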
|
||||
|
||||
#[async_trait]
|
||||
@@ -251,29 +266,29 @@ impl Provider for PornhubProvider {
|
||||
per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = options;
|
||||
let _ = pool;
|
||||
let _ = per_page;
|
||||
let _ = pool; // Ignored in this implementation
|
||||
let mut sort = sort.to_lowercase();
|
||||
if sort.contains("date"){
|
||||
sort = "mr".to_string();
|
||||
}
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => {
|
||||
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, &sort, options)
|
||||
.await
|
||||
}
|
||||
None => {
|
||||
self.get(cache, page.parse::<u8>().unwrap_or(1), &sort, options)
|
||||
.await
|
||||
}
|
||||
|
||||
let page = page.parse::<u8>().unwrap_or(1);
|
||||
let mut sort = match sort.as_str() {
|
||||
"mv" => "mv",
|
||||
"tr" => "tr",
|
||||
"cm" => "cm",
|
||||
"lg" => "lg",
|
||||
_ => "mr",
|
||||
};
|
||||
match videos {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
vec![]
|
||||
}
|
||||
if sort.contains("date") {
|
||||
sort = "mr".into();
|
||||
}
|
||||
|
||||
let res = match query {
|
||||
Some(q) => self.query(cache, page, &q, &sort, options).await,
|
||||
None => self.get(cache, page, &sort, options).await,
|
||||
};
|
||||
|
||||
res.unwrap_or_else(|e| {
|
||||
eprintln!("PornhubProvider error: {e}");
|
||||
vec![]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -84,7 +84,6 @@ impl PornxpProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -96,7 +95,7 @@ impl PornxpProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -142,7 +141,7 @@ impl PornxpProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -266,7 +265,7 @@ impl Provider for PornxpProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,8 +63,7 @@ impl PornzogProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
println!("Fetching URL: {}", video_url);
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -77,7 +76,6 @@ impl PornzogProvider {
|
||||
|
||||
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
}
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
|
||||
@@ -39,7 +39,6 @@ impl RedtubeProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -50,7 +49,7 @@ impl RedtubeProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -87,7 +86,7 @@ impl RedtubeProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html_query(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
|
||||
@@ -103,7 +103,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", url);
|
||||
// println!("Cache hit for URL: {}", url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -115,7 +115,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
};
|
||||
}
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url).await.unwrap();
|
||||
let text = requester.get(&url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
@@ -163,7 +163,7 @@ fn build_channel(&self, clientversion: ClientVersion) -> Channel {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url).await.unwrap();
|
||||
let text = requester.get(&url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
@@ -282,7 +282,7 @@ impl Provider for Rule34genProvider {
|
||||
}
|
||||
}
|
||||
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,203 +1,241 @@
|
||||
use crate::util::parse_abbreviated_number;
|
||||
use crate::DbPool;
|
||||
use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::send_discord_error_report;
|
||||
use crate::util::parse_abbreviated_number;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::{ServerOptions, VideoItem};
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use std::vec;
|
||||
use std::time::{SystemTime, UNIX_EPOCH};
|
||||
use async_trait::async_trait;
|
||||
use std::vec;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
Io(std::io::Error);
|
||||
HttpRequest(wreq::Error);
|
||||
}
|
||||
errors {
|
||||
ParsingError(t: String) {
|
||||
description("html parsing error")
|
||||
display("HTML parsing error: '{}'", t)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Rule34videoProvider {
|
||||
url: String,
|
||||
}
|
||||
|
||||
impl Rule34videoProvider {
|
||||
pub fn new() -> Self {
|
||||
Rule34videoProvider {
|
||||
url: "https://rule34video.com".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper to safely extract a string between two delimiters
|
||||
fn extract_between<'a>(content: &'a str, start_pat: &str, end_pat: &str) -> Option<&'a str> {
|
||||
let start_idx = content.find(start_pat)? + start_pat.len();
|
||||
let sub = &content[start_idx..];
|
||||
let end_idx = sub.find(end_pat)?;
|
||||
Some(&sub[..end_idx])
|
||||
}
|
||||
|
||||
async fn get(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
sort: &str,
|
||||
options: ServerOptions
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let now = SystemTime::now()
|
||||
let timestamp_millis = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards");
|
||||
.map(|d| d.as_millis())
|
||||
.unwrap_or(0);
|
||||
|
||||
let timestamp_millis = now.as_millis(); // u128
|
||||
let expected_sorts = vec!["post_date", "video_viewed", "rating", "duration", "pseudo_random"];
|
||||
let sort = if expected_sorts.contains(&sort) {
|
||||
let expected_sorts = vec![
|
||||
"post_date",
|
||||
"video_viewed",
|
||||
"rating",
|
||||
"duration",
|
||||
"pseudo_random",
|
||||
];
|
||||
let sort_val = if expected_sorts.contains(&sort) {
|
||||
sort
|
||||
} else {
|
||||
"post_date"
|
||||
};
|
||||
|
||||
let index = format!("rule34video:{}:{}", page, sort);
|
||||
let index = format!("rule34video:{}:{}", page, sort_val);
|
||||
|
||||
let url = format!("{}/?mode=async&function=get_block&block_id=custom_list_videos_most_recent_videos&tag_ids=&sort_by={}&from={}&_={}", self.url, sort, page, timestamp_millis);
|
||||
|
||||
let mut old_items: Vec<VideoItem> = vec![];
|
||||
if !(sort == "pseudo_random") {
|
||||
old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
}
|
||||
if sort_val != "pseudo_random" {
|
||||
if let Some((time, items)) = cache.get(&index) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 300 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
|
||||
let mut requester = options.requester.clone().ok_or("Requester missing")?;
|
||||
let url = format!(
|
||||
"{}/?mode=async&function=get_block&block_id=custom_list_videos_most_recent_videos&tag_ids=&sort_by={}&from={}&_={}",
|
||||
self.url, sort_val, page, timestamp_millis
|
||||
);
|
||||
|
||||
let text = requester.get(&url, None).await.unwrap_or_else(|e| {
|
||||
eprintln!("Error fetching rule34video URL {}: {}", url, e);
|
||||
let _ = send_discord_error_report(e.to_string(), None, Some(&url), None, file!(), line!(), module_path!());
|
||||
"".to_string()
|
||||
});
|
||||
let video_items = self.get_video_items_from_html(text);
|
||||
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
cache.insert(url.clone(), video_items.clone());
|
||||
cache.insert(index, video_items.clone());
|
||||
Ok(video_items)
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
// Return empty or old items if available
|
||||
Ok(cache
|
||||
.get(&index)
|
||||
.map(|(_, items)| items)
|
||||
.unwrap_or_default())
|
||||
}
|
||||
Ok(video_items)
|
||||
}
|
||||
|
||||
async fn query(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
page: u8,
|
||||
query: &str,
|
||||
sort: &str,
|
||||
options: ServerOptions
|
||||
options: ServerOptions,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
let now = SystemTime::now()
|
||||
let timestamp_millis = SystemTime::now()
|
||||
.duration_since(UNIX_EPOCH)
|
||||
.expect("Time went backwards");
|
||||
let timestamp_millis = now.as_millis(); // u128
|
||||
let expected_sorts = vec!["post_date", "video_viewed", "rating", "duration", "pseudo_random"];
|
||||
let sort = if expected_sorts.contains(&sort) {
|
||||
.map(|d| d.as_millis())
|
||||
.unwrap_or(0);
|
||||
|
||||
let expected_sorts = vec![
|
||||
"post_date",
|
||||
"video_viewed",
|
||||
"rating",
|
||||
"duration",
|
||||
"pseudo_random",
|
||||
];
|
||||
let sort_val = if expected_sorts.contains(&sort) {
|
||||
sort
|
||||
} else {
|
||||
"post_date"
|
||||
};
|
||||
|
||||
let index = format!("rule34video:{}:{}:{}", page, sort, query);
|
||||
let index = format!("rule34video:{}:{}:{}", page, sort_val, query);
|
||||
|
||||
let url = format!("{}/search/{}/?mode=async&function=get_block&block_id=custom_list_videos_videos_list_search&tag_ids=&sort_by={}&from_videos={}&from_albums={}&_={}", self.url, query.replace(" ","-"), sort, page, page, timestamp_millis);
|
||||
if let Some((time, items)) = cache.get(&index) {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 300 {
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Check our Video Cache. If the result is younger than 1 hour, we return it.
|
||||
let old_items = match cache.get(&index) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
let _ = cache.check().await;
|
||||
return Ok(items.clone());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&url).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url);
|
||||
cache.insert(url.clone(), video_items.clone());
|
||||
} else {
|
||||
return Ok(old_items);
|
||||
}
|
||||
let mut requester = options.requester.clone().ok_or("Requester missing")?;
|
||||
let url = format!(
|
||||
"{}/search/{}/?mode=async&function=get_block&block_id=custom_list_videos_videos_list_search&tag_ids=&sort_by={}&from_videos={}&from_albums={}&_={}",
|
||||
self.url,
|
||||
query.replace(" ", "-"),
|
||||
sort_val,
|
||||
page,
|
||||
page,
|
||||
timestamp_millis
|
||||
);
|
||||
|
||||
let text = requester.get(&url, None).await.unwrap_or_else(|e| {
|
||||
eprintln!("Error fetching rule34video URL {}: {}", url, e);
|
||||
let _ = send_discord_error_report(e.to_string(), None, Some(&url), None, file!(), line!(), module_path!());
|
||||
"".to_string()
|
||||
});
|
||||
let video_items = self.get_video_items_from_html(text);
|
||||
|
||||
if !video_items.is_empty() {
|
||||
cache.insert(index, video_items.clone());
|
||||
Ok(video_items)
|
||||
} else {
|
||||
Ok(cache
|
||||
.get(&index)
|
||||
.map(|(_, items)| items)
|
||||
.unwrap_or_default())
|
||||
}
|
||||
}
|
||||
|
||||
fn get_video_items_from_html(&self, html: String) -> Vec<VideoItem> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
}
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
let video_listing_content = html.split("<div class=\"thumbs clearfix\" id=\"custom_list_videos").collect::<Vec<&str>>()[1].split("<div class=\"pagination\"").collect::<Vec<&str>>()[0].to_string();
|
||||
let raw_videos = video_listing_content
|
||||
.split("<div class=\"item thumb video_")
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.to_vec();
|
||||
for video_segment in &raw_videos {
|
||||
// let vid = video_segment.split("\n").collect::<Vec<&str>>()[1]
|
||||
// for (index, line) in vid.iter().enumerate() {
|
||||
// println!("Line {}: {}", index, line);
|
||||
// }
|
||||
|
||||
if video_segment.contains("https://rule34video.com/images/advertisements"){
|
||||
// Safely isolate the video listing section
|
||||
let video_listing = match Self::extract_between(
|
||||
&html,
|
||||
"id=\"custom_list_videos",
|
||||
"<div class=\"pagination\"",
|
||||
) {
|
||||
Some(content) => content,
|
||||
None => return vec![],
|
||||
};
|
||||
|
||||
let mut items = Vec::new();
|
||||
// Skip the first split result as it's the preamble
|
||||
let raw_videos = video_listing
|
||||
.split("<div class=\"item thumb video_")
|
||||
.skip(1);
|
||||
|
||||
for video_segment in raw_videos {
|
||||
if video_segment.contains("title=\"Advertisement\"") {
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut title = video_segment.split("<div class=\"thumb_title\">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
// Title extraction
|
||||
let title_raw =
|
||||
Self::extract_between(video_segment, "<div class=\"thumb_title\">", "<")
|
||||
.unwrap_or("Unknown");
|
||||
let title = decode(title_raw.as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or_else(|_| title_raw.to_string());
|
||||
|
||||
// ID extraction
|
||||
let id = Self::extract_between(video_segment, "https://rule34video.com/video/", "/")
|
||||
.unwrap_or("0")
|
||||
.to_string();
|
||||
// html decode
|
||||
title = decode(title.as_bytes()).to_string().unwrap_or(title);
|
||||
let id = video_segment.split("https://rule34video.com/video/").collect::<Vec<&str>>()[1].split("/").collect::<Vec<&str>>()[0].to_string();
|
||||
let raw_duration = video_segment.split("<div class=\"time\">").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
|
||||
// Duration extraction
|
||||
let raw_duration =
|
||||
Self::extract_between(video_segment, "<div class=\"time\">", "<").unwrap_or("0:00");
|
||||
let duration = parse_time_to_seconds(raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
// Views extraction
|
||||
let views_segment = Self::extract_between(video_segment, "<div class=\"views\">", "<");
|
||||
let views_count_str = views_segment
|
||||
.and_then(|s| s.split("</svg>").nth(1))
|
||||
.unwrap_or("0");
|
||||
let views = parse_abbreviated_number(views_count_str.trim()).unwrap_or(0);
|
||||
|
||||
// Thumbnail extraction
|
||||
let thumb = Self::extract_between(video_segment, "data-original=\"", "\"")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
// URL extraction
|
||||
let url =
|
||||
Self::extract_between(video_segment, "<a class=\"th js-open-popup\" href=\"", "\"")
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
let views = parse_abbreviated_number(&video_segment
|
||||
.split("<div class=\"views\">").collect::<Vec<&str>>()[1].split("</svg>").collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]).unwrap_or(0);
|
||||
//https://rule34video.com/get_file/47/5e71602b7642f9b997f90c979a368c99b8aad90d89/3942000/3942353/3942353_preview.mp4/
|
||||
//https://rule34video.com/get_file/47/5e71602b7642f9b997f90c979a368c99b8aad90d89/3942000/3942353/3942353_preview.mp4/
|
||||
let thumb = video_segment.split("<img class=\"thumb lazy-load\" src=\"").collect::<Vec<&str>>()[1].split("data-original=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
let url = video_segment.split("<a class=\"th js-open-popup\" href=\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
// let preview = video_segment.split("<div class=\"img wrap_image\" data-preview=\"").collect::<Vec<&str>>()[1]
|
||||
// .split("\"")
|
||||
// .collect::<Vec<&str>>()[0]
|
||||
// .to_string();
|
||||
|
||||
|
||||
let video_item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
url.to_string(),
|
||||
"Rule34video".to_string(),
|
||||
thumb,
|
||||
duration,
|
||||
)
|
||||
.views(views)
|
||||
// .preview(preview)
|
||||
;
|
||||
|
||||
|
||||
items.push(video_item);
|
||||
items.push(
|
||||
VideoItem::new(id, title, url, "Rule34video".to_string(), thumb, duration)
|
||||
.views(views),
|
||||
);
|
||||
}
|
||||
return items;
|
||||
items
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -205,30 +243,24 @@ impl Provider for Rule34videoProvider {
|
||||
async fn get_videos(
|
||||
&self,
|
||||
cache: VideoCache,
|
||||
pool: DbPool,
|
||||
_pool: DbPool,
|
||||
sort: String,
|
||||
query: Option<String>,
|
||||
page: String,
|
||||
per_page: String,
|
||||
_per_page: String,
|
||||
options: ServerOptions,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = options;
|
||||
let _ = per_page;
|
||||
let _ = pool; // Ignored in this implementation
|
||||
let videos: std::result::Result<Vec<VideoItem>, Error> = match query {
|
||||
Some(q) => {
|
||||
self.query(cache, page.parse::<u8>().unwrap_or(1), &q, &sort, options)
|
||||
.await
|
||||
}
|
||||
None => {
|
||||
self.get(cache, page.parse::<u8>().unwrap_or(1), &sort, options)
|
||||
.await
|
||||
}
|
||||
let page_num = page.parse::<u8>().unwrap_or(1);
|
||||
|
||||
let result = match query {
|
||||
Some(q) => self.query(cache, page_num, &q, &sort, options).await,
|
||||
None => self.get(cache, page_num, &sort, options).await,
|
||||
};
|
||||
match videos {
|
||||
|
||||
match result {
|
||||
Ok(v) => v,
|
||||
Err(e) => {
|
||||
println!("Error fetching videos: {}", e);
|
||||
eprintln!("Error fetching videos: {}", e);
|
||||
vec![]
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,17 @@
|
||||
use crate::DbPool;
|
||||
use crate::providers::Provider;
|
||||
use crate::util::cache::VideoCache;
|
||||
use crate::util::discord::format_error_chain;
|
||||
use crate::util::discord::send_discord_error_report;
|
||||
use crate::util::requester::Requester;
|
||||
use crate::util::time::parse_time_to_seconds;
|
||||
use crate::videos::VideoItem;
|
||||
use crate::videos::ServerOptions;
|
||||
use crate::videos::VideoItem;
|
||||
use async_trait::async_trait;
|
||||
use error_chain::error_chain;
|
||||
use htmlentity::entity::{ICodedDataTrait, decode};
|
||||
use scraper::{Html, Selector};
|
||||
use std::vec;
|
||||
use async_trait::async_trait;
|
||||
|
||||
error_chain! {
|
||||
foreign_links {
|
||||
@@ -17,16 +19,14 @@ error_chain! {
|
||||
HttpRequest(wreq::Error);
|
||||
JsonError(serde_json::Error);
|
||||
}
|
||||
errors {
|
||||
Parse(msg: String) {
|
||||
description("html parse error")
|
||||
display("html parse error: {}", msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// fn has_blacklisted_class(element: &ElementRef, blacklist: &[&str]) -> bool {
|
||||
// element
|
||||
// .value()
|
||||
// .attr("class")
|
||||
// .map(|classes| classes.split_whitespace().any(|c| blacklist.contains(&c)))
|
||||
// .unwrap_or(false)
|
||||
// }
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SxyprnProvider {
|
||||
url: String,
|
||||
@@ -81,11 +81,30 @@ impl SxyprnProvider {
|
||||
}
|
||||
};
|
||||
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let text = requester.get(&url_str, None).await.unwrap();
|
||||
// Pass a reference to options if needed, or reconstruct as needed
|
||||
let video_items: Vec<VideoItem> = self
|
||||
let video_items = match self
|
||||
.get_video_items_from_html(text.clone(), pool, requester)
|
||||
.await;
|
||||
.await
|
||||
{
|
||||
Ok(items) => items,
|
||||
Err(e) => {
|
||||
println!("Error parsing video items: {}", e);
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Sxyprn Provider"),
|
||||
Some(&format!("URL: {}", url_str)),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
return Ok(old_items);
|
||||
}
|
||||
};
|
||||
// let video_items: Vec<VideoItem> = self
|
||||
// .get_video_items_from_html(text.clone(), pool, requester)
|
||||
// .await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url_str);
|
||||
cache.insert(url_str.clone(), video_items.clone());
|
||||
@@ -115,7 +134,10 @@ impl SxyprnProvider {
|
||||
let search_string = query.replace(" ", "-");
|
||||
let url_str = format!(
|
||||
"{}/{}.html?page={}&sm={}",
|
||||
self.url, search_string, ((page as u32) - 1) * 20, sort_string
|
||||
self.url,
|
||||
search_string,
|
||||
((page as u32) - 1) * 20,
|
||||
sort_string
|
||||
);
|
||||
// Check our Video Cache. If the result is younger than 1 hour, we return it.
|
||||
let old_items = match cache.get(&url_str) {
|
||||
@@ -131,10 +153,31 @@ impl SxyprnProvider {
|
||||
vec![]
|
||||
}
|
||||
};
|
||||
let text = requester.get(&url_str).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self
|
||||
let text = requester.get(&url_str, None).await.unwrap();
|
||||
|
||||
let video_items = match self
|
||||
.get_video_items_from_html(text.clone(), pool, requester)
|
||||
.await;
|
||||
.await
|
||||
{
|
||||
Ok(items) => items,
|
||||
Err(e) => {
|
||||
println!("Error parsing video items: {}", e);// 1. Convert the error to a string immediately
|
||||
send_discord_error_report(
|
||||
e.to_string(),
|
||||
Some(format_error_chain(&e)),
|
||||
Some("Sxyprn Provider"),
|
||||
Some(&format!("URL: {}", url_str)),
|
||||
file!(),
|
||||
line!(),
|
||||
module_path!(),
|
||||
).await;
|
||||
return Ok(old_items);
|
||||
}
|
||||
};
|
||||
|
||||
// let video_items: Vec<VideoItem> = self
|
||||
// .get_video_items_from_html(text.clone(), pool, requester)
|
||||
// .await;
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&url_str);
|
||||
cache.insert(url_str.clone(), video_items.clone());
|
||||
@@ -147,41 +190,50 @@ impl SxyprnProvider {
|
||||
async fn get_video_items_from_html(
|
||||
&self,
|
||||
html: String,
|
||||
pool: DbPool,
|
||||
requester: Requester,
|
||||
) -> Vec<VideoItem> {
|
||||
let _ = requester;
|
||||
let _ = pool;
|
||||
_pool: DbPool,
|
||||
_requester: Requester,
|
||||
) -> Result<Vec<VideoItem>> {
|
||||
if html.is_empty() {
|
||||
println!("HTML is empty");
|
||||
return vec![];
|
||||
return Ok(vec![]);
|
||||
}
|
||||
let raw_videos = html.split("<script async").collect::<Vec<&str>>()[0]
|
||||
.split("post_el_small'")
|
||||
.collect::<Vec<&str>>()[1..]
|
||||
.to_vec();
|
||||
let mut items: Vec<VideoItem> = Vec::new();
|
||||
for video_segment in &raw_videos {
|
||||
// let vid = video_segment.split("\n").collect::<Vec<&str>>();
|
||||
// for (index, line) in vid.iter().enumerate() {
|
||||
// println!("Line {}: {}", index, line.to_string().trim());
|
||||
// }
|
||||
// println!("\n\n\n");
|
||||
|
||||
let url = video_segment.split("/post/").collect::<Vec<&str>>()[1]
|
||||
.split("'")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
// take content before "<script async"
|
||||
let before_script = html
|
||||
.split("<script async")
|
||||
.next()
|
||||
.ok_or_else(|| ErrorKind::Parse("missing '<script async' split point".into()))?;
|
||||
|
||||
// split into video segments (skip the first chunk)
|
||||
let raw_videos: Vec<&str> = before_script.split("post_el_small'").skip(1).collect();
|
||||
|
||||
if raw_videos.is_empty() {
|
||||
return Err(ErrorKind::Parse("no 'post_el_small\\'' segments found".into()).into());
|
||||
}
|
||||
|
||||
let mut items = Vec::new();
|
||||
|
||||
for video_segment in raw_videos {
|
||||
// url id
|
||||
let url = video_segment
|
||||
.split("/post/")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split('\'').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract /post/ url".into()))?
|
||||
.to_string();
|
||||
|
||||
let video_url = format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", url);
|
||||
|
||||
let title_parts = video_segment.split("post_text").collect::<Vec<&str>>()[1]
|
||||
.split("style=''>")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("</div>")
|
||||
.collect::<Vec<&str>>()[0];
|
||||
// title parts
|
||||
let title_parts = video_segment
|
||||
.split("post_text")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("style=''>").nth(1))
|
||||
.and_then(|s| s.split("</div>").next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract title_parts".into()))?;
|
||||
|
||||
let document = Html::parse_document(title_parts);
|
||||
let selector = Selector::parse("*").unwrap();
|
||||
let selector = Selector::parse("*")
|
||||
.map_err(|e| ErrorKind::Parse(format!("selector parse failed: {e}")))?;
|
||||
|
||||
let mut texts = Vec::new();
|
||||
for element in document.select(&selector) {
|
||||
@@ -190,86 +242,96 @@ impl SxyprnProvider {
|
||||
texts.push(text.trim().to_string());
|
||||
}
|
||||
}
|
||||
let mut title = texts[0].clone();
|
||||
// html decode
|
||||
|
||||
let mut title = texts.join(" ");
|
||||
title = decode(title.as_bytes())
|
||||
.to_string()
|
||||
.unwrap_or(title)
|
||||
.replace(" ", " ");
|
||||
title = title.replace(" + ", " ").replace(" ", " ");
|
||||
if title.to_ascii_lowercase().starts_with("new "){
|
||||
|
||||
title = title
|
||||
.replace('\n', "")
|
||||
.replace(" + ", " ")
|
||||
.replace(" ", " ")
|
||||
.trim()
|
||||
.to_string();
|
||||
|
||||
if title.to_ascii_lowercase().starts_with("new ") {
|
||||
title = title[4..].to_string();
|
||||
}
|
||||
// println!("Title: {}", title);
|
||||
let id = video_url.split("/").collect::<Vec<&str>>()[6].to_string();
|
||||
|
||||
let thumb = format!(
|
||||
"https:{}",
|
||||
video_segment
|
||||
.split("<img class='mini_post_vid_thumb lazyload'")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("data-src='")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("'")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string()
|
||||
);
|
||||
// id (DON'T index [6])
|
||||
let id = video_url
|
||||
.split('/')
|
||||
.last()
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract id from video_url".into()))?
|
||||
.split('?')
|
||||
.next()
|
||||
.unwrap_or("")
|
||||
.to_string();
|
||||
|
||||
let preview = match video_segment.contains("class='hvp_player'") {
|
||||
true => Some(format!(
|
||||
// thumb
|
||||
let thumb_path = video_segment
|
||||
.split("<img class='mini_post_vid_thumb lazyload'")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("data-src='").nth(1))
|
||||
.and_then(|s| s.split('\'').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract thumb".into()))?;
|
||||
|
||||
let thumb = format!("https:{thumb_path}");
|
||||
|
||||
// preview
|
||||
let preview = if video_segment.contains("class='hvp_player'") {
|
||||
Some(format!(
|
||||
"https:{}",
|
||||
video_segment
|
||||
.split("class='hvp_player'")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split(" src='")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("'")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string()
|
||||
)),
|
||||
false => None,
|
||||
.nth(1)
|
||||
.and_then(|s| s.split(" src='").nth(1))
|
||||
.and_then(|s| s.split('\'').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract preview src".into()))?
|
||||
))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// views
|
||||
let views = video_segment
|
||||
.split("<strong>·</strong> ")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split(" ")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.nth(1)
|
||||
.and_then(|s| s.split_whitespace().next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract views".into()))?
|
||||
.to_string();
|
||||
|
||||
let raw_duration = video_segment.split("duration_small").collect::<Vec<&str>>()[1]
|
||||
.split("title='")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("'")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split(">")
|
||||
.collect::<Vec<&str>>()[1]
|
||||
.split("<")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
// duration
|
||||
let raw_duration = video_segment
|
||||
.split("duration_small")
|
||||
.nth(1)
|
||||
.and_then(|s| s.split("title='").nth(1))
|
||||
.and_then(|s| s.split('\'').nth(1))
|
||||
.and_then(|s| s.split('>').nth(1))
|
||||
.and_then(|s| s.split('<').next())
|
||||
.ok_or_else(|| ErrorKind::Parse("failed to extract duration".into()))?
|
||||
.to_string();
|
||||
|
||||
let duration = parse_time_to_seconds(&raw_duration).unwrap_or(0) as u32;
|
||||
|
||||
// stream urls (your filter condition looks suspicious; leaving as-is)
|
||||
let stream_urls = video_segment
|
||||
.split("extlink_icon extlink")
|
||||
.collect::<Vec<&str>>()
|
||||
.iter()
|
||||
.map(|part| {
|
||||
let url = part
|
||||
.split("href='")
|
||||
.collect::<Vec<&str>>()
|
||||
.filter_map(|part| {
|
||||
part.split("href='")
|
||||
.last()
|
||||
.unwrap_or(&"")
|
||||
.split("'")
|
||||
.collect::<Vec<&str>>()[0]
|
||||
.to_string();
|
||||
url
|
||||
.and_then(|s| s.split('\'').next())
|
||||
.map(|u| u.to_string())
|
||||
})
|
||||
.filter(|url| url.starts_with("http") && !url.starts_with("https://bigwarp.io/"))
|
||||
.filter(|url| url.starts_with("https://lulustream."))
|
||||
.collect::<Vec<String>>();
|
||||
let video_item_url = match stream_urls.first() {
|
||||
Some(u) => u.clone(),
|
||||
None => video_url.clone(),
|
||||
};
|
||||
|
||||
let video_item_url = stream_urls.first().cloned().unwrap_or_else(|| {
|
||||
format!("https://hottub.spacemoehre.de/proxy/sxyprn/post/{}", id)
|
||||
});
|
||||
|
||||
let mut video_item = VideoItem::new(
|
||||
id,
|
||||
title,
|
||||
@@ -279,12 +341,15 @@ impl SxyprnProvider {
|
||||
duration,
|
||||
)
|
||||
.views(views.parse::<u32>().unwrap_or(0));
|
||||
|
||||
if let Some(p) = preview {
|
||||
video_item = video_item.preview(p);
|
||||
}
|
||||
|
||||
items.push(video_item);
|
||||
}
|
||||
return items;
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -368,7 +368,6 @@ impl TnaflixProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -380,7 +379,7 @@ impl TnaflixProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -427,7 +426,7 @@ impl TnaflixProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -555,7 +554,7 @@ impl Provider for TnaflixProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,7 +94,6 @@ impl XxdbxProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -106,7 +105,7 @@ impl XxdbxProvider {
|
||||
};
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -156,7 +155,7 @@ impl XxdbxProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -279,7 +278,7 @@ impl Provider for XxdbxProvider {
|
||||
}
|
||||
}
|
||||
}
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> crate::status::Channel {
|
||||
self.build_channel(clientversion)
|
||||
fn get_channel(&self, clientversion: ClientVersion) -> Option<crate::status::Channel> {
|
||||
Some(self.build_channel(clientversion))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -51,7 +51,6 @@ impl XxthotsProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -62,7 +61,7 @@ impl XxthotsProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -99,7 +98,7 @@ impl XxthotsProvider {
|
||||
}
|
||||
};
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
|
||||
@@ -46,7 +46,6 @@ impl YoujizzProvider {
|
||||
let old_items = match cache.get(&video_url) {
|
||||
Some((time, items)) => {
|
||||
if time.elapsed().unwrap_or_default().as_secs() < 60 * 5 {
|
||||
println!("Cache hit for URL: {}", video_url);
|
||||
return Ok(items.clone());
|
||||
} else {
|
||||
items.clone()
|
||||
@@ -59,7 +58,7 @@ impl YoujizzProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
@@ -95,7 +94,7 @@ impl YoujizzProvider {
|
||||
|
||||
let mut requester = options.requester.clone().unwrap();
|
||||
|
||||
let text = requester.get(&video_url).await.unwrap();
|
||||
let text = requester.get(&video_url, None).await.unwrap();
|
||||
let video_items: Vec<VideoItem> = self.get_video_items_from_html(text.clone());
|
||||
if !video_items.is_empty() {
|
||||
cache.remove(&video_url);
|
||||
|
||||
src/proxies/javtiful.rs (new file, 64 lines)
@@ -0,0 +1,64 @@
use ntex::web;
use wreq::Version;

use crate::util::requester::Requester;


#[derive(Debug, Clone)]
pub struct JavtifulProxy {
}

impl JavtifulProxy {
    pub fn new() -> Self {
        JavtifulProxy {
        }
    }

    pub async fn get_video_url(
        &self,
        url: String,
        requester: web::types::State<Requester>,
    ) -> String {
        let mut requester = requester.get_ref().clone();
        let url = "https://javtiful.com/".to_string() + &url;
        let text = requester.get(&url, None).await.unwrap_or("".to_string());
        if text.is_empty() {
            return "".to_string();
        }
        let video_id = url
            .split('/')
            .nth(4)
            .unwrap_or("")
            .to_string();

        let token = text.split("data-csrf-token=\"")
            .nth(1)
            .and_then(|s| s.split('"').next())
            .unwrap_or("")
            .to_string();

        let form = wreq::multipart::Form::new()
            .text("video_id", video_id.clone())
            .text("pid_c", "".to_string())
            .text("token", token.clone());
        let resp = match requester
            .post_multipart(
                "https://javtiful.com/ajax/get_cdn",
                form,
                vec![("Referer".to_string(), url.to_string())],
                Some(Version::HTTP_11),
            )
            .await
        {
            Ok(r) => r,
            Err(_) => return "".to_string(),
        };
        let text = resp.text().await.unwrap_or_default();
        let json: serde_json::Value = serde_json::from_str(&text).unwrap_or(serde_json::Value::Null);
        let video_url = json.get("playlists")
            .map(|v| v.to_string().replace("\"", ""))
            .unwrap_or_default();

        return video_url;
    }
}

@@ -4,10 +4,12 @@ use crate::{proxies::sxyprn::SxyprnProxy, util::requester::Requester};
|
||||
|
||||
pub mod sxyprn;
|
||||
pub mod hanimecdn;
|
||||
pub mod javtiful;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum AnyProxy {
|
||||
Sxyprn(SxyprnProxy),
|
||||
Javtiful(javtiful::JavtifulProxy),
|
||||
}
|
||||
|
||||
pub trait Proxy {
|
||||
@@ -26,12 +28,8 @@ impl Proxy for AnyProxy {
|
||||
requester: web::types::State<Requester>,
|
||||
) -> String {
|
||||
match self {
|
||||
AnyProxy::Sxyprn(p) => {
|
||||
p.get_video_url(
|
||||
url,
|
||||
requester,
|
||||
).await
|
||||
}
|
||||
AnyProxy::Sxyprn(p) => p.get_video_url(url, requester).await,
|
||||
AnyProxy::Javtiful(p) => p.get_video_url(url, requester).await,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -40,30 +40,34 @@ impl SxyprnProxy {
|
||||
) -> String {
|
||||
let mut requester = requester.get_ref().clone();
|
||||
let url = "https://sxyprn.com/".to_string() + &url;
|
||||
let text = requester.get(&url).await.unwrap_or("".to_string());
|
||||
let text = requester.get(&url, None).await.unwrap_or("".to_string());
|
||||
if text.is_empty() {
|
||||
return "".to_string();
|
||||
}
|
||||
let data_string = text.split("data-vnfo='").collect::<Vec<&str>>()[1]
|
||||
.split("\":\"").collect::<Vec<&str>>()[1]
|
||||
.split("\"}").collect::<Vec<&str>>()[0].replace("\\","");
|
||||
|
||||
//println!("src: {}",data_string);
|
||||
let mut tmp = data_string
|
||||
.split("/")
|
||||
.map(|s| s.to_string())
|
||||
.collect::<Vec<String>>();
|
||||
//println!("tmp: {:?}",tmp);
|
||||
tmp[1] = format!("{}8/{}", tmp[1], boo(ssut51(tmp[6].as_str()), ssut51(tmp[7].as_str())));
|
||||
|
||||
//println!("tmp[1]: {:?}",tmp[1]);
|
||||
//preda
|
||||
tmp[5] = format!(
|
||||
"{}",
|
||||
tmp[5].parse::<u32>().unwrap() - ssut51(tmp[6].as_str()) - ssut51(tmp[7].as_str())
|
||||
);
|
||||
//println!("tmp: {:?}",tmp);
|
||||
let sxyprn_video_url = format!("https://sxyprn.com{}",tmp.join("/"));
|
||||
|
||||
let response = requester.get_raw(&sxyprn_video_url).await;
|
||||
match response {
|
||||
Ok(resp) => {
|
||||
return resp.headers().get("Location").unwrap().to_str().unwrap_or("").to_string();
|
||||
return format!("https:{}", resp.headers().get("Location").unwrap().to_str().unwrap_or("").to_string());
|
||||
},
|
||||
Err(e) => {
|
||||
println!("Error fetching video URL: {}", e);
|
||||
|
||||
src/proxy.rs
@@ -1,5 +1,6 @@
|
||||
use ntex::web::{self, HttpRequest};
|
||||
|
||||
use crate::proxies::javtiful::JavtifulProxy;
|
||||
use crate::proxies::sxyprn::SxyprnProxy;
|
||||
use crate::util::requester::Requester;
|
||||
use crate::proxies::*;
|
||||
@@ -7,24 +8,24 @@ use crate::proxies::*;
|
||||
pub fn config(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(
|
||||
web::resource("/sxyprn/{endpoint}*")
|
||||
.route(web::post().to(sxyprn))
|
||||
.route(web::get().to(sxyprn)),
|
||||
.route(web::post().to(proxy2redirect))
|
||||
.route(web::get().to(proxy2redirect)),
|
||||
)
|
||||
.service(
|
||||
web::resource("/javtiful/{endpoint}*")
|
||||
.route(web::post().to(proxy2redirect))
|
||||
.route(web::get().to(proxy2redirect)),
|
||||
)
|
||||
.service(
|
||||
web::resource("/hanime-cdn/{endpoint}*")
|
||||
.route(web::post().to(crate::proxies::hanimecdn::get_image))
|
||||
.route(web::get().to(crate::proxies::hanimecdn::get_image)),
|
||||
)
|
||||
// .service(
|
||||
// web::resource("/videos")
|
||||
// // .route(web::get().to(videos_get))
|
||||
// .route(web::post().to(videos_post)),
|
||||
// )
|
||||
;
|
||||
}
|
||||
|
||||
|
||||
async fn sxyprn(req: HttpRequest,
|
||||
async fn proxy2redirect(req: HttpRequest,
|
||||
requester: web::types::State<Requester>,) -> Result<impl web::Responder, web::Error> {
|
||||
let proxy = get_proxy(req.uri().to_string().split("/").collect::<Vec<&str>>()[2]).unwrap();
|
||||
let endpoint = req.match_info().query("endpoint").to_string();
|
||||
@@ -33,13 +34,14 @@ async fn sxyprn(req: HttpRequest,
|
||||
_ => "Error".to_string(),
|
||||
};
|
||||
Ok(web::HttpResponse::Found()
|
||||
.header("Location", format!("https:{}", video_url))
|
||||
.header("Location", video_url)
|
||||
.finish())
|
||||
}
|
||||
|
||||
fn get_proxy(proxy: &str) -> Option<AnyProxy> {
|
||||
match proxy {
|
||||
"sxyprn" => Some(AnyProxy::Sxyprn(SxyprnProxy::new())),
|
||||
"javtiful" => Some(AnyProxy::Javtiful(JavtifulProxy::new())),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
@@ -1,22 +1,28 @@
|
||||
use std::time::{SystemTime};
|
||||
use std::time::SystemTime;
|
||||
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::time::Duration;
|
||||
|
||||
|
||||
use crate::videos::VideoItem;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct VideoCache{
|
||||
cache: Arc<Mutex<std::collections::HashMap<String, (SystemTime, Vec<VideoItem>)>>>, // url -> time+Items
|
||||
pub struct VideoCache {
|
||||
cache: Arc<Mutex<std::collections::HashMap<String, (SystemTime, Vec<VideoItem>)>>>, // url -> time+Items
|
||||
max_size: usize,
|
||||
}
|
||||
impl VideoCache {
|
||||
pub fn new() -> Self {
|
||||
VideoCache {
|
||||
cache: Arc::new(Mutex::new(std::collections::HashMap::new())),
|
||||
max_size: 100,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_size(&mut self, size: usize) -> &mut Self {
|
||||
self.max_size = size;
|
||||
self
|
||||
}
|
||||
|
||||
pub fn get(&self, key: &str) -> Option<(SystemTime, Vec<VideoItem>)> {
|
||||
let cache = self.cache.lock().ok()?;
|
||||
cache.get(key).cloned()
|
||||
@@ -24,14 +30,22 @@ impl VideoCache {
|
||||
|
||||
pub fn insert(&self, key: String, value: Vec<VideoItem>) {
|
||||
if let Ok(mut cache) = self.cache.lock() {
|
||||
if cache.len() >= self.max_size {
|
||||
// Simple eviction policy: remove a random entry
|
||||
if let Some(first_key) = cache.keys().next().cloned() {
|
||||
cache.remove(&first_key);
|
||||
}
|
||||
}
|
||||
cache.insert(key.clone(), (SystemTime::now(), value.clone()));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn remove(&self, key: &str) {
|
||||
if let Ok(mut cache) = self.cache.lock() {
|
||||
cache.remove(key);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn entries(&self) -> Option<Vec<(String, (SystemTime, Vec<VideoItem>))>> {
|
||||
if let Ok(cache) = self.cache.lock() {
|
||||
// Return a cloned vector of the cache entries
|
||||
@@ -40,21 +54,24 @@ impl VideoCache {
|
||||
None
|
||||
}
|
||||
|
||||
pub async fn check(&self) -> Result<(), Box<dyn std::error::Error>>{
|
||||
let iter = match self.entries(){
|
||||
Some(iter) => iter,
|
||||
None => return Err(Box::new(std::io::Error::new(std::io::ErrorKind::Other, "Could not get entries")))
|
||||
};
|
||||
pub async fn check(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let iter = match self.entries() {
|
||||
Some(iter) => iter,
|
||||
None => {
|
||||
return Err(Box::new(std::io::Error::new(
|
||||
std::io::ErrorKind::Other,
|
||||
"Could not get entries",
|
||||
)));
|
||||
}
|
||||
};
|
||||
|
||||
for (key, (time, _items)) in iter {
|
||||
if let Ok(elapsed) = time.elapsed() {
|
||||
if elapsed > Duration::from_secs(60*60){
|
||||
println!("Key: {}, elapsed: {:?}", key, elapsed);
|
||||
self.remove(&key);
|
||||
}
|
||||
for (key, (time, _items)) in iter {
|
||||
if let Ok(elapsed) = time.elapsed() {
|
||||
if elapsed > Duration::from_secs(60 * 60) {
|
||||
self.remove(&key);
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
src/util/discord.rs (new file, 107 lines)
@@ -0,0 +1,107 @@
use std::error::Error;
use std::fmt::Write as _;
use std::time::{SystemTime, UNIX_EPOCH};
use dashmap::DashMap;
use once_cell::sync::Lazy;
use serde_json::json;
use crate::util::requester;

// Global cache: Map<ErrorSignature, LastSentTimestamp>
static ERROR_CACHE: Lazy<DashMap<String, u64>> = Lazy::new(DashMap::new);
// const COOLDOWN_SECONDS: u64 = 3600; // 1 Hour cooldown

pub fn format_error_chain(err: &dyn Error) -> String {
    let mut chain_str = String::new();
    let mut current_err: Option<&dyn Error> = Some(err);
    let mut index = 1;
    while let Some(e) = current_err {
        let _ = writeln!(chain_str, "{}. {}", index, e);
        current_err = e.source();
        index += 1;
    }
    chain_str
}

pub async fn send_discord_error_report(
    error_msg: String,
    error_chain: Option<String>,
    context: Option<&str>,
    extra_info: Option<&str>,
    file: &str,
    line: u32,
    module: &str,
) {
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map(|d| d.as_secs())
        .unwrap_or(0);

    // --- Deduplication Logic ---
    // Create a unique key based on error content and location
    let error_signature = format!("{}-{}-{}", error_msg, file, line);

    if let Some(_) = ERROR_CACHE.get(&error_signature) {
        // if now - *last_sent < COOLDOWN_SECONDS {
        // Error is still in cooldown, skip sending
        return;
        // }
    }

    // Update the cache with the current timestamp
    ERROR_CACHE.insert(error_signature, now);
    // ---------------------------

    let webhook_url = match std::env::var("DISCORD_WEBHOOK") {
        Ok(url) => url,
        Err(_) => return,
    };

    const MAX_FIELD: usize = 1024;
    let truncate = |s: &str| {
        if s.len() > MAX_FIELD {
            format!("{}…", &s[..MAX_FIELD - 1])
        } else {
            s.to_string()
        }
    };

    let payload = json!({
        "embeds": [{
            "title": "🚨 Rust Error Report",
            "color": 0xE74C3C,
            "fields": [
                {
                    "name": "Error",
                    "value": format!("```{}```", truncate(&error_msg)),
                    "inline": false
                },
                {
                    "name": "Error Chain",
                    "value": truncate(&error_chain.unwrap_or_else(|| "No chain provided".to_string())),
                    "inline": false
                },
                {
                    "name": "Location",
                    "value": format!("`{}`:{}\n`{}`", file, line, module),
                    "inline": true
                },
                {
                    "name": "Context",
                    "value": truncate(context.unwrap_or("n/a")),
                    "inline": true
                },
                {
                    "name": "Extra Info",
                    "value": truncate(extra_info.unwrap_or("n/a")),
                    "inline": false
                }
            ],
            "footer": {
                "text": format!("Unix time: {} | Cooldown active", now)
            }
        }]
    });

    let mut requester = requester::Requester::new();
    let _ = requester.post_json(&webhook_url, &payload, vec![]).await;
}
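For orientation, this mirrors how the sxyprn provider above invokes the reporter after a failed fetch; the provider name and URL below are placeholders:

// `e` can be any error implementing std::error::Error.
if let Err(e) = requester.get(&url, None).await {
    send_discord_error_report(
        e.to_string(),
        Some(format_error_chain(&e)),
        Some("Example Provider"),          // placeholder context label
        Some(&format!("URL: {}", url)),
        file!(),
        line!(),
        module_path!(),
    )
    .await;
}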

@@ -13,65 +13,52 @@ pub struct FlareSolverrRequest {

#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlaresolverrCookie {
    pub name: String, //"cf_clearance",
    pub value: String, //"lnKoXclrIp_mDrWJFfktPGm8GDyxjSpzy9dx0qDTiRg-1748689259-1.2.1.1-AIFERAPCdCSvvdu1mposNdUpKV9wHZXBpSI2L9k9TaKkPcqmomON_XEb6ZtRBtrmQu_DC8AzKllRg2vNzVKOUsvv9ndjQ.vv8Z7cNkgzpIbGFy96kXyAYH2mUk3Q7enZovDlEbK5kpV3Sbmd2M3_bUCBE1WjAMMdXlyNElH1LOpUm149O9hrluXjAffo4SwHI4HO0UckBPWBlBqhznKPgXxU0g8VHLDeYnQKViY8rP2ud4tyzKnJUxuYXzr4aWBNMp6TESp49vesRiel_Y5m.rlTY4zSb517S9iPbEQiYHRI.uH5mMHVI3jvJl0Mx94tPrpFnkhDdmzL3DRSllJe9k786Lf21I9WBoH2cCR3yHw",
    pub domain: String, //".discord.com",
    pub path: String, //"/",
    pub expires: f64, //1780225259.237105,
    pub size: u64, //438,
    pub httpOnly: bool, //true,
    pub secure: bool, //true,
    pub session: bool, //false,
    pub sameSite: Option<String>, //"None",
    pub priority: String, //"Medium",
    pub sameParty: bool, //false,
    pub sourceScheme: String, //"Secure",
    pub sourcePort: u32, //443,
    pub partitionKey: Option<String>,
    pub name: String,
    pub value: String,
    pub domain: String,
    pub path: String,
    pub expires: f64,
    pub size: u64,
    pub httpOnly: bool,
    pub secure: bool,
    pub session: bool,
    pub sameSite: Option<String>,
    pub priority: String,
    pub sameParty: bool,
    pub sourceScheme: String,
    pub sourcePort: u32,
    pub partitionKey: Option<String>,
}

#[derive(serde::Serialize, serde::Deserialize, Debug)]
#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrSolution {
    pub url: String,
    pub url: String,
    pub status: u32,
    pub response: String,
    pub response: String,
    pub headers: HashMap<String, String>,
    pub cookies: Vec<FlaresolverrCookie>,
    pub cookies: Vec<FlaresolverrCookie>,
    pub userAgent: String,
}
// impl FlareSolverrSolution {
//     fn to_client(&self,){
//         let mut headers = header::HeaderMap::new();
//         for (h, v) in &self.headers {
//             println!("{}: {}", h, v);
//             headers.insert(
//                 header::HeaderName::from_bytes(h.as_bytes()).unwrap(),
//                 header::HeaderValue::from_str(v).unwrap(),
//             );
//         }
//         // let client = reqwest::Client::builder()
//         //     .danger_accept_invalid_certs(true)
//         //     .
//         //     .build().unwrap();
//     }
// }

#[derive(serde::Serialize, serde::Deserialize, Debug)]
pub struct FlareSolverrResponse {
    status: String,
    message: String,
    pub status: String,
    pub message: String,
    pub solution: FlareSolverrSolution,
    startTimestamp: u64,
    endTimestamp: u64,
    version: String,
    pub startTimestamp: u64,
    pub endTimestamp: u64,
    pub version: String,
}

pub struct Flaresolverr {
    url: String,
    proxy: bool,
}

impl Flaresolverr {
    pub fn new(url: String) -> Self {
        Flaresolverr {
            url: url,
        Self {
            url,
            proxy: false,
        }
    }

@@ -85,28 +72,34 @@ impl Flaresolverr {
        request: FlareSolverrRequest,
    ) -> Result<FlareSolverrResponse, Box<dyn std::error::Error>> {
        let client = Client::builder()
            .emulation(Emulation::Firefox136)
            .build()?;
            .emulation(Emulation::Firefox136)
            .build()?;

        let mut request = client
            .post(&self.url)
            .header("Content-Type", "application/json")
            .json(&json!({
                "cmd": request.cmd,
                "url": request.url,
                "maxTimeout": request.maxTimeout,
            }));
        let mut req = client
            .post(&self.url)
            .header("Content-Type", "application/json")
            .json(&json!({
                "cmd": request.cmd,
                "url": request.url,
                "maxTimeout": request.maxTimeout,
            }));

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
                let proxy = Proxy::all(&proxy_url).unwrap();
                request = request.proxy(proxy.clone());
                match Proxy::all(&proxy_url) {
                    Ok(proxy) => {
                        req = req.proxy(proxy);
                    }
                    Err(e) => {
                        eprintln!("Invalid proxy URL '{}': {}", proxy_url, e);
                    }
                }
            }
        }

        let response = request.send().await?;
        let response = req.send().await?;

        let body: FlareSolverrResponse = response.json::<FlareSolverrResponse>().await?;
        Ok(body)
        let body = response.json::<FlareSolverrResponse>().await?;
        Ok(body)
    }
}
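For orientation, a hedged usage sketch of the refactored client. The method name solve and the exact FlareSolverrRequest fields are assumptions (only cmd, url and maxTimeout are visible in the hunk above); http://localhost:8191/v1 is FlareSolverr's conventional default endpoint.

// Hypothetical call site; adjust the method name and request fields to the real API.
async fn fetch_via_flaresolverr() -> Result<(), Box<dyn std::error::Error>> {
    let solver = Flaresolverr::new("http://localhost:8191/v1".to_string());
    let request = FlareSolverrRequest {
        cmd: "request.get".to_string(),
        url: "https://example.com".to_string(),
        maxTimeout: 60000,
    };
    let response = solver.solve(request).await?;
    println!(
        "FlareSolverr status: {}, HTTP {} with {} cookies",
        response.status,
        response.solution.status,
        response.solution.cookies.len()
    );
    Ok(())
}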

@@ -2,6 +2,7 @@ pub mod time;
pub mod flaresolverr;
pub mod cache;
pub mod requester;
pub mod discord;

pub fn parse_abbreviated_number(s: &str) -> Option<u32> {
    let s = s.trim();

@@ -1,4 +1,5 @@
use serde::Serialize;
use wreq::multipart::Form;
use std::env;
use wreq::Client;
use wreq::Proxy;

@@ -46,23 +47,6 @@ impl Requester {
        self.proxy = proxy;
    }

    // pub fn set_flaresolverr_session(&mut self, session: String) {
    //     self.flaresolverr_session = Some(session);
    // }

    // fn get_url_from_location_header(&self, prev_url: &str, location: &str) -> String {
    //     if location.starts_with("http://") || location.starts_with("https://") {
    //         location.to_string()
    //     } else if location.starts_with("//") {
    //         format!("{}{}", "https:", location)
    //     } else if location.starts_with('/') {
    //         let base_url = prev_url.split('/').take(3).collect::<Vec<&str>>().join("/");
    //         format!("{}{}", base_url, location)
    //     } else {
    //         format!("{}/{}", prev_url, location)
    //     }
    // }

    pub async fn get_raw(&mut self, url: &str) -> Result<Response, wreq::Error> {
        let client = Client::builder()
            .cert_verification(false)

@@ -83,7 +67,11 @@ impl Requester {
        request.send().await
    }

    pub async fn get_raw_with_headers(&mut self, url: &str, headers: Vec<(String, String)>) -> Result<Response, wreq::Error> {
    pub async fn get_raw_with_headers(
        &mut self,
        url: &str,
        headers: Vec<(String, String)>,
    ) -> Result<Response, wreq::Error> {
        let client = Client::builder()
            .cert_verification(false)
            .emulation(Emulation::Firefox136)

@@ -106,24 +94,22 @@ impl Requester {
        request.send().await
    }

    pub async fn post<S>(&mut self, url: &str, data: &S, headers: Vec<(String, String)>) -> Result<Response, wreq::Error>
    pub async fn post_json<S>(
        &mut self,
        url: &str,
        data: &S,
        headers: Vec<(String, String)>,
    ) -> Result<Response, wreq::Error>
    where
        S: Serialize + ?Sized,
    {
        let client = Client::builder()
            .cert_verification(false)
            .emulation(Emulation::Firefox136)
            .cookie_store(true)
            .build()
            .expect("Failed to create HTTP client");

        let mut request = client.post(url).version(Version::HTTP_11).json(data);
        let mut request = self.client.post(url).version(Version::HTTP_11).json(data);

        // Set custom headers
        for (key, value) in headers.iter() {
            request = request.header(key, value);
        }

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
                let proxy = Proxy::all(&proxy_url).unwrap();

@@ -134,8 +120,18 @@ impl Requester {
        request.send().await
    }

    pub async fn get(&mut self, url: &str) -> Result<String, AnyErr> {
        let mut request = self.client.get(url).version(Version::HTTP_11);
    pub async fn post(
        &mut self,
        url: &str,
        data: &str,
        headers: Vec<(&str, &str)>,
    ) -> Result<Response, wreq::Error> {
        let mut request = self.client.post(url).version(Version::HTTP_11).body(data.to_string());

        // Set custom headers
        for (key, value) in headers.iter() {
            request = request.header(key.to_string(), value.to_string());
        }

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {

@@ -144,12 +140,70 @@ impl Requester {
            }
        }

        let response = request.send().await?;
        request.send().await
    }

        if response.status().is_success() || response.status().as_u16() == 404 {
            return Ok(response.text().await?);
    pub async fn post_multipart(
        &mut self,
        url: &str,
        form: Form,
        headers: Vec<(String, String)>,
        _http_version: Option<Version>,
    ) -> Result<Response, wreq::Error>
    {
        let http_version = match _http_version {
            Some(v) => v,
            None => Version::HTTP_11,
        };

        let mut request = self.client.post(url).multipart(form).version(http_version);

        // Set custom headers
        for (key, value) in headers.iter() {
            request = request.header(key, value);
        }

        if self.proxy {
            if let Ok(proxy_url) = env::var("BURP_URL") {
                let proxy = Proxy::all(&proxy_url).unwrap();
                request = request.proxy(proxy);
            }
        }

        request.send().await
    }

    pub async fn get(&mut self, url: &str, _http_version: Option<Version>) -> Result<String, AnyErr> {
        let http_version = match _http_version {
            Some(v) => v,
            None => Version::HTTP_11,
        };
        loop {
            let mut request = self.client.get(url).version(http_version);
            if self.proxy {
                if let Ok(proxy_url) = env::var("BURP_URL") {
                    let proxy = Proxy::all(&proxy_url).unwrap();
                    request = request.proxy(proxy);
                }
            }
            let response = request.send().await?;
            if response.status().is_success() || response.status().as_u16() == 404 {
                return Ok(response.text().await?);
            }
            if response.status().as_u16() == 429 {
                tokio::time::sleep(std::time::Duration::from_secs(1)).await;
                continue;
            } else {
                println!(
                    "Direct request to {} failed with status: {}",
                    url,
                    response.status()
                );
                break;
            }
        }

        // If direct request failed, try FlareSolverr. Map its error to a Send+Sync error immediately,
        // so no non-Send error value lives across later `.await`s.
        let flare_url = env::var("FLARE_URL").expect("FLARE_URL not set");
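A usage sketch for the two new entry points. It assumes wreq's multipart Form mirrors reqwest's builder (Form::new().text(...)) and that wreq re-exports Version, as the Version::HTTP_11 usage above implies; URLs and form fields are placeholders.

use wreq::multipart::Form;
use wreq::Version;

// Hypothetical call sites for post_multipart and the versioned get.
async fn example(requester: &mut Requester) -> Result<(), wreq::Error> {
    // Build a form (assuming the reqwest-style builder) and send it over the
    // default HTTP/1.1 by passing None for the version override.
    let form = Form::new().text("title", "example").text("visibility", "public");
    let resp = requester
        .post_multipart("https://example.com/upload", form, vec![], None)
        .await?;
    println!("upload status: {}", resp.status());

    // The new Option<Version> parameter on get() lets callers force a protocol version.
    if let Ok(body) = requester.get("https://example.com", Some(Version::HTTP_2)).await {
        println!("fetched {} bytes", body.len());
    }
    Ok(())
}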

@@ -48,6 +48,7 @@ pub struct ServerOptions {
    pub stars: Option<String>, //
    pub categories: Option<String>, //
    pub duration: Option<String>, //
    pub sort: Option<String>, //
}

#[derive(serde::Serialize, Debug)]

@@ -128,6 +129,9 @@ impl VideoItem {
        }
    }
    pub fn tags(mut self, tags: Vec<String>) -> Self {
        if tags.is_empty() {
            return self;
        }
        self.tags = Some(tags);
        self
    }

@@ -156,6 +160,9 @@ impl VideoItem {
        self
    }
    pub fn formats(mut self, formats: Vec<VideoFormat>) -> Self {
        if formats.is_empty() {
            return self;
        }
        self.formats = Some(formats);
        self
    }

@@ -270,6 +277,15 @@ impl VideoFormat {
            headers.insert(key, value);
        }
    }
    pub fn http_header(&mut self, key: String, value: String) -> Self {
        if self.http_headers.is_none() {
            self.http_headers = Some(HashMap::new());
        }
        if let Some(headers) = &mut self.http_headers {
            headers.insert(key, value);
        }
        self.to_owned()
    }
    pub fn format_id(mut self, format_id: String) -> Self {
        self.format_id = Some(format_id);
        self