// file-srv-gui-v3/src/search.rs
use std::ops::Deref;
use crate::util::clamp;
use eframe::egui;
use serde::Deserialize;
use tokio::sync::mpsc;
/// Top-level JSON payload returned by the search endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct Root {
/// The matching file entries for the requested page.
pub results: Vec<FileEntry>,
/// Paging information for the result set.
pub metadata: Metadata,
}
/// Paging metadata accompanying a page of search results.
#[derive(Debug, Clone, Deserialize)]
pub struct Metadata {
/// Index of the page this response covers, as reported by the server.
pub page: usize,
/// Total number of pages available for the query.
pub total_pages: usize,
/// Number of entries per page used by the server.
pub page_size: usize,
}
/// A single file result returned by the search endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct FileEntry {
/// File name (without directory components, presumably — confirm against server).
pub name: String,
/// File extension.
pub ext: String,
/// Server-side path; currently unused by the GUI.
#[allow(dead_code)]
pub path: String,
/// URL from which the file can be fetched.
pub url: String,
/// File size — assumed to be in bytes; TODO confirm against server API.
pub size: i64,
/// Preview content or preview URL — semantics depend on the server; verify against caller.
pub preview: String,
}
/// UI-side state for a paginated file search against the server.
pub struct SearchContext {
/// True while a spawned search task is in flight (set in `start_search`,
/// cleared in `process_channel`).
pub is_searching: bool,
/// Base URL of the search endpoint.
pub server_url: String,
/// Current user query text.
pub query: String,
/// Receiver for the outcome of the spawned search task; `None` when idle.
search_rx: Option<mpsc::Receiver<Result<(Vec<FileEntry>, Metadata), String>>>,
/// Results for the currently displayed page.
pub search_results: Vec<FileEntry>,
/// Current page number (reset to 0 while a search is in flight, then
/// restored from the server's `Metadata`).
pub page: usize,
/// Requested page size.
pub per_page: usize,
/// Total pages reported by the server for the last query.
pub total_pages: usize,
/// NOTE(review): never populated from the server response anywhere in
/// this file — confirm whether this is set elsewhere or is dead state.
pub total_results: usize,
}
impl Default for SearchContext {
fn default() -> Self {
SearchContext {
is_searching: false,
server_url: "https://stuff.catgirls.fish/search".to_string(),
query: "".to_string(),
search_rx: None,
search_results: vec![],
page: 1,
per_page: 25,
total_pages: 0,
total_results: 0,
}
}
}
impl SearchContext {
/// Advance to the next page of results and re-run the search.
///
/// Does nothing when already on the last page. `ctx` is used to evict
/// egui's image cache so previews from the old page are not reused.
pub fn next_page(&mut self, ctx: &egui::Context) {
let already_last = self.page == self.total_pages;
self.page = clamp(self.page + 1, 1, self.total_pages);
if !already_last {
self.start_search();
ctx.forget_all_images();
}
}
/// Go back to the previous page of results and re-run the search.
///
/// Does nothing when already on the first page.
pub fn previous_page(&mut self, ctx: &egui::Context) {
let already_first = self.page == 1;
// `page` is a usize and is reset to 0 while a search is in flight
// (see `start_search`), so a plain `self.page - 1` could underflow
// and panic if this is called mid-search; saturate instead.
self.page = clamp(self.page.saturating_sub(1), 1, self.total_pages);
if !already_first {
self.start_search();
ctx.forget_all_images();
}
}
/// Kick off an asynchronous search with the current query/page settings.
///
/// Spawns a tokio task that performs the HTTP request and sends the
/// outcome over a capacity-1 channel; poll it with `process_channel`.
/// Returns a status string suitable for the UI.
pub fn start_search(&mut self) -> String {
// usize is Copy — no clone needed.
let page = self.page;
let page_size = self.per_page;
let query = self.query.clone();
let url = self.server_url.clone();
let (tx, rx) = mpsc::channel::<Result<(Vec<FileEntry>, Metadata), String>>(1);
self.search_rx = Some(rx);
self.is_searching = true;
// Clear stale page state; the authoritative values come back in
// the server's Metadata via `process_channel`.
self.search_results.clear();
self.page = 0;
self.total_pages = 0;
self.total_results = 0;
tokio::spawn(async move {
let res = search_files(url, query, page, page_size).await;
// The receiver may already have been dropped (e.g. a newer
// search replaced it); ignoring the send error is intentional.
let _ = tx.send(res).await;
});
"Searching...".to_string()
}
/// Poll the in-flight search (if any) without blocking.
///
/// Returns a status string for the UI and a `done` flag (currently
/// always `false`; kept for interface compatibility).
pub fn process_channel(&mut self, ctx: &egui::Context) -> (String, bool) {
let done = false;
let mut return_str = "".to_string();
let mut clear_rx = false;
if let Some(rx) = self.search_rx.as_mut() {
match rx.try_recv() {
Ok(Ok((entries, meta))) => {
// Received successfully and the search itself succeeded.
// Move the results in instead of cloning the whole Vec.
self.search_results = entries;
return_str = "Ready!".to_string();
self.total_pages = meta.total_pages;
self.page = meta.page;
self.per_page = meta.page_size;
self.is_searching = false;
clear_rx = true;
}
Ok(Err(e)) => {
// Received successfully but the search reported an error.
return_str = format!("Search failed: {}", e);
self.is_searching = false;
clear_rx = true;
}
Err(mpsc::error::TryRecvError::Empty) => {
// Still waiting; ask egui to repaint so we poll again soon.
ctx.request_repaint();
}
Err(mpsc::error::TryRecvError::Disconnected) => {
// Sender dropped without sending — the task died.
return_str = "Search thread ended unexpectedly".to_string();
self.is_searching = false;
clear_rx = true;
}
}
}
if clear_rx {
self.search_rx = None;
}
(return_str, done)
}
}
/// Query the search endpoint and return the matching entries plus paging
/// metadata. All failures are reported as human-readable `Err` strings
/// (suitable for direct display in the UI).
pub async fn search_files(
url: String,
query: String,
page: usize,
page_size: usize,
) -> Result<(Vec<FileEntry>, Metadata), String> {
// Percent-encode the user query so spaces/`&`/`?`/`#` etc. cannot
// corrupt the query string (previously the raw text was interpolated,
// which broke or mis-routed non-trivial queries).
let full_url = format!(
"{}?q={}&p={}&s={}",
url,
percent_encode(&query),
page,
page_size
);
let client = reqwest::Client::builder()
.user_agent(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:141.0) Gecko/20100101 Firefox/141.0",
)
// NOTE(review): accepting invalid certs disables TLS verification —
// kept for compatibility with the existing server setup, but worth
// revisiting; it exposes the client to MITM attacks.
.danger_accept_invalid_certs(true)
.build()
.map_err(|e| format!("Failed to create the client: {}", e))?;
let response = client
.get(full_url)
.send()
.await
.map_err(|e| format!("Search request failed: {}", e))?;
// Only a literal 200 OK is accepted, matching the previous behavior.
if response.status() != reqwest::StatusCode::OK {
return Err(format!("Search request failed: {}", response.status()));
}
response
.json::<Root>()
.await
.map(|root| (root.results, root.metadata))
.map_err(|e| format!("Failed to deserialize results data: {}", e))
}
/// Percent-encode a string for use inside a URL query component.
/// RFC 3986 unreserved characters (ALPHA / DIGIT / "-" / "." / "_" / "~")
/// pass through unchanged; every other byte becomes `%XX`.
fn percent_encode(s: &str) -> String {
let mut out = String::with_capacity(s.len());
for b in s.bytes() {
match b {
b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'.' | b'_' | b'~' => {
out.push(b as char)
}
_ => out.push_str(&format!("%{:02X}", b)),
}
}
out
}