Factor searching into its own class

This commit is contained in:
2026-03-20 11:15:11 -04:00
parent 0f62444bee
commit 187633c533
5 changed files with 196 additions and 206 deletions

View File

@@ -1,7 +1,6 @@
use crate::search::SearchContext;
use crate::types; use crate::types;
use crate::util::clamp;
use crate::util::download; use crate::util::download;
use crate::util::search_files;
use eframe::egui; use eframe::egui;
use egui::ColorImage; use egui::ColorImage;
use human_bytes::human_bytes; use human_bytes::human_bytes;
@@ -10,13 +9,11 @@ use tokio::sync::mpsc;
pub struct Application { pub struct Application {
download_path: Option<String>, download_path: Option<String>,
server_url: String,
query: String,
status: String, status: String,
show_side_panel: bool, show_side_panel: bool,
search_ctx: types::SearchContext, search_ctx: SearchContext,
download_ctx: types::DownloadContext, download_ctx: types::DownloadContext,
} }
@@ -43,29 +40,13 @@ impl Application {
// Search button and input // Search button and input
ui.label("Search: "); ui.label("Search: ");
if ui.text_edit_singleline(&mut self.query).lost_focus() if ui
.text_edit_singleline(&mut self.search_ctx.query)
.lost_focus()
&& ui && ui
.input(|i| i.key_pressed(egui::Key::Enter) && !self.search_ctx.is_searching) .input(|i| i.key_pressed(egui::Key::Enter) && !self.search_ctx.is_searching)
{ {
let query = self.query.clone(); self.search_ctx.start_search();
let url = self.server_url.clone();
let page = self.search_ctx.page.clone();
let page_size = self.search_ctx.per_page.clone();
let (tx, rx) = mpsc::channel::<
Result<(Vec<types::FileEntry>, types::Metadata), String>,
>(1);
self.search_ctx.search_rx = Some(rx);
self.search_ctx.is_searching = true;
self.search_ctx.search_results.clear();
self.search_ctx.page = 0;
self.search_ctx.total_pages = 0;
self.search_ctx.total_results = 0;
self.status = "Searching...".to_string();
tokio::spawn(async move {
let res = search_files(url, query, page, page_size).await;
let _ = tx.send(res).await;
});
} }
ui.separator(); ui.separator();
@@ -74,57 +55,11 @@ impl Application {
self.search_ctx.page, self.search_ctx.total_pages self.search_ctx.page, self.search_ctx.total_pages
)); ));
if ui.button(" - ").clicked() && !self.search_ctx.is_searching { if ui.button(" - ").clicked() && !self.search_ctx.is_searching {
let skip = self.search_ctx.page == 1; self.search_ctx.previous_page(ctx);
self.search_ctx.page =
clamp(self.search_ctx.page - 1, 1, self.search_ctx.total_pages);
if !skip {
let query = self.query.clone();
let url = self.server_url.clone();
let page = self.search_ctx.page.clone();
let page_size = self.search_ctx.per_page.clone();
let (tx, rx) = mpsc::channel::<
Result<(Vec<types::FileEntry>, types::Metadata), String>,
>(1);
self.search_ctx.search_rx = Some(rx);
self.search_ctx.is_searching = true;
self.search_ctx.search_results.clear();
self.search_ctx.page = 0;
self.search_ctx.total_pages = 0;
self.search_ctx.total_results = 0;
self.status = "Searching...".to_string();
tokio::spawn(async move {
let res = search_files(url, query, page, page_size).await;
let _ = tx.send(res).await;
});
ctx.forget_all_images();
}
} }
if ui.button("+").clicked() && !self.search_ctx.is_searching { if ui.button("+").clicked() && !self.search_ctx.is_searching {
let skip = self.search_ctx.page == self.search_ctx.total_pages; self.search_ctx.next_page(ctx);
self.search_ctx.page =
clamp(self.search_ctx.page + 1, 1, self.search_ctx.total_pages);
if !skip {
let query = self.query.clone();
let url = self.server_url.clone();
let page = self.search_ctx.page.clone();
let page_size = self.search_ctx.per_page.clone();
let (tx, rx) = mpsc::channel::<
Result<(Vec<types::FileEntry>, types::Metadata), String>,
>(1);
self.search_ctx.search_rx = Some(rx);
self.search_ctx.is_searching = true;
self.search_ctx.search_results.clear();
self.search_ctx.page = 0;
self.search_ctx.total_pages = 0;
self.search_ctx.total_results = 0;
self.status = "Searching...".to_string();
tokio::spawn(async move {
let res = search_files(url, query, page, page_size).await;
let _ = tx.send(res).await;
});
ctx.forget_all_images();
}
} }
ui.label(&self.status); ui.label(&self.status);
@@ -136,7 +71,7 @@ impl Application {
egui::SidePanel::left("left_panel").show(ctx, |ui| { egui::SidePanel::left("left_panel").show(ctx, |ui| {
ui.horizontal(|ui| { ui.horizontal(|ui| {
ui.label("API Url: "); ui.label("API Url: ");
ui.text_edit_singleline(&mut self.server_url); ui.text_edit_singleline(&mut self.search_ctx.server_url);
}); });
ui.horizontal(|ui| { ui.horizontal(|ui| {
ui.label("Save location: "); ui.label("Save location: ");
@@ -153,14 +88,10 @@ impl Application {
ui.label(format!("Items per page: {}", self.search_ctx.per_page)); ui.label(format!("Items per page: {}", self.search_ctx.per_page));
if ui.button(" - ").clicked() { if ui.button(" - ").clicked() {
self.search_ctx.per_page = self.search_ctx.per_page.saturating_sub(1); self.search_ctx.per_page = self.search_ctx.per_page.saturating_sub(1);
self.search_ctx.total_pages =
self.search_ctx.total_results / self.search_ctx.per_page;
ctx.forget_all_images(); ctx.forget_all_images();
} }
if ui.button("+").clicked() { if ui.button("+").clicked() {
self.search_ctx.per_page = self.search_ctx.per_page.saturating_add(1); self.search_ctx.per_page = self.search_ctx.per_page.saturating_add(1);
self.search_ctx.total_pages =
self.search_ctx.total_results / self.search_ctx.per_page;
ctx.forget_all_images(); ctx.forget_all_images();
} }
}); });
@@ -275,39 +206,9 @@ impl Application {
} }
fn process_channels(&mut self, ctx: &egui::Context) { fn process_channels(&mut self, ctx: &egui::Context) {
if self.search_ctx.search_rx.is_some() { let (search_status, should_update) = self.search_ctx.process_channel(ctx);
let mut clear_rx = false; if should_update {
if let Some(rx) = self.search_ctx.search_rx.as_mut() { self.status = search_status;
match rx.try_recv() {
Ok(Ok(results)) => {
// Ok recv, ok results
self.search_ctx.search_results = results.0.clone();
self.status = "Ready!".to_string();
self.search_ctx.total_pages = results.1.total_pages;
self.search_ctx.page = results.1.page;
self.search_ctx.per_page = results.1.page_size;
self.search_ctx.is_searching = false;
clear_rx = true;
}
Ok(Err(e)) => {
// Ok recv, err results
self.status = format!("Search failed: {}", e);
self.search_ctx.is_searching = false;
clear_rx = true;
}
Err(mpsc::error::TryRecvError::Empty) => {
ctx.request_repaint();
}
Err(mpsc::error::TryRecvError::Disconnected) => {
self.status = "Search thread ended unexpectedly".to_string();
self.search_ctx.is_searching = false;
clear_rx = true;
}
}
}
if clear_rx {
self.search_ctx.search_rx = None;
}
} }
if self.download_ctx.download_rx.is_some() { if self.download_ctx.download_rx.is_some() {
@@ -346,10 +247,8 @@ impl Default for Application {
fn default() -> Self { fn default() -> Self {
Application { Application {
download_path: None, download_path: None,
server_url: "https://stuff.catgirls.fish/search".to_string(),
query: "".to_string(),
status: "Ready!".to_string(), status: "Ready!".to_string(),
search_ctx: types::SearchContext::default(), search_ctx: SearchContext::default(),
show_side_panel: false, show_side_panel: false,
download_ctx: types::DownloadContext::default(), download_ctx: types::DownloadContext::default(),
} }

View File

@@ -1,6 +1,7 @@
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")] #![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
mod app; mod app;
mod search;
mod types; mod types;
mod util; mod util;

182
src/search.rs Normal file
View File

@@ -0,0 +1,182 @@
use std::ops::Deref;
use crate::util::clamp;
use eframe::egui;
use serde::Deserialize;
use tokio::sync::mpsc;
/// Top-level JSON payload returned by the search endpoint.
#[derive(Debug, Clone, Deserialize)]
pub struct Root {
    // File entries for the requested page.
    pub results: Vec<FileEntry>,
    // Paging information for the overall result set.
    pub metadata: Metadata,
}
/// Paging metadata accompanying a page of search results.
#[derive(Debug, Clone, Deserialize)]
pub struct Metadata {
    // Page number this response covers (appears to be 1-based:
    // SearchContext::default starts at 1 and previous_page stops there).
    pub page: usize,
    // Total number of pages available for the query.
    pub total_pages: usize,
    // Page size the server actually used for this response.
    pub page_size: usize,
}
/// A single file entry in the search results.
#[derive(Debug, Clone, Deserialize)]
pub struct FileEntry {
    // Display name of the file.
    pub name: String,
    // File extension.
    pub ext: String,
    // Server-side path; deserialized but currently unused by the UI.
    #[allow(dead_code)]
    pub path: String,
    // Download URL for the file.
    pub url: String,
    // File size — presumably in bytes; TODO confirm against the server API.
    pub size: i64,
    // Preview for the entry — assumed to be a URL to a preview image;
    // NOTE(review): verify against the consuming code.
    pub preview: String,
}
/// All state for the search feature: the query input, the server endpoint,
/// the in-flight request channel, and the current page of results.
pub struct SearchContext {
    // True while a search request is in flight.
    pub is_searching: bool,
    // Base URL of the search API endpoint.
    pub server_url: String,
    // Current query text (bound to the UI's text input).
    pub query: String,
    // Receiving end of the spawned search task's channel;
    // Some only while a search is pending.
    search_rx: Option<mpsc::Receiver<Result<(Vec<FileEntry>, Metadata), String>>>,
    // Entries for the currently displayed page.
    pub search_results: Vec<FileEntry>,
    // Current page number (starts at 1; temporarily 0 while searching).
    pub page: usize,
    // Requested number of entries per page.
    pub per_page: usize,
    // Total pages reported by the last successful search.
    pub total_pages: usize,
    // Total result count; reset when a search starts but not currently
    // populated from responses (process_channel never writes it).
    pub total_results: usize,
}
impl Default for SearchContext {
fn default() -> Self {
SearchContext {
is_searching: false,
server_url: "https://stuff.catgirls.fish/search".to_string(),
query: "".to_string(),
search_rx: None,
search_results: vec![],
page: 1,
per_page: 25,
total_pages: 0,
total_results: 0,
}
}
}
impl SearchContext {
    /// Advance to the next page and re-run the search, unless we are
    /// already on the last page. Evicts egui's image cache so previews
    /// for the new page are reloaded.
    pub fn next_page(&mut self, ctx: &egui::Context) {
        let at_end = self.page == self.total_pages;
        self.page = clamp(self.page + 1, 1, self.total_pages);
        if !at_end {
            self.start_search();
            ctx.forget_all_images();
        }
    }

    /// Step back one page and re-run the search, unless we are already
    /// on page 1. Evicts egui's image cache so previews reload.
    pub fn previous_page(&mut self, ctx: &egui::Context) {
        let at_start = self.page == 1;
        // saturating_sub guards against usize underflow: `page` is reset
        // to 0 while a search is in flight (see start_search).
        self.page = clamp(self.page.saturating_sub(1), 1, self.total_pages);
        if !at_start {
            self.start_search();
            ctx.forget_all_images();
        }
    }

    /// Kick off an asynchronous search for the current query/page settings.
    ///
    /// Clears the previous results, stores the receiving end of a one-shot
    /// channel in `search_rx` (polled later by `process_channel`), and
    /// spawns a tokio task that performs the HTTP request. Returns the
    /// status-bar text the caller should display ("Searching...").
    pub fn start_search(&mut self) -> String {
        // Capture request parameters before the counters below are reset
        // (page/per_page are Copy, so no clone() is needed).
        let page = self.page;
        let page_size = self.per_page;
        let query = self.query.clone();
        let url = self.server_url.clone();
        let (tx, rx) = mpsc::channel::<Result<(Vec<FileEntry>, Metadata), String>>(1);
        self.search_rx = Some(rx);
        self.is_searching = true;
        self.search_results.clear();
        self.page = 0;
        self.total_pages = 0;
        self.total_results = 0;
        tokio::spawn(async move {
            let res = search_files(url, query, page, page_size).await;
            // The receiver may have been dropped meanwhile; ignore send errors.
            let _ = tx.send(res).await;
        });
        "Searching...".to_string()
    }

    /// Poll the in-flight search (if any) and fold its outcome into `self`.
    ///
    /// Returns `(status_text, should_update)`: when `should_update` is true
    /// the caller should show `status_text` in its status bar.
    ///
    /// Bug fix: `done` was previously hard-coded to `false`, so callers
    /// never saw the "Ready!"/error status after a search completed.
    pub fn process_channel(&mut self, ctx: &egui::Context) -> (String, bool) {
        let mut done = false;
        let mut return_str = String::new();
        if let Some(rx) = self.search_rx.as_mut() {
            let mut clear_rx = false;
            match rx.try_recv() {
                Ok(Ok((entries, meta))) => {
                    // Successful search: adopt the new page of results
                    // (move the Vec instead of cloning it).
                    self.search_results = entries;
                    return_str = "Ready!".to_string();
                    self.total_pages = meta.total_pages;
                    self.page = meta.page;
                    self.per_page = meta.page_size;
                    self.is_searching = false;
                    clear_rx = true;
                    done = true;
                }
                Ok(Err(e)) => {
                    // The task ran but the request itself failed.
                    return_str = format!("Search failed: {}", e);
                    self.is_searching = false;
                    clear_rx = true;
                    done = true;
                }
                Err(mpsc::error::TryRecvError::Empty) => {
                    // Still waiting: request a repaint so we poll again soon.
                    ctx.request_repaint();
                }
                Err(mpsc::error::TryRecvError::Disconnected) => {
                    // Sender dropped without delivering a result.
                    return_str = "Search thread ended unexpectedly".to_string();
                    self.is_searching = false;
                    clear_rx = true;
                    done = true;
                }
            }
            if clear_rx {
                self.search_rx = None;
            }
        }
        (return_str, done)
    }
}
/// Query the search API and return the matching entries plus paging metadata.
///
/// Builds `{url}?q={query}&p={page}&s={page_size}`, percent-encoding the
/// query so characters like spaces, `&`, `#`, and `%` cannot corrupt the
/// request (the original interpolated the raw query string). All failures
/// are reported as human-readable `Err(String)`s.
pub async fn search_files(
    url: String,
    query: String,
    page: usize,
    page_size: usize,
) -> Result<(Vec<FileEntry>, Metadata), String> {
    let full_url = format!(
        "{}?q={}&p={}&s={}",
        url,
        percent_encode(&query),
        page,
        page_size
    );
    let client = reqwest::Client::builder()
        .user_agent(
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:141.0) Gecko/20100101 Firefox/141.0",
        )
        // NOTE(review): invalid TLS certificates are accepted on purpose here;
        // confirm this is acceptable for the deployment before shipping.
        .danger_accept_invalid_certs(true)
        .build()
        .map_err(|e| format!("Failed to create the client: {}", e))?;
    let response = client
        .get(full_url)
        .send()
        .await
        .map_err(|e| format!("Failed to download the file: {}", e))?;
    if response.status() != reqwest::StatusCode::OK {
        return Err(format!(
            "Failed to download the file: {}",
            response.status()
        ));
    }
    let results = response
        .json::<Root>()
        .await
        .map_err(|e| format!("Failed to deserialize results data: {}", e))?;
    Ok((results.results, results.metadata))
}

/// Percent-encode a string for use as a URL query value (RFC 3986:
/// unreserved characters pass through, every other byte becomes %XX).
fn percent_encode(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for byte in input.bytes() {
        match byte {
            b'A'..=b'Z' | b'a'..=b'z' | b'0'..=b'9' | b'-' | b'_' | b'.' | b'~' => {
                out.push(byte as char);
            }
            _ => out.push_str(&format!("%{:02X}", byte)),
        }
    }
    out
}

View File

@@ -1,56 +1,7 @@
use serde::Deserialize;
use tokio::sync::mpsc; use tokio::sync::mpsc;
#[derive(Debug, Clone, Deserialize)]
pub struct Root {
pub results: Vec<FileEntry>,
pub metadata: Metadata,
}
#[derive(Debug, Clone, Deserialize)]
pub struct FileEntry {
pub name: String,
pub ext: String,
#[allow(dead_code)]
pub path: String,
pub url: String,
pub size: i64,
pub preview: String,
}
#[derive(Debug, Clone, Deserialize)]
pub struct Metadata {
pub page: usize,
pub total_pages: usize,
pub page_size: usize,
}
#[derive(Default)] #[derive(Default)]
pub struct DownloadContext { pub struct DownloadContext {
pub is_downloading: bool, pub is_downloading: bool,
pub download_rx: Option<mpsc::Receiver<Result<String, String>>>, pub download_rx: Option<mpsc::Receiver<Result<String, String>>>,
} }
pub struct SearchContext {
pub is_searching: bool,
pub search_rx: Option<mpsc::Receiver<Result<(Vec<FileEntry>, Metadata), String>>>,
pub search_results: Vec<FileEntry>,
pub page: usize,
pub per_page: usize,
pub total_pages: usize,
pub total_results: usize,
}
impl Default for SearchContext {
fn default() -> Self {
SearchContext {
is_searching: false,
search_rx: None,
search_results: vec![],
page: 1,
per_page: 25,
total_pages: 0,
total_results: 0,
}
}
}

View File

@@ -3,8 +3,6 @@ use std::cmp::min;
use std::io::Write; use std::io::Write;
use std::path::Path; use std::path::Path;
use crate::types;
pub fn clamp(val: usize, min: usize, max: usize) -> usize { pub fn clamp(val: usize, min: usize, max: usize) -> usize {
if val < min { if val < min {
min min
@@ -15,47 +13,6 @@ pub fn clamp(val: usize, min: usize, max: usize) -> usize {
} }
} }
pub async fn search_files(
url: String,
query: String,
page: usize,
page_size: usize,
) -> Result<(Vec<types::FileEntry>, types::Metadata), String> {
let full_url = format!("{}?q={}&p={}&s={}", url, query, page, page_size);
let client = match reqwest::Client::builder()
.user_agent(
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:141.0) Gecko/20100101 Firefox/141.0",
)
.danger_accept_invalid_certs(true)
.build()
{
Ok(client) => client,
Err(e) => return Err(format!("Failed to create the client: {}", e)),
};
let res = client.get(full_url).send().await;
let response = match res {
Ok(response_ok) => response_ok,
Err(e) => return Err(format!("Failed to download the file: {}", e)),
};
if response.status() != reqwest::StatusCode::OK {
return Err(format!(
"Failed to download the file: {}",
response.status()
));
}
let results = match response.json::<types::Root>().await {
Ok(r) => r,
Err(e) => {
return Err(format!("Failed to deserialize results data: {}", e));
}
};
Ok((results.results, results.metadata))
}
pub async fn download(url: String, dir: String) -> Result<String, String> { pub async fn download(url: String, dir: String) -> Result<String, String> {
let file_name = Path::new(&url) let file_name = Path::new(&url)
.file_name() .file_name()