first commit

This commit is contained in:
root
2025-11-02 00:11:16 +00:00
commit 00d1ff37ba
5 changed files with 369 additions and 0 deletions

188
src/index.rs Normal file
View File

@@ -0,0 +1,188 @@
use indicatif::{ProgressBar, ProgressStyle};
use meilisearch_sdk::client::Client;
use meilisearch_sdk::settings::Settings;
use std::{collections::hash_map::ExtractIf, path::PathBuf, time::Duration};
use tokio::{fs, time::sleep};
use tracing::{error, info};
/// One indexed file, as serialized into the Meilisearch `shared_files` index.
#[derive(serde::Serialize)]
pub struct FileEntry {
    // File name including its extension.
    pub name: String,
    // Extension without the dot, or "none" when the path has no extension.
    pub ext: String,
    // Absolute/on-disk path as walked under the serve root.
    pub path: String,
    // Public download URL (base URL + path relative to the serve root).
    pub url: String,
    // File size in bytes (cast from u64).
    pub size: i64,
    // Preview image URL, or the literal string "None" when no preview exists.
    pub preview: String,
    // Sequential id assigned during the walk; used as the Meilisearch primary key.
    pub id: i64,
}
pub async fn index(
serve_path: &String,
url: &String,
meilisearch_key: &String,
meilisearch_url: &String,
) {
let pb = ProgressBar::new(0);
pb.set_style(ProgressStyle::default_bar()
.template("{msg}\n{spinner:.green} [{elapsed_precise}] [{wide_bar:.cyan/blue}] {pos}/{len} ({per_sec}, {eta})").unwrap()
.progress_chars("#>-"));
pb.set_message("Running index...");
// Main loop
loop {
info!("Running index");
delete_index(meilisearch_key, meilisearch_url).await;
let mut i = 0;
let mut idx = 0;
let mut failed_preview = 0;
let mut files: Vec<FileEntry> = Vec::new();
for res in walkdir::WalkDir::new(&serve_path) {
let entry = match res {
Ok(entry) => entry,
Err(e) => {
error!("Failed to get entry: {}", e);
continue;
}
};
// Make sure the entry is a file
if entry.file_type().is_dir() {
continue;
}
let ext = entry.path().extension();
let ext = match ext {
None => "none".to_string(),
Some(ok) => ok.to_string_lossy().to_string(),
};
if ext != "zip" {
continue;
}
let name = entry.file_name().to_string_lossy().to_string();
let path = entry.path().to_string_lossy().to_string();
let size = entry.metadata().unwrap().len() as i64;
let file_url = format!("{}{}", url, path[serve_path.len() + 1..].to_string());
let parent_dir = path[..path.len() - ext.len() - 1].to_string();
let mut preview = "None".to_string();
for file in walkdir::WalkDir::new(&parent_dir) {
let f = match file {
Ok(f) => f,
Err(e) => {
error!("Failed to read file while looking for preview: {}", e);
failed_preview += 1;
continue;
}
};
let file_name = f.file_name().to_string_lossy().to_string();
if file_name.contains("Preview") {
let preview_path = f.path().to_string_lossy().to_string();
preview = format!(
"{}{}",
url,
preview_path[serve_path.len() + 1..].to_string()
);
}
}
let file_entry = FileEntry {
name,
ext,
path,
url: file_url,
size,
preview,
id: idx,
};
files.push(file_entry);
i += 1;
idx += 1;
pb.inc(1);
if i > 10000 {
send_index_chunk(files, meilisearch_key, meilisearch_url).await;
files = Vec::new();
i = 0;
}
}
// Push any removing entries
send_index_chunk(files, meilisearch_key, meilisearch_url).await;
pb.finish_and_clear();
info!("Done! Total: {}, Failed previews: {}", idx, failed_preview);
sleep(Duration::from_secs(900)).await;
}
}
async fn send_index_chunk(
files: Vec<FileEntry>,
meilisearch_key: &String,
meilisearch_url: &String,
) {
let client = match Client::new(meilisearch_url, Some(meilisearch_key)) {
Ok(client) => client,
Err(e) => {
error!("Failed to create meilisearch client: \n\t{}", e);
return;
}
};
let searchable_attributes = ["name", "path", "ext"];
let ranking_rules = ["words", "typo", "attribute", "exactness", "cost::asc"];
let settings = Settings::new()
.with_ranking_rules(ranking_rules)
.with_searchable_attributes(searchable_attributes);
let result = client
.index("shared_files")
.set_settings(&settings)
.await
.unwrap()
.wait_for_completion(&client, None, None)
.await
.unwrap();
if result.is_failure() {
error!("Failed to set index: \n\t{:?}", result.unwrap_failure());
return;
}
let result = client
.index("shared_files")
.add_or_update(&files, Some("id"))
.await
.unwrap()
.wait_for_completion(&client, None, None)
.await
.unwrap();
if result.is_failure() {
error!("Failed to add files: \n\t{:?}", result.unwrap_failure());
return;
}
}
async fn delete_index(meilisearch_key: &String, meilisearch_url: &String) {
let client = match Client::new(meilisearch_url, Some(meilisearch_key)) {
Ok(ok) => ok,
Err(e) => {
error!("Failed to create meilisearch client: \n\t{}", e);
return;
}
};
let index = client.index("shared_files");
index
.delete()
.await
.unwrap()
.wait_for_completion(&client, None, None)
.await
.unwrap();
}

37
src/main.rs Normal file
View File

@@ -0,0 +1,37 @@
mod index;
mod serve;
use std::process::exit;
use tracing::{debug, error, info};
/// Entry point: loads configuration from the environment (seeded by `.env`)
/// and runs the HTTP server and the background indexer concurrently.
///
/// Every setting except `MEILISEARCH_KEY` has a development-friendly
/// default; the key is mandatory and its absence aborts startup.
#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();
    debug!("Loading .env file");
    if let Err(e) = dotenv::dotenv() {
        error!("Failed to get .env file: {}", e);
        exit(1);
    }
    // Small helper: read an env var, falling back to the given default.
    let env_or = |key: &str, default: &str| -> String {
        std::env::var(key).unwrap_or_else(|_| default.to_string())
    };
    let bind_addr = env_or("BIND_ADDR", "127.0.0.1:8080");
    let serve_path = env_or("SERVE_PATH", "./files/");
    let url = env_or("URL", "http://localhost:8080/files");
    let meilisearch_url = env_or("MEILISEARCH_URL", "http://localhost:7700");
    // No sensible default exists for the API key — fail fast without it.
    let meilisearch_key = match std::env::var("MEILISEARCH_KEY") {
        Ok(key) => key,
        Err(e) => {
            error!(
                "Failed to get meilisearch key from environment. Error: \n\t{}\n\n Exiting...",
                e
            );
            exit(1);
        }
    };
    info!("Starting services...");
    // Run the web server and the indexer side by side; both loop forever.
    tokio::join!(
        serve::serve(bind_addr, &serve_path, &meilisearch_key, &meilisearch_url),
        index::index(&serve_path, &url, &meilisearch_key, &meilisearch_url),
    );
}

123
src/serve.rs Normal file
View File

@@ -0,0 +1,123 @@
use axum::Router;
use axum::extract::{Query, State};
use axum::routing::get;
use meilisearch_sdk::client::Client;
use meilisearch_sdk::search::SearchResult;
use serde::{Deserialize, Serialize};
use std::net::SocketAddr;
use tower_http::services::ServeDir;
use tower_http::trace::TraceLayer;
use tracing::{error, info};
/// Query-string parameters accepted by `GET /search`.
#[derive(Debug, Deserialize)]
pub struct SearchInput {
    // Free-text search query.
    pub q: String,
    // Requested page number (optional; forwarded to Meilisearch as-is).
    pub p: Option<usize>,
    // Requested page size / hits per page (optional).
    pub s: Option<usize>,
}
/// Top-level JSON payload returned by the `/search` endpoint.
#[derive(Debug, Clone, Serialize)]
pub struct Root {
    // Raw Meilisearch hits for the current page.
    pub results: Vec<SearchResult<FileEntryDisplay>>,
    // Paging information accompanying the hits.
    pub metadata: Metadata,
}
/// Paging metadata for a search response.
#[derive(Serialize, Debug, Clone)]
pub struct Metadata {
    // Current page as reported by Meilisearch (0 when absent).
    pub page: usize,
    // Total number of pages as reported by Meilisearch (0 when absent).
    pub total_pages: usize,
    // Page size echoed back from the request (0 when the client omitted it).
    pub page_size: usize,
}
/// Shared axum application state; cloned per request by `with_state`.
#[derive(Clone)]
pub struct AppContext {
    // Meilisearch client used by the search handler.
    pub meilisearch_client: Client,
}
/// Search-hit document shape deserialized from Meilisearch and re-serialized
/// to clients. Mirrors the indexer's `FileEntry` minus the internal `id`.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileEntryDisplay {
    // File name including its extension.
    pub name: String,
    // Extension without the dot, or "none".
    pub ext: String,
    // On-disk path under the serve root.
    pub path: String,
    // Public download URL.
    pub url: String,
    // File size in bytes.
    pub size: i64,
    // Preview image URL, or the literal string "None".
    pub preview: String,
}
pub async fn serve(
addr: String,
path: &String,
meilisearch_key: &String,
meilisearch_url: &String,
) {
let addr = SocketAddr::from(addr.parse::<SocketAddr>().unwrap());
let listener = tokio::net::TcpListener::bind(addr).await.unwrap();
let client = match Client::new(meilisearch_url, Some(meilisearch_key)) {
Ok(client) => client,
Err(e) => {
error!(
"Failed to create meilisearch client for http context: {}",
e
);
return;
}
};
let context = AppContext {
meilisearch_client: client,
};
let app = Router::new()
.nest_service("/files", ServeDir::new(path))
.route("/search", get(search))
.with_state(context);
tracing::info!("listening on {}", listener.local_addr().unwrap());
axum::serve(listener, app.layer(TraceLayer::new_for_http()))
.await
.unwrap();
}
/// `GET /search` handler: forwards the query to the Meilisearch
/// `shared_files` index and returns a JSON-encoded [`Root`] (hits plus
/// paging metadata) as a plain string.
///
/// Any search or serialization failure is returned as the `Err` string for
/// axum to deliver to the client.
pub async fn search(query: Query<SearchInput>, state: State<AppContext>) -> Result<String, String> {
    info!(
        "Got query for {} p: {:?} s: {:?}",
        query.q, query.p, query.s
    );
    let index = state.meilisearch_client.index("shared_files");
    // Build the search request, applying paging only when the client asked.
    let mut builder = index.search();
    builder.with_query(query.q.as_str());
    if let Some(page) = query.p {
        builder.page = Some(page);
    }
    if let Some(hits_per_page) = query.s {
        builder.hits_per_page = Some(hits_per_page);
    }
    let res = builder
        .execute::<FileEntryDisplay>()
        .await
        .map_err(|e| format!("Failed with error: {}", e))?;
    // Missing paging figures collapse to 0, same as before.
    let metadata = Metadata {
        page: res.page.unwrap_or(0),
        total_pages: res.total_pages.unwrap_or(0),
        page_size: query.s.unwrap_or(0),
    };
    let root = Root {
        results: res.hits,
        metadata,
    };
    serde_json::to_string(&root).map_err(|e| format!("Failed to serialize: {}", e))
}