Organize storage by type directories with human-readable filenames
- Files now stored in type subdirectories: images/, pdf/, documents/,
video/, audio/, others/
- Filename format: YYYY-MM-DD_HH-MM_{hash8}.{ext} (human-readable)
- Added mime_to_type_dir() for MIME-to-directory classification
- write_asset() auto-creates type subdirectories
- Backward compatible: old flat filenames still work via DB lookup
- Filemanager: added TYPE/ virtual tree root for browsing by content type
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
parent
c77e6d4105
commit
e60c880232
@ -531,6 +531,24 @@ function mimeToExt(mime) {
|
||||
return map[mime] || mime.split('/').pop() || 'bin';
|
||||
}
|
||||
|
||||
// Map a MIME type onto one of the virtual TYPE/ directory categories:
// 'images', 'pdf', 'video', 'audio', 'documents', or 'others'.
// NOTE(review): mirrors mime_to_type_dir() in src/storage.rs — keep in sync.
function mimeToTypeCategory(mime) {
  if (mime.startsWith('image/')) return 'images';
  if (mime === 'application/pdf') return 'pdf';
  if (mime.startsWith('video/')) return 'video';
  if (mime.startsWith('audio/')) return 'audio';

  // Exact-match MIME types that count as documents.
  const documentExact = new Set([
    'application/json',
    'application/xml',
    'application/msword',
    'application/rtf',
    'application/vnd.oasis.opendocument.text',
    'application/vnd.oasis.opendocument.spreadsheet',
  ]);
  const isDocument =
    mime.startsWith('text/') ||
    documentExact.has(mime) ||
    mime.startsWith('application/vnd.openxmlformats') ||
    mime.startsWith('application/vnd.ms-');

  return isDocument ? 'documents' : 'others';
}
|
||||
|
||||
function buildVirtualTree(assets) {
|
||||
const root = { name: '', type: 'dir', children: {}, items: [] };
|
||||
|
||||
@ -591,6 +609,12 @@ function buildVirtualTree(assets) {
|
||||
addFile(tagDir, friendlyName, asset);
|
||||
}
|
||||
}
|
||||
|
||||
// TYPE/
|
||||
const typeRoot = ensureDir(root, 'TYPE');
|
||||
const typeCat = mimeToTypeCategory(asset.mime_type);
|
||||
const typeDir = ensureDir(typeRoot, typeCat);
|
||||
addFile(typeDir, friendlyName, asset);
|
||||
}
|
||||
|
||||
return root;
|
||||
|
||||
247
src/storage.rs
247
src/storage.rs
@ -1,43 +1,55 @@
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
/// Build the physical filename per the spec:
|
||||
/// `{timestamp}_{sha256}_{truncated_tags}.{extension}`
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
/// Classify a MIME type into a storage subdirectory.
///
/// Returns one of: `"images"`, `"pdf"`, `"video"`, `"audio"`,
/// `"documents"`, or the catch-all `"others"`.
pub fn mime_to_type_dir(mime: &str) -> &str {
    // MIME types that are documents by exact match (prefix rules below
    // cover text/*, OOXML, and the application/vnd.ms-* family).
    const DOCUMENT_EXACT: [&str; 6] = [
        "application/json",
        "application/xml",
        "application/msword",
        "application/rtf",
        "application/vnd.oasis.opendocument.text",
        "application/vnd.oasis.opendocument.spreadsheet",
    ];

    match mime {
        m if m.starts_with("image/") => "images",
        "application/pdf" => "pdf",
        m if m.starts_with("video/") => "video",
        m if m.starts_with("audio/") => "audio",
        m if m.starts_with("text/")
            || m.starts_with("application/vnd.openxmlformats")
            || m.starts_with("application/vnd.ms-")
            || DOCUMENT_EXACT.contains(&m) =>
        {
            "documents"
        }
        _ => "others",
    }
}
|
||||
|
||||
/// Build the physical filename (including type subdirectory) per the spec:
|
||||
/// `{type_dir}/{YYYY-MM-DD_HH-MM}_{hash8}.{extension}`
|
||||
///
|
||||
/// Example: `images/2026-03-13_14-30_a3b2c4d5.jpg`
|
||||
pub fn build_filename(
|
||||
timestamp: i64,
|
||||
hash: &str,
|
||||
tags: &[String],
|
||||
_tags: &[String],
|
||||
mime_type: &str,
|
||||
) -> String {
|
||||
let extension = mime_to_extension(mime_type);
|
||||
let type_dir = mime_to_type_dir(mime_type);
|
||||
|
||||
let base = format!("{}_{}", timestamp, hash);
|
||||
// Convert timestamp_ms to human-readable YYYY-MM-DD_HH-MM
|
||||
let dt = DateTime::<Utc>::from_timestamp_millis(timestamp)
|
||||
.unwrap_or_else(|| DateTime::<Utc>::from_timestamp(0, 0).unwrap());
|
||||
let time_part = dt.format("%Y-%m-%d_%H-%M").to_string();
|
||||
|
||||
if tags.is_empty() {
|
||||
return format!("{}.{}", base, extension);
|
||||
}
|
||||
// Use first 8 chars of hash for short identifier
|
||||
let short_hash = if hash.len() >= 8 { &hash[..8] } else { hash };
|
||||
|
||||
// Sanitize tags: strip non-alphanumeric, join with underscore
|
||||
let sanitized_tags: Vec<String> = tags
|
||||
.iter()
|
||||
.map(|t| t.chars().filter(|c| c.is_alphanumeric()).collect::<String>())
|
||||
.filter(|t| !t.is_empty())
|
||||
.collect();
|
||||
|
||||
if sanitized_tags.is_empty() {
|
||||
return format!("{}.{}", base, extension);
|
||||
}
|
||||
|
||||
let tag_part = sanitized_tags.join("_");
|
||||
|
||||
// Truncate to keep total filename under ~200 chars (safely under 255)
|
||||
let max_tag_len = 200usize.saturating_sub(base.len() + extension.len() + 2); // 2 for _ and .
|
||||
let truncated = if tag_part.len() > max_tag_len {
|
||||
&tag_part[..max_tag_len]
|
||||
} else {
|
||||
&tag_part
|
||||
};
|
||||
|
||||
format!("{}_{}. {}", base, truncated, extension)
|
||||
format!("{}/{}_{}. {}", type_dir, time_part, short_hash, extension)
|
||||
.replace(". ", ".")
|
||||
}
|
||||
|
||||
@ -75,47 +87,92 @@ pub fn mime_to_extension(mime: &str) -> &str {
|
||||
}
|
||||
}
|
||||
|
||||
/// Write asset bytes under `root`. Creates the type subdirectory if needed.
///
/// `filename` may include a subdirectory prefix
/// (e.g. `"images/2026-01-01_12-00_abcd1234.jpg"`).
/// Returns the full path that was written.
pub fn write_asset(root: &Path, filename: &str, data: &[u8]) -> std::io::Result<PathBuf> {
    let path = root.join(filename);

    // A freshly introduced type category won't have its directory yet;
    // create the whole parent chain on demand.
    match path.parent() {
        Some(parent) => std::fs::create_dir_all(parent)?,
        None => {}
    }

    std::fs::write(&path, data)?;
    Ok(path)
}
|
||||
|
||||
/// Read asset bytes from the storage root.
///
/// `filename` may include a subdirectory prefix (e.g. `"pdf/x.pdf"`).
pub fn read_asset(root: &Path, filename: &str) -> std::io::Result<Vec<u8>> {
    std::fs::read(root.join(filename))
}
|
||||
|
||||
/// Move an asset file into the `.trash` directory under `root`.
///
/// `filename` may carry a subdirectory prefix (e.g. `"images/file.jpg"`);
/// only the basename survives in `.trash` — the type-directory structure
/// is flattened.
pub fn trash_asset_file(root: &Path, filename: &str) -> std::io::Result<()> {
    let trash_dir = root.join(".trash");
    std::fs::create_dir_all(&trash_dir)?;

    // Flatten: keep just the final path component for the trashed copy.
    // Fall back to the raw name if there is no basename to extract.
    let basename = Path::new(filename)
        .file_name()
        .unwrap_or_else(|| std::ffi::OsStr::new(filename));

    std::fs::rename(root.join(filename), trash_dir.join(basename))
}
|
||||
|
||||
/// Parse a physical filename to extract the hash component.
///
/// New format: `{type_dir}/{YYYY-MM-DD_HH-MM}_{hash8}.{ext}`
/// Legacy format: `{timestamp}_{sha256_64}_{tags}.{ext}`
///
/// Returns the hash portion (8 chars for new format, 64 chars for legacy),
/// or `None` when the name matches neither shape.
pub fn parse_hash_from_filename(filename: &str) -> Option<String> {
    // Strip any directory prefix, handling both '/' and '\' separators.
    let basename = filename.rsplit(['/', '\\']).next().unwrap_or(filename);

    // Drop the extension; a name with no '.' is not a valid asset name.
    let (stem, _ext) = basename.rsplit_once('.')?;
    let parts: Vec<&str> = stem.splitn(3, '_').collect();

    match parts.as_slice() {
        // New format splits as ["YYYY-MM-DD", "HH-MM", "hash8"]: the date
        // segment is exactly 10 chars and contains dashes.
        [date, _time, hash] if date.len() == 10 && date.contains('-') => {
            Some((*hash).to_string())
        }
        // Legacy format: second segment is the full 64-hex-char sha256.
        [_, hash, ..] if hash.len() == 64 => Some((*hash).to_string()),
        _ => None,
    }
}
|
||||
|
||||
/// Parse a physical filename to extract the timestamp component.
|
||||
///
|
||||
/// New format: `{type_dir}/{YYYY-MM-DD_HH-MM}_{hash8}.{ext}` → parses date to epoch ms
|
||||
/// Legacy format: `{timestamp}_{sha256}_{tags}.{ext}` → raw epoch ms
|
||||
pub fn parse_timestamp_from_filename(filename: &str) -> Option<i64> {
|
||||
let stem = filename.rsplit_once('.')?.0;
|
||||
let ts_str = stem.split('_').next()?;
|
||||
// Strip any directory prefix
|
||||
let basename = filename.rsplit('/').next().unwrap_or(filename);
|
||||
let basename = basename.rsplit('\\').next().unwrap_or(basename);
|
||||
|
||||
let stem = basename.rsplit_once('.')?.0;
|
||||
let parts: Vec<&str> = stem.splitn(3, '_').collect();
|
||||
|
||||
// New format: YYYY-MM-DD_HH-MM_hash8
|
||||
if parts.len() >= 2 && parts[0].len() == 10 && parts[0].contains('-') {
|
||||
let date_str = format!("{}_{}", parts[0], parts[1]);
|
||||
let dt = chrono::NaiveDateTime::parse_from_str(&date_str, "%Y-%m-%d_%H-%M").ok()?;
|
||||
let utc = dt.and_utc();
|
||||
return Some(utc.timestamp_millis());
|
||||
}
|
||||
|
||||
// Legacy format: first part is raw epoch ms
|
||||
let ts_str = parts.first()?;
|
||||
ts_str.parse().ok()
|
||||
}
|
||||
|
||||
@ -125,23 +182,66 @@ mod tests {
|
||||
use tempfile::TempDir;
|
||||
|
||||
#[test]
fn test_mime_to_type_dir() {
    // Table-driven: (mime, expected category).
    let cases = [
        ("image/jpeg", "images"),
        ("image/png", "images"),
        ("application/pdf", "pdf"),
        ("text/plain", "documents"),
        ("application/json", "documents"),
        ("video/mp4", "video"),
        ("audio/mpeg", "audio"),
        ("application/zip", "others"),
        ("application/octet-stream", "others"),
    ];
    for (mime, expected) in cases {
        assert_eq!(mime_to_type_dir(mime), expected, "mime: {}", mime);
    }
}

#[test]
fn test_build_filename_image() {
    // 2026-03-13 14:30:00 UTC expressed as epoch milliseconds.
    let ts = 1_773_412_200_000_i64;
    let name = build_filename(ts, "a3b2c4d5e6f7a8b9", &[], "image/jpeg");
    assert_eq!(name, "images/2026-03-13_14-30_a3b2c4d5.jpg");
}
|
||||
|
||||
#[test]
|
||||
fn test_build_filename_strips_special_chars_from_tags() {
|
||||
let tags = vec!["hello world!".to_string(), "test@123".to_string()];
|
||||
let name = build_filename(100, "abc", &tags, "text/plain");
|
||||
assert_eq!(name, "100_abc_helloworld_test123.txt");
|
||||
fn test_build_filename_pdf() {
|
||||
let ts = 1773412200000i64;
|
||||
let hash = "deadbeef12345678".to_string();
|
||||
let name = build_filename(ts, &hash, &[], "application/pdf");
|
||||
assert_eq!(name, "pdf/2026-03-13_14-30_deadbeef.pdf");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_filename_text() {
|
||||
let ts = 1773412200000i64;
|
||||
let hash = "abcdef0123456789".to_string();
|
||||
let name = build_filename(ts, &hash, &["ignored".to_string()], "text/plain");
|
||||
// Tags are ignored in new format
|
||||
assert_eq!(name, "documents/2026-03-13_14-30_abcdef01.txt");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_filename_video() {
|
||||
let ts = 1773412200000i64;
|
||||
let hash = "ff00ff00ff00ff00".to_string();
|
||||
let name = build_filename(ts, &hash, &[], "video/mp4");
|
||||
assert_eq!(name, "video/2026-03-13_14-30_ff00ff00.mp4");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_filename_audio() {
|
||||
let ts = 1773412200000i64;
|
||||
let hash = "aa11bb22cc33dd44".to_string();
|
||||
let name = build_filename(ts, &hash, &[], "audio/mpeg");
|
||||
assert_eq!(name, "audio/2026-03-13_14-30_aa11bb22.mp3");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_build_filename_others() {
|
||||
let ts = 1773412200000i64;
|
||||
let hash = "1234567890abcdef".to_string();
|
||||
let name = build_filename(ts, &hash, &[], "application/zip");
|
||||
assert_eq!(name, "others/2026-03-13_14-30_12345678.zip");
|
||||
}
|
||||
|
||||
#[test]
|
||||
@ -153,38 +253,55 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
fn test_write_and_read_asset_with_subdir() {
    let dir = TempDir::new().unwrap();
    let name = "images/2026-01-01_12-00_abcd1234.jpg";
    let payload = b"hello world";

    let written = write_asset(dir.path(), name, payload).unwrap();
    assert!(written.exists());
    // The images/ subdirectory must have been created on demand.
    assert!(dir.path().join("images").is_dir());

    assert_eq!(read_asset(dir.path(), name).unwrap(), payload);
}
|
||||
|
||||
#[test]
fn test_trash_asset_file_with_subdir() {
    let dir = TempDir::new().unwrap();
    let name = "images/2026-01-01_12-00_abcd1234.jpg";
    write_asset(dir.path(), name, b"bye").unwrap();

    trash_asset_file(dir.path(), name).unwrap();

    // Source is gone; trash holds only the flattened basename.
    assert!(!dir.path().join(name).exists());
    let trashed = dir.path().join(".trash").join("2026-01-01_12-00_abcd1234.jpg");
    assert!(trashed.exists());
}
|
||||
|
||||
#[test]
fn test_parse_hash_from_new_filename() {
    let got = parse_hash_from_filename("images/2026-03-13_14-30_a3b2c4d5.jpg");
    assert_eq!(got.as_deref(), Some("a3b2c4d5"));
}

#[test]
fn test_parse_hash_from_legacy_filename() {
    let hash_64 = "a".repeat(64);
    let legacy = format!("1773014400123_{}.pdf", hash_64);
    assert_eq!(parse_hash_from_filename(&legacy), Some(hash_64));
}
|
||||
|
||||
#[test]
fn test_parse_timestamp_from_new_filename() {
    let ts = parse_timestamp_from_filename("images/2026-03-13_14-30_a3b2c4d5.jpg")
        .expect("new-format name should yield a timestamp");
    // 2026-03-13 14:30 UTC in epoch milliseconds.
    assert_eq!(ts, 1_773_412_200_000);
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_timestamp_from_legacy_filename() {
|
||||
let hash_64 = "b".repeat(64);
|
||||
let filename = format!("1773014400123_{}.pdf", hash_64);
|
||||
assert_eq!(parse_timestamp_from_filename(&filename), Some(1773014400123));
|
||||
|
||||
Loading…
x
Reference in New Issue
Block a user