diff --git a/.cspell/project_words.txt b/.cspell/project_words.txt index d0d6976e8fb4..99d7e1e6ed79 100644 --- a/.cspell/project_words.txt +++ b/.cspell/project_words.txt @@ -59,6 +59,7 @@ rspcws rywalker sanjay sdvol +sdvolume sharma skippable spacedrive @@ -67,6 +68,7 @@ spacetunnel specta storedkey stringly +swapy thumbstrips tobiaslutke tokio diff --git a/.gitignore b/.gitignore index ee85b90abaa3..3fb647e76a11 100644 --- a/.gitignore +++ b/.gitignore @@ -345,6 +345,8 @@ playwright-report .spacedrive dev.db-journal .build/ +# swift vscode extension index +.index-build/ .swiftpm /core/migration_test sd_init.json diff --git a/Cargo.lock b/Cargo.lock index f10466234c12..7f43d7862086 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -729,6 +729,12 @@ dependencies = [ "failure", ] +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + [[package]] name = "base64" version = "0.21.7" @@ -3133,7 +3139,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f2f12607f92c69b12ed746fabf9ca4f5c482cba46679c1a75b874ed7c26adb" dependencies = [ "futures-io", - "rustls", + "rustls 0.23.16", "rustls-pki-types", ] @@ -4270,6 +4276,20 @@ dependencies = [ "want", ] +[[package]] +name = "hyper-rustls" +version = "0.24.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +dependencies = [ + "futures-util", + "http 0.2.12", + "hyper 0.14.31", + "rustls 0.21.12", + "tokio", + "tokio-rustls 0.24.1", +] + [[package]] name = "hyper-rustls" version = "0.27.3" @@ -4280,12 +4300,12 @@ dependencies = [ "http 1.1.0", "hyper 1.5.0", "hyper-util", - "rustls", + "rustls 0.23.16", "rustls-pki-types", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tower-service", - "webpki-roots", + "webpki-roots 0.26.6", ] [[package]] @@ -4807,7 +4827,7 @@ dependencies = [ "reqwest 0.12.8", "ring 0.17.8", "rtnetlink 0.13.1", - "rustls", + "rustls 0.23.16", "rustls-pemfile 2.2.0", "rustls-webpki 0.102.8", "serde", @@ -4820,7 +4840,7 @@ dependencies = [ "thiserror 1.0.64", "time", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "tokio-rustls-acme", "tokio-stream", "tokio-tungstenite 0.21.0", @@ -4832,7 +4852,7 @@ dependencies = [ "tungstenite 0.21.0", "url", "watchable", - "webpki-roots", + "webpki-roots 0.26.6", "windows 0.51.1", "wmi", "x509-parser", @@ -4850,7 +4870,7 @@ dependencies = [ "iroh-quinn-udp", "pin-project-lite", "rustc-hash 2.0.0", - "rustls", + "rustls 0.23.16", "socket2", "thiserror 1.0.64", "tokio", @@ -4867,7 +4887,7 @@ dependencies = [ "rand 0.8.5", "ring 0.17.8", "rustc-hash 2.0.0", - "rustls", + "rustls 0.23.16", "rustls-platform-verifier 0.3.4", "slab", "thiserror 1.0.64", @@ -5623,7 +5643,7 @@ dependencies = [ "quinn", "rand 0.8.5", "ring 0.17.8", - "rustls", + "rustls 0.23.16", "socket2", "thiserror 1.0.64", "tokio", @@ -5748,7 +5768,7 @@ dependencies = [ "libp2p-identity", "rcgen 0.11.3", "ring 0.17.8", - "rustls", + "rustls 0.23.16", "rustls-webpki 0.101.7", "thiserror 1.0.64", "x509-parser", @@ -6863,6 +6883,26 @@ dependencies = [ "syn 2.0.87", ] +[[package]] +name = "oauth2" +version = "4.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c38841cdd844847e3e7c8d29cef9dcfed8877f8f56f9071f77843ecf3baf937f" +dependencies = [ + "base64 0.13.1", + "chrono", + "getrandom 0.2.15", + "http 0.2.12", + "rand 0.8.5", + "reqwest 
0.11.27", + "serde", + "serde_json", + "serde_path_to_error", + "sha2", + "thiserror 1.0.64", + "url", +] + [[package]] name = "objc" version = "0.2.7" @@ -8594,7 +8634,7 @@ dependencies = [ "quinn-proto", "quinn-udp", "rustc-hash 2.0.0", - "rustls", + "rustls 0.23.16", "socket2", "thiserror 1.0.64", "tokio", @@ -8611,7 +8651,7 @@ dependencies = [ "rand 0.8.5", "ring 0.17.8", "rustc-hash 2.0.0", - "rustls", + "rustls 0.23.16", "slab", "thiserror 1.0.64", "tinyvec", @@ -9007,6 +9047,7 @@ dependencies = [ "http 0.2.12", "http-body 0.4.6", "hyper 0.14.31", + "hyper-rustls 0.24.2", "hyper-tls 0.5.0", "ipnet", "js-sys", @@ -9016,6 +9057,7 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", + "rustls 0.21.12", "rustls-pemfile 1.0.4", "serde", "serde_json", @@ -9024,11 +9066,13 @@ dependencies = [ "system-configuration 0.5.1", "tokio", "tokio-native-tls", + "tokio-rustls 0.24.1", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", + "webpki-roots 0.25.4", "winreg 0.50.0", ] @@ -9050,7 +9094,7 @@ dependencies = [ "http-body 1.0.1", "http-body-util", "hyper 1.5.0", - "hyper-rustls", + "hyper-rustls 0.27.3", "hyper-tls 0.6.0", "hyper-util", "ipnet", @@ -9062,7 +9106,7 @@ dependencies = [ "percent-encoding", "pin-project-lite", "quinn", - "rustls", + "rustls 0.23.16", "rustls-pemfile 2.2.0", "rustls-pki-types", "serde", @@ -9072,7 +9116,7 @@ dependencies = [ "system-configuration 0.6.1", "tokio", "tokio-native-tls", - "tokio-rustls", + "tokio-rustls 0.26.0", "tokio-util", "tower-service", "url", @@ -9080,7 +9124,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", + "webpki-roots 0.26.6", "windows-registry 0.2.0", ] @@ -9421,6 +9465,18 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "rustls" +version = "0.21.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" +dependencies = [ + "log", + "ring 0.17.8", + "rustls-webpki 0.101.7", + "sct", +] + [[package]] name = "rustls" version = "0.23.16" @@ -9486,13 +9542,13 @@ dependencies = [ "jni 0.19.0", "log", "once_cell", - "rustls", + "rustls 0.23.16", "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki 0.102.8", "security-framework", "security-framework-sys", - "webpki-roots", + "webpki-roots 0.26.6", "winapi", ] @@ -9507,7 +9563,7 @@ dependencies = [ "jni 0.19.0", "log", "once_cell", - "rustls", + "rustls 0.23.16", "rustls-native-certs", "rustls-platform-verifier-android", "rustls-webpki 0.102.8", @@ -9711,6 +9767,16 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" +[[package]] +name = "sct" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +dependencies = [ + "ring 0.17.8", + "untrusted 0.9.0", +] + [[package]] name = "sd-actors" version = "0.1.0" @@ -9814,6 +9880,7 @@ dependencies = [ "mini-moka", "normpath", "notify", + "oauth2", "openssl", "openssl-sys", "pin-project-lite", @@ -9896,7 +9963,7 @@ dependencies = [ "reqwest-retry", "rmp-serde", "rspc", - "rustls", + "rustls 0.23.16", "rustls-platform-verifier 0.4.0", "sd-actors", "sd-cloud-schema", @@ -12265,13 +12332,23 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls 0.21.12", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4" dependencies = [ - "rustls", + "rustls 0.23.16", "rustls-pki-types", "tokio", ] @@ -12293,15 +12370,15 @@ dependencies = [ "rcgen 0.12.1", "reqwest 0.12.8", "ring 0.17.8", - "rustls", + "rustls 0.23.16", "serde", "serde_json", "thiserror 1.0.64", "time", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.0", "url", - "webpki-roots", + "webpki-roots 0.26.6", "x509-parser", ] @@ -12968,10 +13045,10 @@ dependencies = [ "base64 0.22.1", "log", "once_cell", - "rustls", + "rustls 0.23.16", "rustls-pki-types", "url", - "webpki-roots", + "webpki-roots 0.26.6", ] [[package]] @@ -13475,6 +13552,12 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "webpki-roots" +version = "0.25.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" + [[package]] name = "webpki-roots" version = "0.26.6" diff --git a/core/Cargo.toml b/core/Cargo.toml index 967553ab50e8..fa4b5cdc7fdc 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -95,6 +95,7 @@ http-range = "0.1.5" hyper-util = { version = "0.1.9", features = ["tokio"] } int-enum = "0.5" # Update blocked due to API breaking changes mini-moka = "0.10.3" +oauth2 = "4.4" serde-hashkey = "0.4.5" serde_repr = "0.1.19" serde_with = "3.8" diff --git a/core/prisma/migrations/20241107090427_storage_statistics_drop/migration.sql b/core/prisma/migrations/20241107090427_storage_statistics_drop/migration.sql new file mode 100644 index 000000000000..4d80d37c136b --- /dev/null +++ b/core/prisma/migrations/20241107090427_storage_statistics_drop/migration.sql @@ -0,0 +1,2 @@ +-- This is no longer in the database itself, it can be dropped +DROP TABLE "storage_statistics"; \ No newline at end of file diff --git a/core/src/api/devices.rs b/core/src/api/devices.rs new file mode 100644 index 000000000000..36e7e179989b --- /dev/null +++ b/core/src/api/devices.rs @@ -0,0 +1,14 @@ +use super::{utils::library, Ctx, R}; +use rspc::alpha::AlphaRouter; +use serde::Deserialize; +use specta::Type; + +pub(crate) fn mount() -> AlphaRouter { + R.router().procedure( + "list", + R.with2(library()) + .query(|(node, library), _: ()| async move { + Ok(library.db.device().find_many(vec![]).exec().await?) 
+ }), + ) +} diff --git a/core/src/api/libraries.rs b/core/src/api/libraries.rs index 76c4a4301827..d7e5cfe6c819 100644 --- a/core/src/api/libraries.rs +++ b/core/src/api/libraries.rs @@ -1,7 +1,7 @@ use crate::{ api::CoreEvent, invalidate_query, - library::{Library, LibraryConfig, LibraryName}, + library::{update_library_statistics, Library, LibraryConfig, LibraryName}, location::{scan_location, LocationCreateArgs, ScanState}, util::MaybeUndefined, Node, @@ -644,13 +644,13 @@ async fn update_statistics_loop( while let Some(msg) = msg_stream.next().await { match msg { Message::Tick => { - // if last_received_at.elapsed() < FIVE_MINUTES { - // if let Err(e) = update_library_statistics(&node, &library).await { - // error!(?e, "Failed to update library statistics;"); - // } else { - // invalidate_query!(&library, "library.statistics"); - // } - // } + if last_received_at.elapsed() < FIVE_MINUTES { + if let Err(e) = update_library_statistics(&node, &library).await { + error!(?e, "Failed to update library statistics;"); + } else { + invalidate_query!(&library, "library.statistics"); + } + } } Message::Requested(instant) => { if instant - last_received_at > TWO_MINUTES { diff --git a/core/src/api/mod.rs b/core/src/api/mod.rs index 7a1dd1597d58..3f2ba1acc386 100644 --- a/core/src/api/mod.rs +++ b/core/src/api/mod.rs @@ -26,6 +26,7 @@ use tracing::warn; mod backups; mod cloud; +pub mod devices; mod ephemeral_files; mod files; mod jobs; @@ -197,6 +198,7 @@ pub(crate) fn mount() -> Arc { .merge("search.", search::mount()) .merge("library.", libraries::mount()) .merge("volumes.", volumes::mount()) + .merge("devices.", devices::mount()) .merge("tags.", tags::mount()) .merge("labels.", labels::mount()) .merge("locations.", locations::mount()) diff --git a/core/src/library/statistics.rs b/core/src/library/statistics.rs index 9b1d6c085fa9..9fe1cd770d90 100644 --- a/core/src/library/statistics.rs +++ b/core/src/library/statistics.rs @@ -1,110 +1,116 @@ -// use crate::{ -// api::utils::get_size, invalidate_query, library::Library, volume::os::get_volumes, Node, -// }; - -// use sd_prisma::prisma::{statistics, storage_statistics}; -// use sd_utils::db::size_in_bytes_from_db; - -// use chrono::Utc; -// use tracing::{error, info}; - -// use super::LibraryManagerError; - -// pub async fn update_library_statistics( -// node: &Node, -// library: &Library, -// ) -> Result { -// let (mut total_capacity, mut available_capacity) = library -// .db -// .storage_statistics() -// .find_many(vec![]) -// .select(storage_statistics::select!({ total_capacity available_capacity })) -// .exec() -// .await? 
-// .into_iter() -// .fold((0, 0), |(mut total, mut available), stat| { -// total += stat.total_capacity as u64; -// available += stat.available_capacity as u64; -// (total, available) -// }); - -// if total_capacity == 0 && available_capacity == 0 { -// // Failed to fetch storage statistics from database, so we compute from local volumes -// let volumes = get_volumes().await; - -// let mut local_total_capacity: u64 = 0; -// let mut local_available_capacity: u64 = 0; -// for volume in volumes { -// local_total_capacity += volume.total_bytes_capacity; -// local_available_capacity += volume.total_bytes_available; -// } - -// total_capacity = local_total_capacity; -// available_capacity = local_available_capacity; -// } - -// let total_bytes_used = total_capacity - available_capacity; - -// let library_db_size = get_size( -// node.config -// .data_directory() -// .join("libraries") -// .join(format!("{}.db", library.id)), -// ) -// .await -// .unwrap_or(0); - -// let total_library_bytes = library -// .db -// .location() -// .find_many(vec![]) -// .exec() -// .await -// .unwrap_or_else(|e| { -// error!(?e, "Failed to get locations;"); -// vec![] -// }) -// .into_iter() -// .map(|location| { -// location -// .size_in_bytes -// .map(|size| size_in_bytes_from_db(&size)) -// .unwrap_or(0) -// }) -// .sum::(); - -// let thumbnail_folder_size = get_size(node.config.data_directory().join("thumbnails")) -// .await -// .unwrap_or(0); - -// use statistics::*; -// let params = vec![ -// id::set(1), // Each library is a database so only one of these ever exists -// date_captured::set(Utc::now().into()), -// total_object_count::set(0), -// library_db_size::set(library_db_size.to_string()), -// total_library_bytes::set(total_library_bytes.to_string()), -// total_local_bytes_used::set(total_bytes_used.to_string()), -// total_local_bytes_capacity::set(total_capacity.to_string()), -// total_local_bytes_free::set(available_capacity.to_string()), -// total_library_preview_media_bytes::set(thumbnail_folder_size.to_string()), -// ]; - -// let stats = library -// .db -// .statistics() -// .upsert( -// // Each library is a database so only one of these ever exists -// statistics::id::equals(1), -// statistics::create(params.clone()), -// params, -// ) -// .exec() -// .await?; - -// info!(?stats, "Updated library statistics;"); - -// invalidate_query!(&library, "library.statistics"); - -// Ok(stats) -// } +use crate::{api::utils::get_size, invalidate_query, library::Library, Node}; + +use sd_prisma::prisma::{statistics, volume}; +use sd_utils::db::size_in_bytes_from_db; + +use chrono::Utc; +use tracing::{error, info}; + +use super::LibraryManagerError; + +pub async fn update_library_statistics( + node: &Node, + library: &Library, +) -> Result { + let (mut total_capacity, mut available_capacity) = library + .db + .volume() + .find_many(vec![]) + .select(volume::select!({ total_bytes_capacity total_bytes_available })) + .exec() + .await? 
+ .into_iter() + .fold((0, 0), |(mut total, mut available), stat| { + total += stat + .total_bytes_capacity + .unwrap_or_else(|| "0".to_string()) + .parse::() + .unwrap_or(0); + available += stat + .total_bytes_available + .unwrap_or_else(|| "0".to_string()) + .parse::() + .unwrap_or(0); + (total, available) + }); + + // if total_capacity == 0 && available_capacity == 0 { + // // Failed to fetch storage statistics from database, so we compute from local volumes + // let volumes = get_volumes().await; + + // let mut local_total_capacity: u64 = 0; + // let mut local_available_capacity: u64 = 0; + // for volume in volumes { + // local_total_capacity += volume.total_bytes_capacity; + // local_available_capacity += volume.total_bytes_available; + // } + + // total_capacity = local_total_capacity; + // available_capacity = local_available_capacity; + // } + + let total_bytes_used = total_capacity - available_capacity; + + let library_db_size = get_size( + node.config + .data_directory() + .join("libraries") + .join(format!("{}.db", library.id)), + ) + .await + .unwrap_or(0); + + let total_library_bytes = library + .db + .location() + .find_many(vec![]) + .exec() + .await + .unwrap_or_else(|e| { + error!(?e, "Failed to get locations;"); + vec![] + }) + .into_iter() + .map(|location| { + location + .size_in_bytes + .map(|size| size_in_bytes_from_db(&size)) + .unwrap_or(0) + }) + .sum::(); + + let thumbnail_folder_size = get_size(node.config.data_directory().join("thumbnails")) + .await + .unwrap_or(0); + + use statistics::*; + let params = vec![ + id::set(1), // Each library is a database so only one of these ever exists + date_captured::set(Utc::now().into()), + total_object_count::set(0), + library_db_size::set(library_db_size.to_string()), + total_library_bytes::set(total_library_bytes.to_string()), + total_local_bytes_used::set(total_bytes_used.to_string()), + total_local_bytes_capacity::set(total_capacity.to_string()), + total_local_bytes_free::set(available_capacity.to_string()), + total_library_preview_media_bytes::set(thumbnail_folder_size.to_string()), + ]; + + let stats = library + .db + .statistics() + .upsert( + // Each library is a database so only one of these ever exists + statistics::id::equals(1), + statistics::create(params.clone()), + params, + ) + .exec() + .await?; + + info!(?stats, "Updated library statistics;"); + + invalidate_query!(&library, "library.statistics"); + + Ok(stats) +} diff --git a/core/src/llm/README.md b/core/src/llm/README.md new file mode 100644 index 000000000000..d1a70bf2fbf2 --- /dev/null +++ b/core/src/llm/README.md @@ -0,0 +1,211 @@ +# AI Engine + +The AI Engine is a core module of Spacedrive that provides intelligent file system operations and context-aware assistance through configurable LLM-powered agents. It uses a flexible RON-based configuration system to define agent behaviors, tools, and workflows. 
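
To make the RON-to-Rust mapping concrete, here is a minimal sketch of how a `model` section like the ones shown below could deserialize into the `ModelConfig` struct this patch declares in `core/src/llm/mod.rs`. It is only an illustration: it assumes the `ron` and `serde` crates and a standalone copy of the struct, and the module's own `ConfigParser` may wire this up differently.

```rust
// Minimal sketch: deserializing a RON model section into a ModelConfig-shaped struct.
// Assumes `ron` and `serde` (with the `derive` feature) as dependencies; the actual
// ConfigParser in core/src/llm/config may differ.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct ModelConfig {
    provider: String,
    name: String,
    temperature: f32,
    max_tokens: usize,
    system_prompt: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let source = r#"(
        provider: "ollama",
        name: "llama3.1-70b-instruct",
        temperature: 0.7,
        max_tokens: 4096,
        system_prompt: "You are a helpful AI assistant.",
    )"#;

    // RON's anonymous struct syntax maps field-for-field onto the derived struct.
    let model: ModelConfig = ron::from_str(source)?;
    println!("using {} via {}", model.name, model.provider);
    Ok(())
}
```

The same pattern extends to the full agent configuration: each nested RON block corresponds to one of the `AgentConfig` fields (`tools`, `workflow`, `memory`, `prompts`, `validation`) described in the sections that follow.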
+ +## Overview + +The AI Engine module enables Spacedrive to perform intelligent operations on the file system by: + +- Processing natural language queries about files and directories +- Understanding file context and relationships +- Executing complex file operations through LLM reasoning +- Maintaining conversational context about file operations + +## Architecture + +### Core Components + +``` +ai-engine/ +├── agents/ +│ ├── directory_agent.rs # File system navigation agent +│ ├── context_agent.rs # Content understanding agent +│ └── mod.rs +├── tools/ +│ ├── spacedrive_fs.rs # File system operations +│ ├── content_analyzer.rs # File content analysis +│ └── mod.rs +├── config/ +│ ├── parser.rs # RON configuration parser +│ ├── validator.rs # Configuration validation +│ └── templates/ +│ └── directory_agent.ron +├── memory/ +│ ├── conversation.rs # Conversation state management +│ └── storage.rs # Memory backend implementations +└── mod.rs +``` + +### Agent Configuration + +Agents are configured using RON (Rust Object Notation) files that define: + +- Model parameters and provider settings +- Available tools and their parameters +- Workflow execution strategies +- Memory management +- Prompt templates +- Validation rules + +Example configuration for the Directory Agent: + +```ron +( + agent: ( + name: "DirectoryAgent", + description: "File system navigation and context evaluation agent", + model: ( + provider: "ollama", + name: "llama3.1-70b-instruct", + // ... other model settings + ), + // ... tool definitions, workflow config, etc. + ) +) +``` + +## Usage + +### Basic Integration + +```rust +use spacedrive_core::ai_engine::{Agent, AgentConfig}; + +async fn create_directory_agent() -> Result { + // Load configuration from RON file + let config = AgentConfig::from_file("config/directory_agent.ron")?; + + // Initialize agent + let agent = Agent::new(config).await?; + + // Execute queries + let response = agent.execute("Find all images in the Downloads folder").await?; + + Ok(agent) +} +``` + +### Custom Tool Implementation + +```rust +use spacedrive_core::ai_engine::tools::{Tool, ToolResult}; + +#[derive(Debug)] +struct SpacedriveFs; + +#[async_trait] +impl Tool for SpacedriveFs { + async fn execute(&self, params: HashMap) -> ToolResult { + // Implement file system operations + // ... + } +} +``` + +## Memory Management + +The AI Engine supports different memory backends: + +- In-Memory (default): Temporary storage for the session +- Redis: Distributed memory storage +- PostgreSQL: Persistent conversation history + +Configure memory settings in the agent's RON file: + +```ron +memory: ( + type: "Conversational", + storage: ( + type: "Redis", + connection_string: Some("redis://localhost:6379"), + ttl_seconds: Some(3600), + ), +) +``` + +## Error Handling + +The module implements comprehensive error handling: + +- Automatic retries with exponential backoff +- Fallback responses for failed operations +- Detailed error logging and reporting + +Configuration example: + +```ron +error_strategy: ( + max_retries: 3, + backoff_seconds: 2, + fallback_response: "Operation failed, please try again.", +) +``` + +## Development + +### Adding New Tools + +1. Create a new tool implementation in `tools/`: + +```rust +#[derive(Debug)] +pub struct NewTool; + +#[async_trait] +impl Tool for NewTool { + async fn execute(&self, params: HashMap) -> ToolResult { + // Tool implementation + } +} +``` + +2. 
Add tool configuration to agent RON file: + +```ron +tools: [ + ( + name: "new_tool", + description: "Description of the new tool", + required_params: [ + // Parameter definitions + ], + ), +] +``` + +### Testing + +Run the test suite: + +```bash +cargo test -p spacedrive-core ai_engine +``` + +Run specific agent tests: + +```bash +cargo test -p spacedrive-core ai_engine::agents::directory_agent +``` + +## Contributing + +When contributing to the AI Engine: + +1. Ensure all new tools implement the `Tool` trait +2. Add appropriate tests for new functionality +3. Update RON schema documentation +4. Follow Rust best practices and Spacedrive's coding style + +## Future Developments + +Planned features: + +- [ ] Additional LLM provider integrations +- [ ] Enhanced file content understanding +- [ ] Improved memory management systems +- [ ] Multi-agent collaboration +- [ ] Custom tool development framework + +## License + +This module is part of Spacedrive and is licensed under the same terms as the main project. diff --git a/core/src/llm/mod.rs b/core/src/llm/mod.rs new file mode 100644 index 000000000000..916e984374e3 --- /dev/null +++ b/core/src/llm/mod.rs @@ -0,0 +1,76 @@ +/// LLM Engine +/// +/// This module contains the LLM engine for the Spacedrive core. +/// It is responsible for generating and executing AI tasks. +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use std::collections::HashMap; +use thiserror::Error; + +pub mod agents; +pub mod config; +pub mod memory; +pub mod tools; + +// Core error type for LLM operations +#[derive(Error, Debug)] +pub enum LLMError { + #[error("Configuration error: {0}")] + Config(String), + #[error("Model error: {0}")] + Model(String), + #[error("Tool execution error: {0}")] + Tool(String), + #[error("Memory error: {0}")] + Memory(String), +} + +// Result type alias for LLM operations +pub type Result = std::result::Result; + +// Core agent configuration structures +#[derive(Debug, Deserialize, Serialize)] +pub struct AgentConfig { + pub name: String, + pub description: String, + pub model: ModelConfig, + pub tools: Vec, + pub workflow: WorkflowConfig, + pub memory: MemoryConfig, + pub prompts: PromptTemplates, + pub validation: ValidationConfig, +} + +#[derive(Debug, Deserialize, Serialize)] +pub struct ModelConfig { + pub provider: String, + pub name: String, + pub temperature: f32, + pub max_tokens: usize, + pub system_prompt: String, +} + +// Core agent trait +#[async_trait] +pub trait Agent: Send + Sync { + async fn execute(&self, query: &str) -> Result; + async fn load_tools(&mut self) -> Result<()>; + async fn initialize_memory(&mut self) -> Result<()>; +} + +// Core tool trait +#[async_trait] +pub trait Tool: Send + Sync { + async fn execute( + &self, + params: HashMap, + ) -> Result; + fn name(&self) -> &str; + fn description(&self) -> &str; +} + +// Re-exports +pub use self::agents::DirectoryAgent; +pub use self::config::ConfigParser; +pub use self::memory::MemoryManager; +pub use self::tools::SpacedriveFs; diff --git a/core/src/llm/processes/example.ron b/core/src/llm/processes/example.ron new file mode 100644 index 000000000000..f669f44aa73d --- /dev/null +++ b/core/src/llm/processes/example.ron @@ -0,0 +1,105 @@ +// Agent Configuration Schema +( + // Core agent configuration + agent: ( + // The directory agent is used to navigate the file system and evaluate context + name: "DirectoryAgent", + description: "A configurable LLM-powered agent that can navigate the file system and evaluate context", + model: ( + provider: 
"ollama", // or "openai", "ollama", etc. + name: "llama3.1-70b-instruct", + temperature: 0.7, + max_tokens: 4096, + system_prompt: "{{system.ron}}", + ), + + // Tool definitions + tools: [ + ( + name: "spacedrive_fs", + description: "Search the file system for files and directories", + required_params: [ + ( + name: "path", + type: "String", + description: "The path to search for", + ), + ], + optional_params: [ + ( + name: "num_results", + type: "Integer", + default: 5, + description: "Number of results to return", + ), + ], + ), + ], + + // Workflow configuration + workflow: ( + // How the agent should process tasks + execution_strategy: "Sequential", // or "Parallel", "Adaptive" + max_steps: 10, + timeout_seconds: 300, + + // Error handling + error_strategy: ( + max_retries: 3, + backoff_seconds: 2, + fallback_response: "I encountered an error and couldn't complete the task.", + ), + ), + + // Memory configuration + memory: ( + type: "Conversational", // or "Episodic", "Semantic" + storage: ( + type: "InMemory", // or "Redis", "Postgres" + connection_string: None, + ttl_seconds: Some(3600), + ), + max_history_messages: 10, + ), + + // Prompt templates for different agent states + prompts: ( + task_planning: r#" + Given the following task: {task} + And access to these tools: {available_tools} + Create a plan to accomplish this task. + Plan: + "#, + + tool_selection: r#" + Based on the current step: {current_step} + And available tools: {available_tools} + Select the most appropriate tool. + Reasoning: + "#, + + result_synthesis: r#" + Given the tool results: {tool_results} + And the original task: {original_task} + Synthesize a response. + Response: + "#, + ), + + // Validation rules + validation: ( + required_fields: [ + "model.provider", + "model.name", + "workflow.execution_strategy", + ], + custom_validators: [ + ( + name: "validate_temperature", + condition: "model.temperature >= 0.0 && model.temperature <= 1.0", + error_message: "Temperature must be between 0.0 and 1.0", + ), + ], + ), + ), +) diff --git a/core/src/llm/processes/system.ron b/core/src/llm/processes/system.ron new file mode 100644 index 000000000000..737aa5b601c6 --- /dev/null +++ b/core/src/llm/processes/system.ron @@ -0,0 +1,5 @@ +( + system: ( + prompt: "You are a helpful AI assistant with access to various tools.", + ) +) diff --git a/core/src/location/error.rs b/core/src/location/error.rs index 3e0f2b80ea3f..21779bd86ace 100644 --- a/core/src/location/error.rs +++ b/core/src/location/error.rs @@ -80,6 +80,8 @@ pub enum LocationError { InvalidScanStateValue(i32), #[error(transparent)] Sync(#[from] sd_core_sync::Error), + #[error("other error: {0}")] + Other(String), } impl From for rspc::Error { diff --git a/core/src/location/mod.rs b/core/src/location/mod.rs index e89639285267..35e47f18577c 100644 --- a/core/src/location/mod.rs +++ b/core/src/location/mod.rs @@ -166,6 +166,7 @@ impl LocationCreateArgs { let uuid = Uuid::now_v7(); let location = create_location( + node, library, uuid, &self.path, @@ -249,6 +250,7 @@ impl LocationCreateArgs { let uuid = Uuid::now_v7(); let location = create_location( + node, library, uuid, &self.path, @@ -705,6 +707,7 @@ pub(crate) fn normalize_path(path: impl AsRef) -> io::Result<(String, Stri } async fn create_location( + _node: &Node, library @ Library { db, sync, .. 
}: &Library, location_pub_id: Uuid, location_path: impl AsRef, @@ -733,6 +736,35 @@ async fn create_location( return Ok(None); } + // let library_arc = Arc::new(*library); + // // Track the volume before creating the location + // // Get the volume fingerprint for the location path + // let system_volumes = node + // .volumes + // .list_system_volumes(library_arc) + // .await + // .map_err(|e| { + // warn!("Failed to list system volumes: {}", e); + // LocationError::Other(e.to_string()) + // })?; + + // for volume in system_volumes { + // if let Some(mount_point) = volume.mount_point.as_ref() { + // if location_path.starts_with(mount_point) { + // // Track this volume since we're creating a location on it + // if let Err(e) = node + // .volumes + // .track_volume(volume.fingerprint, library.clone()) + // .await + // { + // warn!("Failed to track volume for new location: {}", e); + // // Continue with location creation even if volume tracking fails + // } + // break; + // } + // } + // } + let (sync_values, mut db_params) = [ sync_db_entry!(&name, location::name), sync_db_entry!(path, location::path), diff --git a/core/src/volume/actor.rs b/core/src/volume/actor.rs index 60fb456ab0db..fcb0b99cdaab 100644 --- a/core/src/volume/actor.rs +++ b/core/src/volume/actor.rs @@ -450,17 +450,45 @@ impl VolumeManagerActor { let state = self.state.write().await; let device_pub_id = self.ctx.device_id.clone(); - // Find the volume in our current system volumes let mut registry = state.registry.write().await; let mut volume = match registry.get_volume_mut(&fingerprint) { Some(v) => v.clone(), None => return Err(VolumeError::InvalidFingerprint(fingerprint.clone())), }; - // Create in database with current device association - volume.create(&library.db, device_pub_id.into()).await?; + // Check for existing .sdvolume file + if let Some(volume_file) = volume.read_volume_file().await? { + // If pub_id exists in database, use that volume record + if let Some(existing_volume) = library + .db + .volume() + .find_unique(volume::pub_id::equals(volume_file.pub_id.clone())) + .exec() + .await? 
+ .map(Volume::from) + { + // Update volume with existing data + volume = Volume::merge_with_db(&volume, &existing_volume); + registry.update_volume(volume.clone()); + } + } + + // Create or update in database with sync + if volume.pub_id.is_none() { + volume = volume + .sync_db_create(&library, device_pub_id.into()) + .await?; + } else { + volume.sync_db_update(&library).await?; + } + + // Write .sdvolume file + volume.write_volume_file().await?; + + // Update registry with final state + registry.update_volume(volume.clone()); - // Spawn a background task to perform the speed test + // Spawn speed test let event_tx = self.event_tx.clone(); let mut volume = volume.clone(); tokio::spawn(async move { diff --git a/core/src/volume/cloud/mod.rs b/core/src/volume/cloud/mod.rs new file mode 100644 index 000000000000..9aa2e92d00e7 --- /dev/null +++ b/core/src/volume/cloud/mod.rs @@ -0,0 +1,58 @@ +use super::{error::VolumeError, types::CloudProvider}; +use async_trait::async_trait; +use serde::{Deserialize, Serialize}; +use specta::Type; + +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +pub struct CloudStorageInfo { + pub total_bytes_capacity: u64, + pub total_bytes_available: u64, + pub quota_info: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +pub struct QuotaInfo { + pub used: u64, + pub allocated: u64, + pub max: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize, Type)] +pub enum CloudCredentials { + OAuth { + access_token: String, + refresh_token: Option, + expires_at: Option, + }, + ApiKey(String), + Custom(serde_json::Value), +} + +#[async_trait] +pub trait CloudVolumeProvider: Send + Sync { + /// Get storage capacity and usage information + async fn get_storage_info(&self) -> Result; + + /// Check if the current credentials are valid + async fn is_authenticated(&self) -> bool; + + /// Attempt to authenticate with the provider + async fn authenticate(&self) -> Result<(), VolumeError>; + + /// Refresh authentication tokens if needed + async fn refresh_token(&self) -> Result<(), VolumeError>; +} + +// Factory function to create provider implementations +pub fn get_cloud_provider( + provider: &CloudProvider, + credentials: CloudCredentials, +) -> Result, VolumeError> { + match provider { + // CloudProvider::GoogleDrive => Ok(Box::new(GoogleDriveProvider::new(credentials))), + // CloudProvider::Dropbox => Ok(Box::new(DropboxProvider::new(credentials))), + // CloudProvider::OneDrive => Ok(Box::new(OneDriveProvider::new(credentials))), + // Add other providers as they're implemented + _ => Err(VolumeError::UnsupportedCloudProvider(provider.clone())), + } +} diff --git a/core/src/volume/error.rs b/core/src/volume/error.rs index ecc0313225ed..14d703814c41 100644 --- a/core/src/volume/error.rs +++ b/core/src/volume/error.rs @@ -117,6 +117,28 @@ pub enum VolumeError { /// Resource exhausted #[error("Resource exhausted: {0}")] ResourceExhausted(String), + + /// Volume is not tracked + #[error("Volume is not tracked")] + NotTracked, + + /// Volume fingerprint is missing + #[error("Volume fingerprint is missing")] + MissingFingerprint, + + /// IO error + #[error("IO error: {0}")] + IoError(std::io::Error), + + /// Serialization error + #[error("Serialization error: {0}")] + SerializationError(serde_json::Error), + + #[error(transparent)] + Sync(#[from] sd_core_sync::Error), + + #[error(transparent)] + Cloud(#[from] CloudVolumeError), } /// Specific kinds of speed test errors @@ -307,3 +329,19 @@ impl fmt::Display for SpeedTestErrorKind { write!(f, "{}", kind_str) } 
} + +#[derive(Debug, Error)] +pub enum CloudVolumeError { + #[error("Authentication failed: {0}")] + AuthenticationError(String), + #[error("Rate limit exceeded")] + RateLimitExceeded, + #[error("Quota exceeded")] + QuotaExceeded, + #[error("API Error: {0}")] + ApiError(String), + #[error("Network Error: {0}")] + NetworkError(String), + // #[error("Unsupported cloud provider: {0}")] + // UnsupportedCloudProvider(CloudProvider), +} diff --git a/core/src/volume/mod.rs b/core/src/volume/mod.rs index 1f7e11861dce..ab3fb4181475 100644 --- a/core/src/volume/mod.rs +++ b/core/src/volume/mod.rs @@ -5,6 +5,7 @@ //! Volumes use a fingerprint to identify them as they sometimes are not persisted in the database //! pub(crate) mod actor; +// pub(crate) mod cloud; mod error; mod os; mod speed; diff --git a/core/src/volume/types.rs b/core/src/volume/types.rs index 91d4cb7430eb..5e529a21ab0f 100644 --- a/core/src/volume/types.rs +++ b/core/src/volume/types.rs @@ -1,11 +1,18 @@ -use super::error::VolumeError; -use crate::volume::speed::SpeedTest; +use super::{ + // cloud::CloudCredentials, + error::{CloudVolumeError, VolumeError}, +}; +use crate::library::Library; use sd_core_sync::DevicePubId; -use sd_prisma::prisma::{ - device, - volume::{self}, - PrismaClient, +use sd_prisma::{ + prisma::{ + device, + volume::{self}, + PrismaClient, + }, + prisma_sync, }; +use sd_sync::*; use serde::{Deserialize, Serialize}; use serde_with::{serde_as, DisplayFromStr}; use specta::Type; @@ -29,7 +36,7 @@ impl VolumeFingerprint { hasher.update(&volume.total_bytes_capacity.to_be_bytes()); hasher.update(volume.file_system.to_string().as_bytes()); // These are all properties that are unique to a volume and unlikely to change - // If a .spacedrive file is found in the volume, and is fingerprint does not match, + // If a `.sdvolume` file is found in the volume, and is fingerprint does not match, // but the `pub_id` is the same, we can update the values and regenerate the fingerprint // preserving the tracked instance of the volume Self(hasher.finalize().as_bytes().to_vec()) @@ -348,6 +355,243 @@ impl Volume { .await?; Ok(()) } + + /// Writes the .sdvolume file to the volume's root + pub async fn write_volume_file(&self) -> Result<(), VolumeError> { + if !self.is_mounted || self.read_only { + return Ok(()); // Skip if volume isn't mounted or is read-only + } + + let fingerprint = self + .fingerprint + .as_ref() + .ok_or(VolumeError::MissingFingerprint)?; + let pub_id = self.pub_id.as_ref().ok_or(VolumeError::NotTracked)?; + + let volume_file = SdVolumeFile { + pub_id: pub_id.clone(), + fingerprint: fingerprint.to_string(), + last_seen: chrono::Utc::now(), + }; + + let path = self.mount_point.join(".sdvolume"); + let file = tokio::fs::File::create(&path) + .await + .map_err(|e| VolumeError::IoError(e))?; + + serde_json::to_writer(file.into_std().await, &volume_file) + .map_err(|e| VolumeError::SerializationError(e))?; + + Ok(()) + } + + /// Reads the .sdvolume file from the volume's root if it exists + pub async fn read_volume_file(&self) -> Result, VolumeError> { + if !self.is_mounted { + return Ok(None); + } + + let path = self.mount_point.join(".sdvolume"); + if !path.exists() { + return Ok(None); + } + + let file = tokio::fs::File::open(&path) + .await + .map_err(|e| VolumeError::IoError(e))?; + + let volume_file = serde_json::from_reader(file.into_std().await) + .map_err(|e| VolumeError::SerializationError(e))?; + + Ok(Some(volume_file)) + } + + pub async fn sync_db_create( + &self, + library: &Library, + device_pub_id: 
Vec, + ) -> Result { + let Library { db, sync, .. } = library; + let pub_id = Uuid::now_v7().as_bytes().to_vec(); + + let device_id = db + .device() + .find_unique(device::pub_id::equals(device_pub_id.clone())) + .select(device::select!({ id })) + .exec() + .await? + .ok_or(VolumeError::DeviceNotFound(device_pub_id))? + .id; + + let (sync_params, db_params) = [ + sync_db_entry!(self.name.clone(), volume::name), + sync_db_entry!( + self.mount_point.to_str().unwrap_or_default().to_string(), + volume::mount_point + ), + sync_db_entry!(self.mount_type.to_string(), volume::mount_type), + sync_db_entry!( + self.total_bytes_capacity.to_string(), + volume::total_bytes_capacity + ), + sync_db_entry!( + self.total_bytes_available.to_string(), + volume::total_bytes_available + ), + sync_db_entry!(self.disk_type.to_string(), volume::disk_type), + sync_db_entry!(self.file_system.to_string(), volume::file_system), + sync_db_entry!(self.is_mounted, volume::is_mounted), + sync_db_entry!( + self.read_speed_mbps.unwrap_or(0) as i64, + volume::read_speed_mbps + ), + sync_db_entry!( + self.write_speed_mbps.unwrap_or(0) as i64, + volume::write_speed_mbps + ), + sync_db_entry!(self.read_only, volume::read_only), + sync_db_entry!( + self.error_status.clone().unwrap_or_default(), + volume::error_status + ), + ] + .into_iter() + .unzip::<_, _, Vec<_>, Vec<_>>(); + + // Add device connection to db_params + let mut db_params = db_params; + db_params.push(volume::device::connect(device::id::equals(device_id))); + + let volume = sync + .write_op( + db, + sync.shared_create( + prisma_sync::volume::SyncId { + pub_id: pub_id.clone(), + }, + sync_params, + ), + db.volume().create(pub_id, db_params), + ) + .await?; + + Ok(volume.into()) + } + + pub async fn sync_db_update(&self, library: &Library) -> Result<(), VolumeError> { + let Library { db, sync, .. } = library; + let pub_id = self.pub_id.as_ref().ok_or(VolumeError::NotTracked)?; + + let (sync_params, db_params) = [ + sync_db_entry!(self.name.clone(), volume::name), + sync_db_entry!( + self.mount_point.to_str().unwrap_or_default().to_string(), + volume::mount_point + ), + sync_db_entry!(self.mount_type.to_string(), volume::mount_type), + sync_db_entry!( + self.total_bytes_capacity.to_string(), + volume::total_bytes_capacity + ), + sync_db_entry!( + self.total_bytes_available.to_string(), + volume::total_bytes_available + ), + sync_db_entry!(self.disk_type.to_string(), volume::disk_type), + sync_db_entry!(self.file_system.to_string(), volume::file_system), + sync_db_entry!(self.is_mounted, volume::is_mounted), + sync_db_entry!( + self.read_speed_mbps.unwrap_or(0) as i64, + volume::read_speed_mbps + ), + sync_db_entry!( + self.write_speed_mbps.unwrap_or(0) as i64, + volume::write_speed_mbps + ), + sync_db_entry!(self.read_only, volume::read_only), + sync_db_entry!( + self.error_status.clone().unwrap_or_default(), + volume::error_status + ), + ] + .into_iter() + .unzip::<_, _, Vec<_>, Vec<_>>(); + + sync.write_op( + db, + sync.shared_update( + prisma_sync::volume::SyncId { + pub_id: pub_id.clone(), + }, + sync_params, + ), + db.volume() + .update(volume::pub_id::equals(pub_id.clone()), db_params), + ) + .await?; + + Ok(()) + } + + pub async fn sync_db_delete(&self, library: &Library) -> Result<(), VolumeError> { + let Library { db, sync, .. 
} = library; + let pub_id = self.pub_id.as_ref().ok_or(VolumeError::NotTracked)?; + + sync.write_op( + db, + sync.shared_delete(prisma_sync::volume::SyncId { + pub_id: pub_id.clone(), + }), + db.volume().delete(volume::pub_id::equals(pub_id.clone())), + ) + .await?; + + Ok(()) + } + + // pub async fn new_cloud_volume( + // provider: CloudProvider, + // credentials: CloudCredentials, + // ) -> Result { + // let provider_impl = match provider { + // // CloudProvider::GoogleDrive => Box::new(GoogleDriveProvider::new(credentials)), + // // CloudProvider::Dropbox => Box::new(DropboxProvider::new(credentials)), + // _ => return Err(CloudVolumeError::UnsupportedCloudProvider(provider)), + // }; + + // let storage_info = provider_impl.get_storage_info().await?; + + // Ok(Self { + // id: None, + // pub_id: None, + // device_id: None, + // name: format!("{} Cloud Storage", provider), + // mount_type: MountType::Cloud(provider), + // mount_point: PathBuf::from("/"), // Virtual root path + // mount_points: vec![], + // is_mounted: true, + // disk_type: DiskType::Virtual, + // file_system: FileSystem::Cloud, + // read_only: false, + // error_status: None, + // read_speed_mbps: None, + // write_speed_mbps: None, + // total_bytes_capacity: storage_info.total_bytes_capacity, + // total_bytes_available: storage_info.total_bytes_available, + // fingerprint: None, + // }) + // } + + // pub async fn refresh_cloud_storage_info(&mut self) -> Result<(), VolumeError> { + // if let MountType::Cloud(provider) = &self.mount_type { + // let provider_impl = get_cloud_provider(provider)?; + // let storage_info = provider_impl.get_storage_info().await?; + + // self.total_bytes_capacity = storage_info.total_bytes_capacity; + // self.total_bytes_available = storage_info.total_bytes_available; + // } + // Ok(()) + // } } /// Represents the type of physical storage device @@ -358,6 +602,8 @@ pub enum DiskType { SSD, /// Hard Disk Drive HDD, + /// Virtual disk type + Virtual, /// Unknown or virtual disk type Unknown, } @@ -413,6 +659,8 @@ pub enum MountType { Network, /// Virtual/container volume Virtual, + // Cloud mounted as a virtual volume + Cloud(CloudProvider), } impl MountType { @@ -427,6 +675,29 @@ impl MountType { } } +/// Represents the cloud storage provider +#[derive(Serialize, Deserialize, Debug, Clone, Type, Hash, PartialEq, Eq, Display)] +pub enum CloudProvider { + SpacedriveCloud, + GoogleDrive, + Dropbox, + OneDrive, + ICloud, + AmazonS3, + Mega, + Box, + pCloud, + Proton, + Sync, + Backblaze, + Wasabi, + DigitalOcean, + Azure, + OwnCloud, + NextCloud, + WebDAV, +} + /// Configuration options for volume operations #[derive(Debug, Clone)] pub struct VolumeOptions { @@ -472,3 +743,10 @@ impl<'de> Deserialize<'de> for VolumeFingerprint { .map_err(serde::de::Error::custom) } } + +#[derive(Serialize, Deserialize, Debug)] +pub struct SdVolumeFile { + pub pub_id: Vec, + pub fingerprint: String, // Store as hex string + pub last_seen: chrono::DateTime, +} diff --git a/interface/app/$libraryId/404.tsx b/interface/app/$libraryId/404.tsx index 6ac16cc5cea3..c6302e638df1 100644 --- a/interface/app/$libraryId/404.tsx +++ b/interface/app/$libraryId/404.tsx @@ -13,10 +13,23 @@ export const Component = () => { role="alert" className="flex size-full flex-col items-center justify-center rounded-lg p-4" > -

Error: 404

+

+ Spacedrive is in beta! +

There's nothing here.

- Its likely that this page has not been built yet, if so we're on it! + This is most likely a bug, please report it to us on{' '} + + Discord + {' '} + or{' '} + + GitHub + + .

+ ))} +
+ + ); +}; + +export default DeviceList; diff --git a/interface/app/$libraryId/overview/cards/FavoriteItems.tsx b/interface/app/$libraryId/overview/cards/FavoriteItems.tsx new file mode 100644 index 000000000000..2acbc13be064 --- /dev/null +++ b/interface/app/$libraryId/overview/cards/FavoriteItems.tsx @@ -0,0 +1,27 @@ +import { useLibraryQuery } from '@sd/client'; + +import { ItemsCard } from './ItemsCard'; + +const FavoriteItems = () => { + const favoriteItemsQuery = useLibraryQuery([ + 'search.objects', + { + take: 6, + orderAndPagination: { + orderOnly: { field: 'dateAccessed', value: 'Desc' } + }, + filters: [{ object: { favorite: true } }] + } + ]); + + return ( + + ); +}; + +export default FavoriteItems; diff --git a/interface/app/$libraryId/overview/FileKindStats.tsx b/interface/app/$libraryId/overview/cards/FileKindStats.tsx similarity index 67% rename from interface/app/$libraryId/overview/FileKindStats.tsx rename to interface/app/$libraryId/overview/cards/FileKindStats.tsx index d5b2dd07ec6b..43fb702a4454 100644 --- a/interface/app/$libraryId/overview/FileKindStats.tsx +++ b/interface/app/$libraryId/overview/cards/FileKindStats.tsx @@ -13,15 +13,14 @@ import { import { Card, Loader, Tooltip } from '@sd/ui'; import { useIsDark, useLocale } from '~/hooks'; -import { FileKind } from '.'; +import { FileKind, OverviewCard } from '..'; const INFO_ICON_CLASSLIST = 'inline size-3 text-ink-faint opacity-0 ml-1 transition-opacity duration-300 group-hover:opacity-70'; const TOTAL_FILES_CLASSLIST = 'flex items-center justify-between whitespace-nowrap text-sm font-medium text-ink-dull mt-2 px-1 font-plex'; -const UNIDENTIFIED_FILES_CLASSLIST = 'relative flex items-center text-xs font-plex text-ink-faint'; const BARS_CONTAINER_CLASSLIST = - 'relative mx-2.5 grid grow grid-cols-[repeat(auto-fit,_minmax(0,_1fr))] grid-rows-[136px_12px] font-plex tracking-wide items-end justify-items-center gap-x-1.5 gap-y-1 self-stretch'; + 'relative mt-[-50px] flex grow flex-wrap items-end gap-1 self-stretch'; const mapFractionalValue = (numerator: bigint, denominator: bigint, maxValue: bigint): string => { if (denominator === 0n) return '0'; @@ -73,10 +72,10 @@ const FileKindStats: React.FC = () => { const [fileKinds, setFileKinds] = useState>(new Map()); const [cardWidth, setCardWidth] = useState(0); const [loading, setLoading] = useState(true); - const containerRef = useRef(null); + const barsContainerRef = useRef(null); const iconsRef = useRef<{ [key: string]: HTMLImageElement }>({}); - const BAR_MAX_HEIGHT = 115n; + const BAR_MAX_HEIGHT = 130n; const BAR_COLOR_START = '#36A3FF'; const BAR_COLOR_END = '#004C99'; @@ -106,35 +105,26 @@ const FileKindStats: React.FC = () => { }; const handleResize = useCallback(() => { - if (containerRef.current) { - const factor = window.innerWidth > 1500 ? 
0.35 : 0.4; - setCardWidth(window.innerWidth * factor); + if (barsContainerRef.current) { + const width = barsContainerRef.current.getBoundingClientRect().width; + setCardWidth(width); } }, []); useEffect(() => { - window.addEventListener('resize', handleResize); handleResize(); + window.addEventListener('resize', handleResize); - const containerElement = containerRef.current; - if (containerElement) { - const observer = new MutationObserver(handleResize); - observer.observe(containerElement, { - attributes: true, - childList: true, - subtree: true, - attributeFilter: ['style'] - }); - - return () => { - observer.disconnect(); - }; + const resizeObserver = new ResizeObserver(handleResize); + if (barsContainerRef.current) { + resizeObserver.observe(barsContainerRef.current); } return () => { window.removeEventListener('resize', handleResize); + resizeObserver.disconnect(); }; - }, [handleResize, fileKinds]); + }, [handleResize]); useEffect(() => { if (data) { @@ -203,22 +193,30 @@ const FileKindStats: React.FC = () => { navigate(path); }; + const getVisibleFileKinds = useCallback(() => { + if (cardWidth === 0) return sortedFileKinds; + const barWidth = 32; + const gapWidth = 4; + const maxBars = Math.max(1, Math.floor((cardWidth + gapWidth) / (barWidth + gapWidth))); + return sortedFileKinds.slice(0, maxBars); + }, [cardWidth, sortedFileKinds]); + return ( -
- - {loading ? ( -
-
- -

{t('fetching_file_kind_statistics')}

-
+ <> + {loading ? ( +
+
+ +

{t('fetching_file_kind_statistics')}

- ) : ( - <> -
+
+ ) : ( + <> +
+
+ +
+
{ ? formatNumberWithCommas(data.total_identified_files) : '0'}{' '} -
- {t('total_files')} - -
+ {t('total_files')}
-
+
{data?.total_unidentified_files @@ -248,72 +243,72 @@ const FileKindStats: React.FC = () => {
-
- {sortedFileKinds.map((fileKind, index) => { - const iconImage = iconsRef.current[fileKind.name]; - const barColor = interpolateHexColor( - BAR_COLOR_START, - BAR_COLOR_END, - index / (barCount - 1) - ); +
+
+ {getVisibleFileKinds().map((fileKind, index) => { + const iconImage = iconsRef.current[fileKind.name]; + const barColor = interpolateHexColor( + BAR_COLOR_START, + BAR_COLOR_END, + index / (getVisibleFileKinds().length - 1) + ); - const barHeight = - mapFractionalValue( - fileKind.count, - maxFileCount, - BAR_MAX_HEIGHT - ) + 'px'; + const barHeight = + mapFractionalValue(fileKind.count, maxFileCount, BAR_MAX_HEIGHT) + + 'px'; - return ( - <> - + +
-
- {iconImage && ( - {fileKind.name} - )} - -
- -
- {formatCount(fileKind.count)} + {iconImage && ( + {fileKind.name} + )} +
- - ); - })} -
- - )} - -
+ +
+ {formatCount(fileKind.count)} +
+
+ ); + })} +
+ + )} + ); }; diff --git a/interface/app/$libraryId/overview/cards/ItemsCard.tsx b/interface/app/$libraryId/overview/cards/ItemsCard.tsx new file mode 100644 index 000000000000..378e437d7f76 --- /dev/null +++ b/interface/app/$libraryId/overview/cards/ItemsCard.tsx @@ -0,0 +1,116 @@ +import { UseQueryResult } from '@tanstack/react-query'; +import { useNavigate } from 'react-router'; +import { ObjectOrder, objectOrderingKeysSchema, useExplorerLayoutStore } from '@sd/client'; +import { Button } from '@sd/ui'; +import { useLocale } from '~/hooks'; + +import { OverviewCard } from '..'; +import { ExplorerContextProvider } from '../../Explorer/Context'; +import { createDefaultExplorerSettings } from '../../Explorer/store'; +import { useExplorer, useExplorerSettings } from '../../Explorer/useExplorer'; +import { uniqueId } from '../../Explorer/util'; +import { ExplorerViewContext } from '../../Explorer/View/Context'; +import { SimpleGridItem } from '../../Explorer/View/Grid/SimpleGridItem'; +import { GridViewItem } from '../../Explorer/View/GridView/Item'; +import HorizontalScroll from '../Layout/HorizontalScroll'; + +interface ItemsCardProps { + title: string; + query: UseQueryResult<{ items: any[] }>; + buttonText: string; + buttonLink: string; + maxItems?: number; +} + +export const ItemsCard = ({ + title, + query, + buttonText, + buttonLink, + maxItems = 20 +}: ItemsCardProps) => { + const navigate = useNavigate(); + const { t } = useLocale(); + const layoutStore = useExplorerLayoutStore(); + + const explorerSettings = useExplorerSettings({ + settings: { + ...createDefaultExplorerSettings({ + order: { field: 'dateAccessed', value: 'Desc' } + }), + gridItemSize: 80, + gridGap: 9 + }, + orderingKeys: objectOrderingKeysSchema + }); + + const items = query.data?.items ?? []; + const displayItems = items.slice(0, maxItems); + + const explorer = useExplorer({ + items: displayItems, + settings: explorerSettings, + isFetching: query.isLoading, + isFetchingNextPage: false + }); + + const itemDetailsHeight = + (layoutStore.showTags ? 60 : 44) + + (explorerSettings.settingsStore.showBytesInGridView ? 20 : 0); + const itemHeight = explorerSettings.settingsStore.gridItemSize + itemDetailsHeight; + + return ( + <> + + -1, + getFirstActiveItemIndex: () => -1, + updateActiveItem: () => {}, + updateFirstActiveItem: () => {}, + handleWindowsGridShiftSelection: () => {} + }} + > + +
+ {displayItems.map((item) => ( +
+ + {({ selected, cut }) => ( + + )} + +
+ ))} +
+
+
+
+ {/* */} + + ); +}; diff --git a/interface/app/$libraryId/overview/LibraryStats.tsx b/interface/app/$libraryId/overview/cards/LibraryStats.tsx similarity index 94% rename from interface/app/$libraryId/overview/LibraryStats.tsx rename to interface/app/$libraryId/overview/cards/LibraryStats.tsx index 780f2fe88f10..18478ccfb502 100644 --- a/interface/app/$libraryId/overview/LibraryStats.tsx +++ b/interface/app/$libraryId/overview/cards/LibraryStats.tsx @@ -14,8 +14,8 @@ import { Card, Loader, Tooltip } from '@sd/ui'; import i18n from '~/app/I18n'; import { useCounter, useIsDark, useLocale } from '~/hooks'; -import { FileKind } from '.'; -import StorageBar from './StorageBar'; +import { FileKind, OverviewCard } from '..'; +import StorageBar from '../StorageBar'; interface StatItemProps { title: string; @@ -65,7 +65,7 @@ const StatItem = ({ title, bytes, isLoading, info }: StatItemProps) => { return (
@@ -194,9 +194,8 @@ const LibraryStats = () => { tooltip: `${humanizeSize(otherTotalBytes).value} ${t(`size_${humanizeSize(otherTotalBytes).unit.toLowerCase()}`)}` } ]; - return ( - + <> {loading ? (
@@ -205,8 +204,8 @@ const LibraryStats = () => {
) : ( - <> -
+
+
{Object.entries(libraryStats ?? {}) .sort( ([a], [b]) => @@ -231,12 +230,10 @@ const LibraryStats = () => { ); })}
-
- -
- + +
)} - + ); }; diff --git a/interface/app/$libraryId/overview/cards/RecentItems.tsx b/interface/app/$libraryId/overview/cards/RecentItems.tsx new file mode 100644 index 000000000000..99a64d6c4d55 --- /dev/null +++ b/interface/app/$libraryId/overview/cards/RecentItems.tsx @@ -0,0 +1,27 @@ +import { ObjectOrder, objectOrderingKeysSchema, useLibraryQuery } from '@sd/client'; + +import { ItemsCard } from './ItemsCard'; + +const RecentFiles = () => { + const recentItemsQuery = useLibraryQuery([ + 'search.objects', + { + take: 20, + orderAndPagination: { + orderOnly: { field: 'dateAccessed', value: 'Desc' } + }, + filters: [{ object: { dateAccessed: { from: new Date(0).toISOString() } } }] + } + ]); + + return ( + + ); +}; + +export default RecentFiles; diff --git a/interface/app/$libraryId/overview/cards/RecentLocations.tsx b/interface/app/$libraryId/overview/cards/RecentLocations.tsx new file mode 100644 index 000000000000..e1f4888067d3 --- /dev/null +++ b/interface/app/$libraryId/overview/cards/RecentLocations.tsx @@ -0,0 +1,94 @@ +import { FolderDashed } from '@phosphor-icons/react'; +import { keepPreviousData } from '@tanstack/react-query'; +import { useMemo } from 'react'; +import { useNavigate } from 'react-router'; +import { arraysEqual, Device, humanizeSize, useLibraryQuery, useOnlineLocations } from '@sd/client'; +import { Button, buttonStyles, Card, Tooltip } from '@sd/ui'; +import { Icon as SdIcon } from '~/components'; +import { useLocale } from '~/hooks'; + +import { OverviewCard } from '..'; +import { AddLocationButton } from '../../settings/library/locations/AddLocationButton'; + +const RecentLocations = () => { + const navigate = useNavigate(); + const { t } = useLocale(); + const onlineLocations = useOnlineLocations(); + + const devicesQuery = useLibraryQuery(['devices.list'], { + // placeholderData: keepPreviousData + }); + // eslint-disable-next-line react-hooks/exhaustive-deps + const devices = devicesQuery.data ?? []; + + const devicesAsHashmap = useMemo(() => { + return devices.reduce( + (acc, device) => { + acc[device.id] = device; + return acc; + }, + {} as Record + ); + }, [devices]); + + const locationsQuery = useLibraryQuery(['locations.list'], { + // placeholderData: keepPreviousData + }); + const locations = locationsQuery.data ?? []; + + return ( + <> +
+ {locations.length > 0 ? ( + locations.slice(0, 6).map((location) => ( + + )) + ) : ( +
No locations found
+ )} + +
+ + ); +}; + +export default RecentLocations; diff --git a/interface/app/$libraryId/overview/cards/SpaceWizard.tsx b/interface/app/$libraryId/overview/cards/SpaceWizard.tsx new file mode 100644 index 000000000000..d78f9b137fea --- /dev/null +++ b/interface/app/$libraryId/overview/cards/SpaceWizard.tsx @@ -0,0 +1,46 @@ +import { Desktop, FilePlus, MagicWand } from '@phosphor-icons/react'; +import { useNavigate } from 'react-router'; +import { Device, HardwareModel, useLibraryQuery } from '@sd/client'; +import { Button, buttonStyles, TextArea, Tooltip } from '@sd/ui'; +import { Icon, Icon as SdIcon } from '~/components'; +import { useLocale } from '~/hooks'; +import { hardwareModelAsNumberToIcon, hardwareModelToIcon } from '~/util/hardware'; + +import StarfieldEffect from '../../peer/StarfieldEffect'; + +const SpaceWizard = () => { + const navigate = useNavigate(); + const { t } = useLocale(); + + return ( + <> +
+