// Source: flalingo/src-tauri/src/repositories/repository_manager.rs
// (389 lines, 13 KiB, Rust) — file-browser header retained as a comment so the file compiles.

use sqlx::sqlite::SqlitePool;
use super::{
exercise_repository::ExerciseRepository, metadata_repository::MetadataRepository,
node_repository::NodeRepository, path_repository::PathRepository,
};
/// Repository manager that coordinates access to all repositories
/// and provides a single entry point for database operations
pub struct RepositoryManager<'a> {
    // Shared connection pool; every repository below borrows this same pool,
    // so the manager cannot outlive it (hence the 'a lifetime).
    pool: &'a SqlitePool,
    // Per-table repositories, all constructed over `pool` in `new`.
    path_repo: PathRepository<'a>,
    metadata_repo: MetadataRepository<'a>,
    node_repo: NodeRepository<'a>,
    exercise_repo: ExerciseRepository<'a>,
}
impl<'a> RepositoryManager<'a> {
    /// Build a manager whose repositories all share the given pool.
    pub fn new(pool: &'a SqlitePool) -> Self {
        Self {
            pool,
            path_repo: PathRepository::new(pool),
            metadata_repo: MetadataRepository::new(pool),
            node_repo: NodeRepository::new(pool),
            exercise_repo: ExerciseRepository::new(pool),
        }
    }

    /// Get the path repository
    pub fn paths(&self) -> &PathRepository<'a> {
        &self.path_repo
    }

    /// Get the metadata repository
    pub fn metadata(&self) -> &MetadataRepository<'a> {
        &self.metadata_repo
    }

    /// Get the node repository
    pub fn nodes(&self) -> &NodeRepository<'a> {
        &self.node_repo
    }

    /// Get the exercises repository
    pub fn exercises(&self) -> &ExerciseRepository<'a> {
        &self.exercise_repo
    }

    /// Get the database pool
    pub fn pool(&self) -> &SqlitePool {
        self.pool
    }

    /// Check database health by performing a simple query.
    ///
    /// Returns `Ok(true)` when `SELECT 1` yields a row (it always does on a
    /// healthy connection); any driver error is mapped to a descriptive string.
    pub async fn health_check(&self) -> Result<bool, String> {
        let result = sqlx::query("SELECT 1")
            .fetch_optional(self.pool)
            .await
            .map_err(|e| format!("Database health check failed: {}", e))?;
        Ok(result.is_some())
    }

    /// Begin a database transaction
    /// This is useful for operations that need to be atomic across multiple repositories
    pub async fn begin_transaction(&self) -> Result<sqlx::Transaction<'_, sqlx::Sqlite>, String> {
        self.pool
            .begin()
            .await
            .map_err(|e| format!("Failed to begin transaction: {}", e))
    }

    /// Count the rows of one table.
    ///
    /// `table` must be a trusted, hard-coded identifier: SQLite cannot bind
    /// identifiers as query parameters, so the name is interpolated into the
    /// SQL text. `label` is the human-readable plural used in error messages.
    async fn count_rows(&self, table: &str, label: &str) -> Result<i64, String> {
        let sql = format!("SELECT COUNT(*) FROM {}", table);
        let count: (i64,) = sqlx::query_as(&sql)
            .fetch_one(self.pool)
            .await
            .map_err(|e| format!("Failed to count {}: {}", label, e))?;
        Ok(count.0)
    }

    /// Get database statistics (row counts for every managed table).
    pub async fn get_stats(&self) -> Result<DatabaseStats, String> {
        Ok(DatabaseStats {
            path_count: self.count_rows("path", "paths").await?,
            node_count: self.count_rows("node", "nodes").await?,
            exercise_count: self.count_rows("exercise", "exercises").await?,
            metadata_count: self.count_rows("pathMetadata", "metadata").await?,
        })
    }
}
/// Database statistics structure
#[derive(Debug, Clone)]
pub struct DatabaseStats {
    pub path_count: i64,
    pub node_count: i64,
    pub exercise_count: i64,
    pub metadata_count: i64,
}

impl DatabaseStats {
    /// Sum of every row count tracked by these statistics.
    pub fn total_records(&self) -> i64 {
        let counts = [
            self.path_count,
            self.node_count,
            self.exercise_count,
            self.metadata_count,
        ];
        counts.iter().sum()
    }

    /// True when no table holds any records at all.
    pub fn is_empty(&self) -> bool {
        self.total_records() == 0
    }
}
impl<'a> RepositoryManager<'a> {
    // Advanced operations combining multiple repositories.

    /// Import a path from a JSON string with full validation.
    /// Returns the message produced by the JSON utility on success.
    pub async fn import_path_from_json(&self, json_content: &str) -> Result<String, String> {
        let json_utils = super::path_json_utils::PathJsonUtils::new(&self.path_repo);
        json_utils.import_from_json(json_content).await
    }

    /// Export a path to a JSON string.
    pub async fn export_path_to_json(&self, path_id: i32) -> Result<String, String> {
        let json_utils = super::path_json_utils::PathJsonUtils::new(&self.path_repo);
        json_utils.export_to_json(path_id).await
    }

    /// Clone a path with all its dependencies (delegated to the path repository).
    pub async fn clone_path_complete(
        &self,
        source_path_id: i32,
        new_path_id: &str,
        new_title: &str,
    ) -> Result<String, String> {
        self.path_repo
            .clone_path(source_path_id, new_path_id, new_title)
            .await
    }

    /// Get comprehensive statistics (node/exercise counts, per-type tallies)
    /// for a single path.
    pub async fn get_path_statistics(&self, path_id: i32) -> Result<PathStatistics, String> {
        let path = self.path_repo.get_path_by_id(path_id).await?;
        let total_exercises = path.nodes.iter().map(|n| n.exercises.len()).sum();
        // Tally exercises by type across every node of the path.
        let exercise_types: std::collections::HashMap<String, usize> = path
            .nodes
            .iter()
            .flat_map(|n| &n.exercises)
            .fold(std::collections::HashMap::new(), |mut acc, ex| {
                *acc.entry(ex.ex_type.clone()).or_insert(0) += 1;
                acc
            });
        // Guard the division so an empty path yields 0.0 instead of NaN.
        let avg_exercises_per_node = if path.nodes.is_empty() {
            0.0
        } else {
            total_exercises as f64 / path.nodes.len() as f64
        };
        Ok(PathStatistics {
            path_id: path.id,
            title: path.title,
            description: path.description,
            node_count: path.nodes.len(),
            total_exercises,
            exercise_types,
            metadata_count: path.metadata.len(),
            avg_exercises_per_node,
        })
    }

    /// Validate path integrity across all repositories.
    ///
    /// Returns the list of human-readable issues found (empty = consistent).
    /// The outer `Err` is reserved for database failures, not integrity issues.
    pub async fn validate_path_integrity(&self, path_id: i32) -> Result<Vec<String>, String> {
        let mut issues = Vec::new();
        // Check if path exists; nothing else can be validated if it doesn't.
        if !self.path_repo.path_exists(path_id).await? {
            issues.push(format!("Path with ID {} does not exist", path_id));
            return Ok(issues);
        }
        let path = self.path_repo.get_path_by_id(path_id).await?;
        // Check metadata consistency: every metadata row must point back at this path.
        if path.metadata.is_empty() {
            issues.push("Path has no metadata".to_string());
        } else {
            for metadata in &path.metadata {
                if metadata.path_id != path.id {
                    issues.push(format!(
                        "Metadata path_id '{}' doesn't match path ID '{}'",
                        metadata.path_id, path.id
                    ));
                }
            }
        }
        // Check nodes consistency: node -> path and exercise -> node back-references.
        if path.nodes.is_empty() {
            issues.push("Path has no nodes".to_string());
        } else {
            for node in &path.nodes {
                if node.path_id != path.id {
                    issues.push(format!(
                        "Node {} path_id '{}' doesn't match path ID '{}'",
                        node.id, node.path_id, path.id
                    ));
                }
                // Check exercises consistency
                for exercise in &node.exercises {
                    if exercise.node_id != node.id {
                        issues.push(format!(
                            "Exercise {} node_id {} doesn't match node ID {}",
                            exercise.id, exercise.node_id, node.id
                        ));
                    }
                    // Validate exercise content is valid JSON (content is stored as text).
                    if let Err(e) = serde_json::from_str::<serde_json::Value>(&exercise.content) {
                        issues.push(format!(
                            "Exercise {} has invalid JSON content: {}",
                            exercise.id, e
                        ));
                    }
                }
            }
        }
        Ok(issues)
    }

    /// Validate every path in the database.
    ///
    /// Returns a map of path ID -> issues; paths that validate cleanly are
    /// omitted from the result.
    pub async fn validate_all_paths(
        &self,
    ) -> Result<std::collections::HashMap<String, Vec<String>>, String> {
        let paths = self.path_repo.get_all_paths().await?;
        let mut results = std::collections::HashMap::new();
        for path in paths {
            // Path IDs are stored as strings; only numeric IDs can be validated.
            if let Ok(path_id) = path.id.parse::<i32>() {
                match self.validate_path_integrity(path_id).await {
                    Ok(issues) => {
                        if !issues.is_empty() {
                            results.insert(path.id, issues);
                        }
                    }
                    Err(e) => {
                        results.insert(path.id, vec![format!("Validation failed: {}", e)]);
                    }
                }
            } else {
                // No clone needed: `path.id` is not used after this insert.
                results.insert(path.id, vec!["Invalid path ID format".to_string()]);
            }
        }
        Ok(results)
    }

    /// Search paths by content (case-insensitive substring match).
    ///
    /// Scoring: title 10, description 5, node title 3, node description 2,
    /// exercise content 1. Results are sorted by descending relevance.
    pub async fn search_paths(&self, query: &str) -> Result<Vec<SearchResult>, String> {
        let paths = self.path_repo.get_all_paths().await?;
        let mut results = Vec::new();
        let query_lower = query.to_lowercase();
        for path in paths {
            let mut relevance_score = 0;
            let mut matching_content = Vec::new();
            // Check title
            if path.title.to_lowercase().contains(&query_lower) {
                relevance_score += 10;
                matching_content.push(format!("Title: {}", path.title));
            }
            // Check description
            if path.description.to_lowercase().contains(&query_lower) {
                relevance_score += 5;
                matching_content.push(format!("Description: {}", path.description));
            }
            // Check nodes
            for node in &path.nodes {
                if node.title.to_lowercase().contains(&query_lower) {
                    relevance_score += 3;
                    matching_content.push(format!("Node: {}", node.title));
                }
                if node.description.to_lowercase().contains(&query_lower) {
                    relevance_score += 2;
                    matching_content.push(format!("Node description: {}", node.description));
                }
                // Check exercises (raw JSON content; only the ID is reported, not the match)
                for exercise in &node.exercises {
                    if exercise.content.to_lowercase().contains(&query_lower) {
                        relevance_score += 1;
                        matching_content
                            .push(format!("Exercise ({}): {}", exercise.ex_type, exercise.id));
                    }
                }
            }
            if relevance_score > 0 {
                results.push(SearchResult {
                    path_id: path.id,
                    title: path.title,
                    relevance_score,
                    matching_content,
                });
            }
        }
        // Sort by relevance score (descending)
        results.sort_by(|a, b| b.relevance_score.cmp(&a.relevance_score));
        Ok(results)
    }
}
/// Comprehensive path statistics
#[derive(Debug, Clone)]
pub struct PathStatistics {
    // Path identifier in its stored (string) form.
    pub path_id: String,
    pub title: String,
    pub description: String,
    // Number of nodes in the path.
    pub node_count: usize,
    // Total exercises across all nodes.
    pub total_exercises: usize,
    // Exercise count keyed by exercise type.
    pub exercise_types: std::collections::HashMap<String, usize>,
    // Number of metadata records attached to the path.
    pub metadata_count: usize,
    // total_exercises / node_count; 0.0 for a path with no nodes.
    pub avg_exercises_per_node: f64,
}

impl PathStatistics {
    /// Print a human-readable breakdown of these statistics to stdout.
    pub fn print_detailed_summary(&self) {
        println!("=== Detailed Path Statistics ===");
        println!("ID: {}", self.path_id);
        println!("Title: {}", self.title);
        println!("Description: {}", self.description);
        println!("Nodes: {}", self.node_count);
        println!("Total Exercises: {}", self.total_exercises);
        println!(
            "Average Exercises per Node: {:.2}",
            self.avg_exercises_per_node
        );
        println!("Metadata Records: {}", self.metadata_count);
        println!("Exercise Types:");
        for (ex_type, count) in &self.exercise_types {
            // Guard against division by zero: with zero total exercises the
            // share is reported as 0% rather than the original "NaN%".
            let share = if self.total_exercises == 0 {
                0.0
            } else {
                (*count as f64 / self.total_exercises as f64) * 100.0
            };
            println!(" {}: {} ({:.1}%)", ex_type, count, share);
        }
    }
}
/// Search result for path content search
#[derive(Debug, Clone)]
pub struct SearchResult {
    pub path_id: String,
    pub title: String,
    pub relevance_score: i32,
    pub matching_content: Vec<String>,
}

impl SearchResult {
    /// Dump this search result to stdout in a human-readable form.
    pub fn print_summary(&self) {
        println!("=== Search Result ===");
        println!("Path: {} - {}", self.path_id, self.title);
        println!("Relevance Score: {}", self.relevance_score);
        println!("Matching Content:");
        self.matching_content
            .iter()
            .for_each(|entry| println!(" - {}", entry));
    }
}