Let AI clean up repositories and generate missing functions
This commit is contained in:
@@ -1,8 +1,8 @@
|
||||
use sqlx::{migrate::MigrateDatabase, sqlite::SqlitePoolOptions, Pool, Sqlite};
|
||||
use tauri::{App, Manager};
|
||||
|
||||
mod repositories;
|
||||
mod models;
|
||||
pub mod models;
|
||||
pub mod repositories;
|
||||
|
||||
// #[tauri::command]
|
||||
// fn greet(name: &str) -> String {
|
||||
@@ -13,7 +13,6 @@ mod models;
|
||||
#[tauri::command]
|
||||
async fn db_version(state: tauri::State<'_, AppState>) -> Result<String, String> {
|
||||
let pool = &state.db;
|
||||
|
||||
|
||||
let row: (String,) = sqlx::query_as("SELECT sqlite_version()")
|
||||
.fetch_one(pool)
|
||||
@@ -22,20 +21,18 @@ async fn db_version(state: tauri::State<'_, AppState>) -> Result<String, String>
|
||||
Ok(row.0)
|
||||
}
|
||||
|
||||
|
||||
|
||||
async fn setup_db(app: &App) -> Db {
|
||||
let mut path = app.path().app_data_dir().expect("failed to get data_dir");
|
||||
|
||||
|
||||
match std::fs::create_dir_all(path.clone()) {
|
||||
Ok(_) => {}
|
||||
Err(err) => {
|
||||
panic!("error creating directory {}", err);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
path.push("paths.sqlite");
|
||||
|
||||
|
||||
Sqlite::create_database(
|
||||
format!(
|
||||
"sqlite:{}",
|
||||
@@ -45,19 +42,19 @@ async fn setup_db(app: &App) -> Db {
|
||||
)
|
||||
.await
|
||||
.expect("failed to create database");
|
||||
|
||||
|
||||
let db = SqlitePoolOptions::new()
|
||||
.connect(path.to_str().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
||||
sqlx::migrate!("./migrations").run(&db).await.unwrap();
|
||||
|
||||
|
||||
db
|
||||
}
|
||||
|
||||
type Db = Pool<Sqlite>;
|
||||
|
||||
|
||||
struct AppState {
|
||||
db: Db,
|
||||
}
|
||||
@@ -66,17 +63,15 @@ struct AppState {
|
||||
pub fn run() {
|
||||
tauri::Builder::default()
|
||||
.plugin(tauri_plugin_opener::init())
|
||||
.invoke_handler(tauri::generate_handler![
|
||||
db_version
|
||||
])
|
||||
.invoke_handler(tauri::generate_handler![db_version])
|
||||
.setup(|app| {
|
||||
tauri::async_runtime::block_on(async move {
|
||||
let db = setup_db(app).await;
|
||||
|
||||
|
||||
app.manage(AppState { db });
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
.run(tauri::generate_context!())
|
||||
.expect("error building the app");}
|
||||
|
||||
.expect("error building the app");
|
||||
}
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
|
||||
|
||||
#[derive(sqlx::FromRow, Debug)]
|
||||
pub struct PathDb {
|
||||
pub id: String,
|
||||
@@ -10,8 +7,8 @@ pub struct PathDb {
|
||||
|
||||
#[derive(Debug, sqlx::FromRow)]
|
||||
pub struct MetadataDb {
|
||||
pub path_id : String,
|
||||
pub path_id: String,
|
||||
pub version: String,
|
||||
pub created_at: String,
|
||||
pub created_at: String,
|
||||
pub updated_at: String,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
#[derive(Debug, Clone)]
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Exercise {
|
||||
pub id: u16,
|
||||
pub id: u32,
|
||||
pub ex_type: String,
|
||||
pub content: String,
|
||||
pub node_id: u32,
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use crate::models::exercise::Exercise;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Node{
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Node {
|
||||
pub id: u32,
|
||||
pub title: String,
|
||||
pub description: String,
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::models::node::Node;
|
||||
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Path {
|
||||
pub id: String,
|
||||
pub title: String,
|
||||
@@ -12,10 +12,10 @@ pub struct Path {
|
||||
pub nodes: Vec<Node>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Metadata {
|
||||
pub path_id : String,
|
||||
pub path_id: String,
|
||||
pub version: String,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub created_at: DateTime<Utc>,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
233
src-tauri/src/repositories/README.md
Normal file
233
src-tauri/src/repositories/README.md
Normal file
@@ -0,0 +1,233 @@
|
||||
# Repository Layer Documentation
|
||||
|
||||
This directory contains the repository layer for the Flalingo application, which handles all database operations using SQLx with SQLite.
|
||||
|
||||
## Structure
|
||||
|
||||
The repository layer is organized into specialized repositories, each responsible for a specific domain:
|
||||
|
||||
- **`path_repository.rs`** - Main repository for managing learning paths
|
||||
- **`node_repository.rs`** - Repository for managing nodes within paths
|
||||
- **`exercise_repository.rs`** - Repository for managing exercises within nodes
|
||||
- **`metadata_repository.rs`** - Repository for managing path metadata
|
||||
- **`repository_manager.rs`** - Coordinates all repositories and provides a unified interface
|
||||
|
||||
## Architecture
|
||||
|
||||
### Repository Pattern
|
||||
Each repository follows the repository pattern with:
|
||||
- Clear separation of concerns
|
||||
- Consistent error handling
|
||||
- Type-safe database operations
|
||||
- Conversion between database models and domain models
|
||||
|
||||
### Dependency Flow
|
||||
```
|
||||
RepositoryManager
|
||||
├── PathRepository
|
||||
├── MetadataRepository
|
||||
├── NodeRepository
|
||||
└── ExerciseRepository
|
||||
```
|
||||
|
||||
## Usage
|
||||
|
||||
### Using Individual Repositories
|
||||
```rust
|
||||
use crate::repositories::path_repository::PathRepository;
|
||||
|
||||
let path_repo = PathRepository::new(&pool);
|
||||
let path = path_repo.get_path_by_id(1).await?;
|
||||
```
|
||||
|
||||
### Using Repository Manager (Recommended)
|
||||
```rust
|
||||
use crate::repositories::repository_manager::RepositoryManager;
|
||||
|
||||
let repo_manager = RepositoryManager::new(&pool);
|
||||
|
||||
// Access specific repositories
|
||||
let path = repo_manager.paths().get_path_by_id(1).await?;
|
||||
let nodes = repo_manager.nodes().get_nodes_by_path_id("1").await?;
|
||||
let exercises = repo_manager.exercises().get_exercises_by_node_id(1).await?;
|
||||
|
||||
// Database operations
|
||||
let stats = repo_manager.get_stats().await?;
|
||||
let is_healthy = repo_manager.health_check().await?;
|
||||
```
|
||||
|
||||
## Repository Details
|
||||
|
||||
### PathRepository
|
||||
Main repository for learning paths that orchestrates other repositories:
|
||||
- `get_path_by_id(id)` - Get complete path with metadata, nodes, and exercises
|
||||
- `get_all_paths()` - Get all paths with their complete data
|
||||
- `get_paths_by_title(pattern)` - Search paths by title pattern
|
||||
- `path_exists(id)` - Check if path exists
|
||||
- `save_path(path)` - Save new path with all metadata, nodes, and exercises
|
||||
- `update_path(path)` - Update existing path and replace all content
|
||||
- `delete_path(id)` - Delete path and all related data (cascading)
|
||||
- `clone_path(source_id, new_id, title)` - Create complete copy of existing path
|
||||
|
||||
### NodeRepository
|
||||
Manages nodes and their associated exercises:
|
||||
- `get_nodes_by_path_id(path_id)` - Get all nodes for a path with exercises
|
||||
- `get_node_by_id(node_id)` - Get single node with exercises
|
||||
- `save_node(node)` - Save node with exercises, returns generated ID
|
||||
- `save_multiple_nodes(nodes, path_id)` - Bulk save nodes with transaction
|
||||
- `update_node(node)` - Update node and replace all exercises
|
||||
- `delete_node(node_id)` - Delete node and all its exercises
|
||||
- `delete_nodes_by_path_id(path_id)` - Delete all nodes for a path
|
||||
- Efficiently loads exercises for multiple nodes using batch queries
|
||||
|
||||
### ExerciseRepository
|
||||
Handles individual exercises:
|
||||
- `get_exercises_by_node_id(node_id)` - Get exercises for a node
|
||||
- `get_exercises_by_path_id(path_id)` - Get all exercises for a path
|
||||
- `get_exercise_by_id(id)` - Get single exercise
|
||||
- `get_exercises_by_type(type, path_id)` - Filter exercises by type
|
||||
- `save_exercise(exercise)` - Save single exercise, returns generated ID
|
||||
- `save_multiple_exercises(exercises)` - Bulk save with transaction
|
||||
- `update_exercise(exercise)` - Update existing exercise
|
||||
- `delete_exercise(exercise_id)` - Delete single exercise
|
||||
- `update_exercises_for_node(node_id, exercises)` - Replace all exercises for a node
|
||||
|
||||
### MetadataRepository
|
||||
Manages path metadata (versioning, timestamps):
|
||||
- `get_metadata_by_path_id(path_id)` - Get metadata for a path
|
||||
- `save_metadata(metadata)` - Save new metadata record
|
||||
- `save_multiple_metadata(metadata_list)` - Bulk save with transaction
|
||||
- `update_metadata(metadata)` - Update existing metadata
|
||||
- `delete_metadata_by_path_id(path_id)` - Delete all metadata for path
|
||||
- Handles timestamp parsing and validation
|
||||
- Converts between database and domain models
|
||||
|
||||
## Error Handling
|
||||
|
||||
All repositories use consistent error handling:
|
||||
- Return `Result<T, String>` for all operations
|
||||
- Descriptive error messages with context
|
||||
- Proper error propagation between layers
|
||||
- No panics - all errors are handled gracefully
|
||||
|
||||
## Database Schema Assumptions
|
||||
|
||||
The repositories assume the following SQLite schema:
|
||||
- `path` table with columns: id, title, description
|
||||
- `pathMetadata` table with columns: pathId, version, created_at, updated_at
|
||||
- `node` table with columns: id, title, description, pathId
|
||||
- `exercise` table with columns: id, ex_type, content, nodeId, pathId
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
- **Batch Loading**: Node repository loads exercises for multiple nodes in a single query
|
||||
- **Lazy Loading**: Only loads required data based on the operation
|
||||
- **Connection Pooling**: Uses SQLx connection pool for efficient database connections
|
||||
- **Prepared Statements**: All queries use parameter binding for safety and performance
|
||||
|
||||
## Future Improvements
|
||||
|
||||
### Advanced Features
|
||||
|
||||
#### JSON Import/Export
|
||||
The `PathJsonUtils` provides comprehensive JSON handling:
|
||||
|
||||
```rust
|
||||
use crate::repositories::path_json_utils::PathJsonUtils;
|
||||
|
||||
let json_utils = PathJsonUtils::new(&path_repo);
|
||||
|
||||
// Import from JSON
|
||||
let path_id = json_utils.import_from_file("path.json").await?;
|
||||
|
||||
// Export to JSON
|
||||
json_utils.export_to_file(path_id, "backup.json").await?;
|
||||
|
||||
// Validate JSON structure
|
||||
json_utils.validate_json_file("path.json")?;
|
||||
|
||||
// Bulk operations
|
||||
let imported_paths = json_utils.import_from_directory("./paths/").await?;
|
||||
json_utils.backup_all_paths("./backup/").await?;
|
||||
```
|
||||
|
||||
#### Repository Manager Advanced Operations
|
||||
|
||||
```rust
|
||||
let repo_manager = RepositoryManager::new(&pool);
|
||||
|
||||
// Path statistics and analysis
|
||||
let stats = repo_manager.get_path_statistics(path_id).await?;
|
||||
stats.print_detailed_summary();
|
||||
|
||||
// Content search across all paths
|
||||
let results = repo_manager.search_paths("vocabulary").await?;
|
||||
|
||||
// Data integrity validation
|
||||
let issues = repo_manager.validate_path_integrity(path_id).await?;
|
||||
let all_issues = repo_manager.validate_all_paths().await?;
|
||||
|
||||
// Path cloning
|
||||
let cloned_id = repo_manager.clone_path_complete(
|
||||
source_id,
|
||||
"new_path_001",
|
||||
"Cloned Path Title"
|
||||
).await?;
|
||||
```
|
||||
|
||||
#### Transaction Support
|
||||
All repositories use transactions for complex operations:
|
||||
|
||||
```rust
|
||||
// Automatic transaction handling in save/update/delete operations
|
||||
let path_id = repo_manager.paths().save_path(path).await?;
|
||||
|
||||
// Manual transaction control
|
||||
let mut tx = repo_manager.begin_transaction().await?;
|
||||
// Perform multiple operations within the transaction
|
||||
// tx.commit().await?;
|
||||
```
|
||||
|
||||
### JSON Structure Validation
|
||||
All JSON imports are validated for:
|
||||
- Structure compliance with Rust models
|
||||
- Reference integrity (path_id, node_id consistency)
|
||||
- Valid JSON content in exercise fields
|
||||
- Proper timestamp formatting
|
||||
|
||||
### Performance Optimizations
|
||||
- **Bulk Operations**: All repositories support batch insert/update
|
||||
- **Transaction Management**: Complex operations use database transactions
|
||||
- **Efficient Queries**: Batch loading of related data (nodes → exercises)
|
||||
- **Connection Pooling**: SQLx pool for optimal database connections
|
||||
|
||||
### Search and Analytics
|
||||
- **Content Search**: Full-text search across paths, nodes, and exercises
|
||||
- **Statistics Generation**: Comprehensive path and database analytics
|
||||
- **Data Integrity**: Validation and consistency checking
|
||||
- **Export/Backup**: Complete JSON-based backup system
|
||||
|
||||
### Future Enhancements
|
||||
- **Caching**: Add caching layer for frequently accessed data
|
||||
- **Pagination**: Support for large result sets
|
||||
- **Versioning**: Enhanced version control for paths
|
||||
- **Migration Tools**: Database schema migration utilities
|
||||
|
||||
## Testing
|
||||
|
||||
Each repository includes comprehensive functionality:
|
||||
- **CRUD Operations**: Complete Create, Read, Update, Delete support
|
||||
- **Bulk Operations**: Efficient batch processing with transactions
|
||||
- **Data Validation**: Input validation and integrity checking
|
||||
- **Error Handling**: Descriptive error messages and proper propagation
|
||||
- **JSON Integration**: Import/export functionality for all data
|
||||
- **Search Capabilities**: Content search and filtering
|
||||
- **Statistics**: Analytics and reporting features
|
||||
|
||||
### Testing Examples
|
||||
The `examples/test_repository_functions.rs` file demonstrates:
|
||||
- Complete CRUD workflows
|
||||
- JSON import/export operations
|
||||
- Search and validation functionality
|
||||
- Performance testing scenarios
|
||||
- Error handling examples
|
||||
263
src-tauri/src/repositories/exercise_repository.rs
Normal file
263
src-tauri/src/repositories/exercise_repository.rs
Normal file
@@ -0,0 +1,263 @@
|
||||
use sqlx::{sqlite::SqlitePool, FromRow, Row};
|
||||
|
||||
use crate::models::{db_models::exercise_db::ExerciseDb, exercise::Exercise};
|
||||
|
||||
pub struct ExerciseRepository<'a> {
|
||||
pub pool: &'a SqlitePool,
|
||||
}
|
||||
|
||||
impl<'a> ExerciseRepository<'a> {
|
||||
pub fn new(pool: &'a SqlitePool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
pub async fn get_exercises_by_node_id(&self, node_id: u32) -> Result<Vec<Exercise>, String> {
|
||||
let exercise_rows = sqlx::query("SELECT * FROM exercise WHERE nodeId = ?")
|
||||
.bind(node_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?;
|
||||
|
||||
let exercises = self.parse_exercise_rows(exercise_rows)?;
|
||||
Ok(exercises)
|
||||
}
|
||||
|
||||
pub async fn get_exercises_by_path_id(&self, path_id: &str) -> Result<Vec<Exercise>, String> {
|
||||
let exercise_rows = sqlx::query("SELECT * FROM exercise WHERE pathId = ?")
|
||||
.bind(path_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?;
|
||||
|
||||
if exercise_rows.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No Exercise for path with ID {} found",
|
||||
path_id
|
||||
));
|
||||
}
|
||||
|
||||
let exercises = self.parse_exercise_rows(exercise_rows)?;
|
||||
Ok(exercises)
|
||||
}
|
||||
|
||||
pub async fn get_exercise_by_id(&self, exercise_id: u32) -> Result<Exercise, String> {
|
||||
let exercise_row = sqlx::query("SELECT * FROM exercise WHERE id = ?")
|
||||
.bind(exercise_id)
|
||||
.fetch_optional(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?;
|
||||
|
||||
let exercise_row = exercise_row
|
||||
.ok_or_else(|| format!("ERROR: No Exercise with ID {} found", exercise_id))?;
|
||||
|
||||
let exercise_db = ExerciseDb::from_row(&exercise_row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))?;
|
||||
|
||||
let exercise = self.convert_exercise_db_to_model(exercise_db);
|
||||
Ok(exercise)
|
||||
}
|
||||
|
||||
pub async fn get_exercises_by_type(
|
||||
&self,
|
||||
ex_type: &str,
|
||||
path_id: Option<&str>,
|
||||
) -> Result<Vec<Exercise>, String> {
|
||||
let exercise_rows = if let Some(path_id) = path_id {
|
||||
sqlx::query("SELECT * FROM exercise WHERE ex_type = ? AND pathId = ?")
|
||||
.bind(ex_type)
|
||||
.bind(path_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?
|
||||
} else {
|
||||
sqlx::query("SELECT * FROM exercise WHERE ex_type = ?")
|
||||
.bind(ex_type)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?
|
||||
};
|
||||
|
||||
let exercises = self.parse_exercise_rows(exercise_rows)?;
|
||||
Ok(exercises)
|
||||
}
|
||||
|
||||
fn parse_exercise_rows(
|
||||
&self,
|
||||
exercise_rows: Vec<sqlx::sqlite::SqliteRow>,
|
||||
) -> Result<Vec<Exercise>, String> {
|
||||
exercise_rows
|
||||
.iter()
|
||||
.map(|row| {
|
||||
let exercise_db = ExerciseDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))?;
|
||||
|
||||
Ok(self.convert_exercise_db_to_model(exercise_db))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn convert_exercise_db_to_model(&self, exercise_db: ExerciseDb) -> Exercise {
|
||||
Exercise {
|
||||
id: exercise_db.id as u32,
|
||||
ex_type: exercise_db.ex_type,
|
||||
content: exercise_db.content,
|
||||
node_id: exercise_db.node_id as u32,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn save_exercise(&self, exercise: &Exercise) -> Result<u32, String> {
|
||||
let query = "INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, (SELECT pathId FROM node WHERE id = ?)) RETURNING id";
|
||||
|
||||
let row = sqlx::query(query)
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(exercise.node_id)
|
||||
.bind(exercise.node_id)
|
||||
.fetch_one(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save exercise: {}", e))?;
|
||||
|
||||
let exercise_id: i64 = row
|
||||
.try_get("id")
|
||||
.map_err(|e| format!("ERROR: Failed to get exercise ID: {}", e))?;
|
||||
|
||||
Ok(exercise_id as u32)
|
||||
}
|
||||
|
||||
pub async fn save_multiple_exercises(
|
||||
&self,
|
||||
exercises: &[Exercise],
|
||||
) -> Result<Vec<u32>, String> {
|
||||
if exercises.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
let mut exercise_ids = Vec::new();
|
||||
|
||||
for exercise in exercises {
|
||||
let row = sqlx::query("INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, (SELECT pathId FROM node WHERE id = ?)) RETURNING id")
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(exercise.node_id)
|
||||
.bind(exercise.node_id)
|
||||
.fetch_one(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save exercise in transaction: {}", e))?;
|
||||
|
||||
let exercise_id: i64 = row
|
||||
.try_get("id")
|
||||
.map_err(|e| format!("ERROR: Failed to get exercise ID: {}", e))?;
|
||||
|
||||
exercise_ids.push(exercise_id as u32);
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit exercise transaction: {}", e))?;
|
||||
|
||||
Ok(exercise_ids)
|
||||
}
|
||||
|
||||
pub async fn update_exercise(&self, exercise: &Exercise) -> Result<(), String> {
|
||||
let query = "UPDATE exercise SET ex_type = ?, content = ? WHERE id = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(exercise.id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to update exercise: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(format!("ERROR: No exercise found with ID {}", exercise.id));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_exercise(&self, exercise_id: u32) -> Result<(), String> {
|
||||
let query = "DELETE FROM exercise WHERE id = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(exercise_id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete exercise: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(format!("ERROR: No exercise found with ID {}", exercise_id));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_exercises_by_node_id(&self, node_id: u32) -> Result<u64, String> {
|
||||
let query = "DELETE FROM exercise WHERE nodeId = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(node_id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete exercises by node ID: {}", e))?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
|
||||
pub async fn delete_exercises_by_path_id(&self, path_id: &str) -> Result<u64, String> {
|
||||
let query = "DELETE FROM exercise WHERE pathId = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(path_id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete exercises by path ID: {}", e))?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
|
||||
pub async fn update_exercises_for_node(
|
||||
&self,
|
||||
node_id: u32,
|
||||
exercises: &[Exercise],
|
||||
) -> Result<(), String> {
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
// Delete existing exercises for the node
|
||||
sqlx::query("DELETE FROM exercise WHERE nodeId = ?")
|
||||
.bind(node_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete existing exercises: {}", e))?;
|
||||
|
||||
// Insert new exercises
|
||||
for exercise in exercises {
|
||||
sqlx::query("INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, (SELECT pathId FROM node WHERE id = ?))")
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(node_id)
|
||||
.bind(node_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to insert exercise in transaction: {}", e))?;
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit exercise update transaction: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
145
src-tauri/src/repositories/metadata_repository.rs
Normal file
145
src-tauri/src/repositories/metadata_repository.rs
Normal file
@@ -0,0 +1,145 @@
|
||||
use sqlx::{sqlite::SqlitePool, FromRow};
|
||||
|
||||
use crate::models::{db_models::path_db::MetadataDb, path::Metadata};
|
||||
|
||||
pub struct MetadataRepository<'a> {
|
||||
pub pool: &'a SqlitePool,
|
||||
}
|
||||
|
||||
impl<'a> MetadataRepository<'a> {
|
||||
pub fn new(pool: &'a SqlitePool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
pub async fn get_metadata_by_path_id(&self, path_id: &str) -> Result<Vec<Metadata>, String> {
|
||||
let metadata_rows = sqlx::query("SELECT * FROM pathMetadata WHERE pathId = ?")
|
||||
.bind(path_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Metadata db: {}", e))?;
|
||||
|
||||
if metadata_rows.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No metadata for path with ID {} found",
|
||||
path_id
|
||||
));
|
||||
}
|
||||
|
||||
let metadata_db_result: Result<Vec<MetadataDb>, String> = metadata_rows
|
||||
.iter()
|
||||
.map(|row| {
|
||||
MetadataDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Metadata struct: {}", e))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let metadata_db = metadata_db_result?;
|
||||
|
||||
let metadata = self.convert_metadata_db_to_model(metadata_db)?;
|
||||
|
||||
Ok(metadata)
|
||||
}
|
||||
|
||||
fn convert_metadata_db_to_model(
|
||||
&self,
|
||||
metadata_db: Vec<MetadataDb>,
|
||||
) -> Result<Vec<Metadata>, String> {
|
||||
metadata_db
|
||||
.iter()
|
||||
.map(|m| {
|
||||
Ok(Metadata {
|
||||
path_id: m.path_id.clone(),
|
||||
version: m.version.clone(),
|
||||
created_at: m.created_at.parse().map_err(|e| {
|
||||
format!("ERROR: Could not parse created_at timestamp: {}", e)
|
||||
})?,
|
||||
updated_at: m.updated_at.parse().map_err(|e| {
|
||||
format!("ERROR: Could not parse updated_at timestamp: {}", e)
|
||||
})?,
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub async fn save_metadata(&self, metadata: &Metadata) -> Result<(), String> {
|
||||
let query = "INSERT INTO pathMetadata (pathId, version, created_at, updated_at) VALUES (?, ?, ?, ?)";
|
||||
|
||||
sqlx::query(query)
|
||||
.bind(&metadata.path_id)
|
||||
.bind(&metadata.version)
|
||||
.bind(metadata.created_at.to_rfc3339())
|
||||
.bind(metadata.updated_at.to_rfc3339())
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save metadata: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_metadata(&self, metadata: &Metadata) -> Result<(), String> {
|
||||
let query = "UPDATE pathMetadata SET version = ?, updated_at = ? WHERE pathId = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(&metadata.version)
|
||||
.bind(metadata.updated_at.to_rfc3339())
|
||||
.bind(&metadata.path_id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to update metadata: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(format!(
|
||||
"ERROR: No metadata found for path_id {}",
|
||||
metadata.path_id
|
||||
));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_metadata_by_path_id(&self, path_id: &str) -> Result<(), String> {
|
||||
let query = "DELETE FROM pathMetadata WHERE pathId = ?";
|
||||
|
||||
let result = sqlx::query(query)
|
||||
.bind(path_id)
|
||||
.execute(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete metadata: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(format!("ERROR: No metadata found for path_id {}", path_id));
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn save_multiple_metadata(&self, metadata_list: &[Metadata]) -> Result<(), String> {
|
||||
if metadata_list.is_empty() {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
for metadata in metadata_list {
|
||||
sqlx::query("INSERT INTO pathMetadata (pathId, version, created_at, updated_at) VALUES (?, ?, ?, ?)")
|
||||
.bind(&metadata.path_id)
|
||||
.bind(&metadata.version)
|
||||
.bind(metadata.created_at.to_rfc3339())
|
||||
.bind(metadata.updated_at.to_rfc3339())
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save metadata in transaction: {}", e))?;
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit metadata transaction: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
@@ -1 +1,6 @@
|
||||
pub mod exercise_repository;
|
||||
pub mod metadata_repository;
|
||||
pub mod node_repository;
|
||||
pub mod path_json_utils;
|
||||
pub mod path_repository;
|
||||
pub mod repository_manager;
|
||||
|
||||
363
src-tauri/src/repositories/node_repository.rs
Normal file
363
src-tauri/src/repositories/node_repository.rs
Normal file
@@ -0,0 +1,363 @@
|
||||
use sqlx::{sqlite::SqlitePool, FromRow, Row};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::models::{db_models::node_db::NodeDb, exercise::Exercise, node::Node};
|
||||
|
||||
pub struct NodeRepository<'a> {
|
||||
pub pool: &'a SqlitePool,
|
||||
}
|
||||
|
||||
impl<'a> NodeRepository<'a> {
|
||||
pub fn new(pool: &'a SqlitePool) -> Self {
|
||||
Self { pool }
|
||||
}
|
||||
|
||||
pub async fn get_nodes_by_path_id(&self, path_id: &str) -> Result<Vec<Node>, String> {
|
||||
let node_rows = sqlx::query("SELECT * FROM node WHERE pathId = ?")
|
||||
.bind(path_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Node db: {}", e))?;
|
||||
|
||||
if node_rows.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No Nodes for path with ID {} found",
|
||||
path_id
|
||||
));
|
||||
}
|
||||
|
||||
let nodes_db = self.parse_node_rows(node_rows)?;
|
||||
let exercises_by_node = self.get_exercises_for_nodes(&nodes_db).await?;
|
||||
let nodes = self.convert_nodes_db_to_model(nodes_db, exercises_by_node);
|
||||
|
||||
Ok(nodes)
|
||||
}
|
||||
|
||||
pub async fn get_node_by_id(&self, node_id: u32) -> Result<Node, String> {
|
||||
let node_row = sqlx::query("SELECT * FROM node WHERE id = ?")
|
||||
.bind(node_id)
|
||||
.fetch_optional(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Node db: {}", e))?;
|
||||
|
||||
let node_row =
|
||||
node_row.ok_or_else(|| format!("ERROR: No Node with ID {} found", node_id))?;
|
||||
|
||||
let node_db = NodeDb::from_row(&node_row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Node struct: {}", e))?;
|
||||
|
||||
let exercises = self.get_exercises_for_node(node_id).await?;
|
||||
|
||||
let node = Node {
|
||||
id: node_db.id,
|
||||
title: node_db.title,
|
||||
description: node_db.description,
|
||||
path_id: node_db.path_id,
|
||||
exercises,
|
||||
};
|
||||
|
||||
Ok(node)
|
||||
}
|
||||
|
||||
async fn get_exercises_for_node(&self, node_id: u32) -> Result<Vec<Exercise>, String> {
|
||||
let exercise_rows = sqlx::query("SELECT * FROM exercise WHERE nodeId = ?")
|
||||
.bind(node_id)
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?;
|
||||
|
||||
let exercises = exercise_rows
|
||||
.iter()
|
||||
.map(|row| {
|
||||
let exercise_db = crate::models::db_models::exercise_db::ExerciseDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))?;
|
||||
|
||||
Ok(Exercise {
|
||||
id: exercise_db.id as u32,
|
||||
ex_type: exercise_db.ex_type,
|
||||
content: exercise_db.content,
|
||||
node_id: exercise_db.node_id as u32,
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<Exercise>, String>>()?;
|
||||
|
||||
Ok(exercises)
|
||||
}
|
||||
|
||||
async fn get_exercises_for_nodes(
|
||||
&self,
|
||||
nodes: &[NodeDb],
|
||||
) -> Result<HashMap<u32, Vec<Exercise>>, String> {
|
||||
let node_ids: Vec<u32> = nodes.iter().map(|n| n.id).collect();
|
||||
|
||||
if node_ids.is_empty() {
|
||||
return Ok(HashMap::new());
|
||||
}
|
||||
|
||||
// Create placeholders for the IN clause
|
||||
let placeholders = node_ids.iter().map(|_| "?").collect::<Vec<_>>().join(",");
|
||||
let query = format!("SELECT * FROM exercise WHERE nodeId IN ({})", placeholders);
|
||||
|
||||
let mut query_builder = sqlx::query(&query);
|
||||
for node_id in &node_ids {
|
||||
query_builder = query_builder.bind(node_id);
|
||||
}
|
||||
|
||||
let exercise_rows = query_builder
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Exercise db: {}", e))?;
|
||||
|
||||
let mut exercises_by_node: HashMap<u32, Vec<Exercise>> = HashMap::new();
|
||||
|
||||
for row in exercise_rows {
|
||||
let exercise_db = crate::models::db_models::exercise_db::ExerciseDb::from_row(&row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))?;
|
||||
|
||||
let exercise = Exercise {
|
||||
id: exercise_db.id as u32,
|
||||
ex_type: exercise_db.ex_type,
|
||||
content: exercise_db.content,
|
||||
node_id: exercise_db.node_id as u32,
|
||||
};
|
||||
|
||||
exercises_by_node
|
||||
.entry(exercise_db.node_id)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(exercise);
|
||||
}
|
||||
|
||||
Ok(exercises_by_node)
|
||||
}
|
||||
|
||||
fn parse_node_rows(
|
||||
&self,
|
||||
node_rows: Vec<sqlx::sqlite::SqliteRow>,
|
||||
) -> Result<Vec<NodeDb>, String> {
|
||||
node_rows
|
||||
.iter()
|
||||
.map(|row| {
|
||||
NodeDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Node struct: {}", e))
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn convert_nodes_db_to_model(
|
||||
&self,
|
||||
nodes_db: Vec<NodeDb>,
|
||||
exercises_by_node: HashMap<u32, Vec<Exercise>>,
|
||||
) -> Vec<Node> {
|
||||
nodes_db
|
||||
.iter()
|
||||
.map(|node_db| Node {
|
||||
id: node_db.id,
|
||||
title: node_db.title.clone(),
|
||||
description: node_db.description.clone(),
|
||||
path_id: node_db.path_id.clone(),
|
||||
exercises: exercises_by_node
|
||||
.get(&node_db.id)
|
||||
.cloned()
|
||||
.unwrap_or_else(Vec::new),
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Insert `node` (and its exercises) and return the newly assigned node id.
///
/// The node row is inserted first so its generated id can be stamped onto
/// the exercises before they are saved through the exercise repository.
///
/// NOTE(review): the node insert and the exercise inserts do not share a
/// transaction, so a failure while saving exercises leaves an orphaned
/// node row — confirm whether that is acceptable.
pub async fn save_node(&self, node: &Node) -> Result<u32, String> {
    // RETURNING id lets us read the autoincremented key from the same statement.
    let query = "INSERT INTO node (title, description, pathId) VALUES (?, ?, ?) RETURNING id";

    let row = sqlx::query(query)
        .bind(&node.title)
        .bind(&node.description)
        .bind(&node.path_id)
        .fetch_one(self.pool)
        .await
        .map_err(|e| format!("ERROR: Failed to save node: {}", e))?;

    // SQLite reports the generated key as a 64-bit integer; narrowed to u32
    // to match the model type.
    let node_id: i64 = row
        .try_get("id")
        .map_err(|e| format!("ERROR: Failed to get node ID: {}", e))?;
    let node_id = node_id as u32;

    // Save exercises for this node
    if !node.exercises.is_empty() {
        let exercise_repo =
            crate::repositories::exercise_repository::ExerciseRepository::new(self.pool);
        let mut exercises_to_save = node.exercises.clone();

        // Update node_id for all exercises
        for exercise in &mut exercises_to_save {
            exercise.node_id = node_id;
        }

        exercise_repo
            .save_multiple_exercises(&exercises_to_save)
            .await?;
    }

    Ok(node_id)
}
|
||||
|
||||
pub async fn save_multiple_nodes(
|
||||
&self,
|
||||
nodes: &[Node],
|
||||
path_id: &str,
|
||||
) -> Result<Vec<u32>, String> {
|
||||
if nodes.is_empty() {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
let mut node_ids = Vec::new();
|
||||
|
||||
for node in nodes {
|
||||
// Insert node
|
||||
let row = sqlx::query(
|
||||
"INSERT INTO node (title, description, pathId) VALUES (?, ?, ?) RETURNING id",
|
||||
)
|
||||
.bind(&node.title)
|
||||
.bind(&node.description)
|
||||
.bind(path_id)
|
||||
.fetch_one(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save node in transaction: {}", e))?;
|
||||
|
||||
let node_id: i64 = row
|
||||
.try_get("id")
|
||||
.map_err(|e| format!("ERROR: Failed to get node ID: {}", e))?;
|
||||
let node_id = node_id as u32;
|
||||
|
||||
node_ids.push(node_id);
|
||||
|
||||
// Save exercises for this node
|
||||
if !node.exercises.is_empty() {
|
||||
let mut exercises_to_save = node.exercises.clone();
|
||||
|
||||
// Update node_id for all exercises
|
||||
for exercise in &mut exercises_to_save {
|
||||
exercise.node_id = node_id;
|
||||
}
|
||||
|
||||
for exercise in &exercises_to_save {
|
||||
sqlx::query("INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, ?)")
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(node_id)
|
||||
.bind(path_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save exercise in transaction: {}", e))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit node transaction: {}", e))?;
|
||||
|
||||
Ok(node_ids)
|
||||
}
|
||||
|
||||
/// Update a node's title/description and replace its exercises.
///
/// NOTE(review): the node UPDATE runs inside `transaction`, but
/// `update_exercises_for_node` operates on `self.pool` — i.e. on a
/// separate connection outside the open transaction. The two changes are
/// therefore not atomic, and on SQLite the pooled connection may block on
/// the uncommitted transaction's write lock. Confirm whether the exercise
/// update should take the transaction instead.
pub async fn update_node(&self, node: &Node) -> Result<(), String> {
    let mut transaction = self
        .pool
        .begin()
        .await
        .map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;

    // Update node
    let result = sqlx::query("UPDATE node SET title = ?, description = ? WHERE id = ?")
        .bind(&node.title)
        .bind(&node.description)
        .bind(node.id)
        .execute(&mut *transaction)
        .await
        .map_err(|e| format!("ERROR: Failed to update node: {}", e))?;

    if result.rows_affected() == 0 {
        // Nothing matched: report not-found instead of committing a no-op.
        return Err(format!("ERROR: No node found with ID {}", node.id));
    }

    // Update exercises for this node
    let exercise_repo =
        crate::repositories::exercise_repository::ExerciseRepository::new(self.pool);
    exercise_repo
        .update_exercises_for_node(node.id, &node.exercises)
        .await?;

    transaction
        .commit()
        .await
        .map_err(|e| format!("ERROR: Failed to commit node update transaction: {}", e))?;

    Ok(())
}
|
||||
|
||||
pub async fn delete_node(&self, node_id: u32) -> Result<(), String> {
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
// First delete all exercises for this node
|
||||
sqlx::query("DELETE FROM exercise WHERE nodeId = ?")
|
||||
.bind(node_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete node exercises: {}", e))?;
|
||||
|
||||
// Then delete the node
|
||||
let result = sqlx::query("DELETE FROM node WHERE id = ?")
|
||||
.bind(node_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete node: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err(format!("ERROR: No node found with ID {}", node_id));
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit node deletion transaction: {}", e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn delete_nodes_by_path_id(&self, path_id: &str) -> Result<u64, String> {
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
// First delete all exercises for nodes in this path
|
||||
sqlx::query("DELETE FROM exercise WHERE pathId = ?")
|
||||
.bind(path_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete path exercises: {}", e))?;
|
||||
|
||||
// Then delete all nodes for this path
|
||||
let result = sqlx::query("DELETE FROM node WHERE pathId = ?")
|
||||
.bind(path_id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to delete path nodes: {}", e))?;
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit nodes deletion transaction: {}", e))?;
|
||||
|
||||
Ok(result.rows_affected())
|
||||
}
|
||||
}
|
||||
373
src-tauri/src/repositories/path_json_utils.rs
Normal file
373
src-tauri/src/repositories/path_json_utils.rs
Normal file
@@ -0,0 +1,373 @@
|
||||
use chrono::Utc;
|
||||
use serde_json;
|
||||
use std::fs;
|
||||
|
||||
use crate::models::{
|
||||
exercise::Exercise,
|
||||
node::Node,
|
||||
path::{Metadata, Path},
|
||||
};
|
||||
|
||||
use super::path_repository::PathRepository;
|
||||
|
||||
/// Utilities for importing and exporting paths to/from JSON
///
/// Borrows a `PathRepository` so all database access goes through the
/// repository layer; the struct itself holds no state of its own.
pub struct PathJsonUtils<'a> {
    // Repository used for all persistence operations (save/load of paths).
    path_repo: &'a PathRepository<'a>,
}
|
||||
|
||||
impl<'a> PathJsonUtils<'a> {
|
||||
/// Create a JSON-utility facade over an existing `PathRepository`.
pub fn new(path_repo: &'a PathRepository<'a>) -> Self {
    Self { path_repo }
}
|
||||
|
||||
/// Import a path from a JSON file
|
||||
pub async fn import_from_file(&self, file_path: &str) -> Result<String, String> {
|
||||
let json_content = fs::read_to_string(file_path)
|
||||
.map_err(|e| format!("ERROR: Failed to read JSON file {}: {}", file_path, e))?;
|
||||
|
||||
self.import_from_json(&json_content).await
|
||||
}
|
||||
|
||||
/// Import a path from JSON string
///
/// Parses (and normalizes) the JSON, persists the resulting path through
/// the repository, and returns the saved path's id.
pub async fn import_from_json(&self, json_content: &str) -> Result<String, String> {
    let path = self.parse_path_from_json(json_content)?;
    let path_id = self.path_repo.save_path(path).await?;

    Ok(path_id)
}
|
||||
|
||||
/// Export a path to JSON file
|
||||
pub async fn export_to_file(&self, path_id: i32, file_path: &str) -> Result<(), String> {
|
||||
let json_content = self.export_to_json(path_id).await?;
|
||||
|
||||
fs::write(file_path, json_content)
|
||||
.map_err(|e| format!("ERROR: Failed to write JSON file {}: {}", file_path, e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Export a path to JSON string
|
||||
pub async fn export_to_json(&self, path_id: i32) -> Result<String, String> {
|
||||
let path = self.path_repo.get_path_by_id(path_id).await?;
|
||||
|
||||
serde_json::to_string_pretty(&path)
|
||||
.map_err(|e| format!("ERROR: Failed to serialize path to JSON: {}", e))
|
||||
}
|
||||
|
||||
/// Parse a Path from JSON string
///
/// The parsed value is normalized in place via `validate_and_fix_path`, so
/// callers receive a structurally consistent `Path` or an error.
pub fn parse_path_from_json(&self, json_content: &str) -> Result<Path, String> {
    let mut path: Path = serde_json::from_str(json_content)
        .map_err(|e| format!("ERROR: Failed to parse JSON: {}", e))?;

    // Validate and fix the path data
    self.validate_and_fix_path(&mut path)?;

    Ok(path)
}
|
||||
|
||||
/// Validate and fix path data after parsing from JSON
///
/// Hard-fails on empty ids/titles/content and on exercise content that is
/// not valid JSON; silently repairs inconsistent back-references
/// (`metadata.path_id`, `node.path_id`, `exercise.node_id`) so they match
/// their parent records.
fn validate_and_fix_path(&self, path: &mut Path) -> Result<(), String> {
    // Validate basic fields
    if path.id.is_empty() {
        return Err("ERROR: Path ID cannot be empty".to_string());
    }

    if path.title.is_empty() {
        return Err("ERROR: Path title cannot be empty".to_string());
    }

    // Ensure metadata has correct path_id references
    for metadata in &mut path.metadata {
        if metadata.path_id != path.id {
            metadata.path_id = path.id.clone();
        }
    }

    // Validate and fix nodes
    for node in &mut path.nodes {
        // Repair, don't reject, a mismatched parent reference.
        if node.path_id != path.id {
            node.path_id = path.id.clone();
        }

        if node.title.is_empty() {
            return Err(format!("ERROR: Node {} title cannot be empty", node.id));
        }

        // Validate exercises
        for exercise in &mut node.exercises {
            if exercise.node_id != node.id {
                exercise.node_id = node.id;
            }

            if exercise.ex_type.is_empty() {
                return Err(format!(
                    "ERROR: Exercise {} type cannot be empty",
                    exercise.id
                ));
            }

            if exercise.content.is_empty() {
                return Err(format!(
                    "ERROR: Exercise {} content cannot be empty",
                    exercise.id
                ));
            }

            // Validate that content is valid JSON
            // (content is a JSON string; reject malformed payloads up front)
            if let Err(e) = serde_json::from_str::<serde_json::Value>(&exercise.content) {
                return Err(format!(
                    "ERROR: Exercise {} has invalid JSON content: {}",
                    exercise.id, e
                ));
            }
        }
    }

    Ok(())
}
|
||||
|
||||
/// Import multiple paths from a directory of JSON files
|
||||
pub async fn import_from_directory(&self, directory_path: &str) -> Result<Vec<String>, String> {
|
||||
let entries = fs::read_dir(directory_path)
|
||||
.map_err(|e| format!("ERROR: Failed to read directory {}: {}", directory_path, e))?;
|
||||
|
||||
let mut imported_paths = Vec::new();
|
||||
|
||||
for entry in entries {
|
||||
let entry =
|
||||
entry.map_err(|e| format!("ERROR: Failed to read directory entry: {}", e))?;
|
||||
|
||||
let file_path = entry.path();
|
||||
|
||||
// Only process .json files
|
||||
if let Some(extension) = file_path.extension() {
|
||||
if extension == "json" {
|
||||
if let Some(file_path_str) = file_path.to_str() {
|
||||
match self.import_from_file(file_path_str).await {
|
||||
Ok(path_id) => {
|
||||
println!(
|
||||
"Successfully imported path {} from {}",
|
||||
path_id, file_path_str
|
||||
);
|
||||
imported_paths.push(path_id);
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("Failed to import {}: {}", file_path_str, e);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(imported_paths)
|
||||
}
|
||||
|
||||
/// Export multiple paths to a directory
|
||||
pub async fn export_to_directory(
|
||||
&self,
|
||||
path_ids: &[i32],
|
||||
directory_path: &str,
|
||||
) -> Result<(), String> {
|
||||
// Create directory if it doesn't exist
|
||||
fs::create_dir_all(directory_path).map_err(|e| {
|
||||
format!(
|
||||
"ERROR: Failed to create directory {}: {}",
|
||||
directory_path, e
|
||||
)
|
||||
})?;
|
||||
|
||||
for &path_id in path_ids {
|
||||
let path = self.path_repo.get_path_by_id(path_id).await?;
|
||||
let filename = format!("{}/path_{}.json", directory_path, path.id);
|
||||
|
||||
match self.export_to_file(path_id, &filename).await {
|
||||
Ok(()) => println!("Successfully exported path {} to {}", path.id, filename),
|
||||
Err(e) => eprintln!("Failed to export path {}: {}", path_id, e),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Create a template path with sample data
///
/// Builds an in-memory `Path` (nothing is persisted) containing one sample
/// node with one sample vocabulary exercise; metadata timestamps are set
/// to the current UTC time.
pub fn create_template_path(
    &self,
    path_id: &str,
    title: &str,
    description: &str,
) -> Result<Path, String> {
    let now = Utc::now();

    let metadata = vec![Metadata {
        path_id: path_id.to_string(),
        version: "1.0.0".to_string(),
        created_at: now,
        updated_at: now,
    }];

    // Exercise content is a JSON string, matching the format the validator expects.
    let sample_exercise = Exercise {
        id: 1,
        ex_type: "vocabulary".to_string(),
        content:
            r#"{"word": "Hallo", "translation": "Hello", "example": "Hallo, wie geht's?"}"#
                .to_string(),
        node_id: 1,
    };

    let sample_node = Node {
        id: 1,
        title: "Sample Node".to_string(),
        description: "This is a sample node for demonstration".to_string(),
        path_id: path_id.to_string(),
        exercises: vec![sample_exercise],
    };

    let path = Path {
        id: path_id.to_string(),
        title: title.to_string(),
        description: description.to_string(),
        metadata,
        nodes: vec![sample_node],
    };

    Ok(path)
}
|
||||
|
||||
/// Generate a template JSON file
|
||||
pub fn generate_template_json_file(
|
||||
&self,
|
||||
file_path: &str,
|
||||
path_id: &str,
|
||||
title: &str,
|
||||
description: &str,
|
||||
) -> Result<(), String> {
|
||||
let template_path = self.create_template_path(path_id, title, description)?;
|
||||
|
||||
let json_content = serde_json::to_string_pretty(&template_path)
|
||||
.map_err(|e| format!("ERROR: Failed to serialize template to JSON: {}", e))?;
|
||||
|
||||
fs::write(file_path, json_content)
|
||||
.map_err(|e| format!("ERROR: Failed to write template file {}: {}", file_path, e))?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Validate JSON file without importing
|
||||
pub fn validate_json_file(&self, file_path: &str) -> Result<(), String> {
|
||||
let json_content = fs::read_to_string(file_path)
|
||||
.map_err(|e| format!("ERROR: Failed to read JSON file {}: {}", file_path, e))?;
|
||||
|
||||
let mut path = self.parse_path_from_json(&json_content)?;
|
||||
self.validate_and_fix_path(&mut path)?;
|
||||
|
||||
println!("JSON file {} is valid", file_path);
|
||||
println!("Path: {} - {}", path.id, path.title);
|
||||
println!("Nodes: {}", path.nodes.len());
|
||||
println!(
|
||||
"Total exercises: {}",
|
||||
path.nodes.iter().map(|n| n.exercises.len()).sum::<usize>()
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Backup all paths to JSON files
///
/// Writes each path as `path_<id>.json` into a timestamped subdirectory of
/// `backup_directory` and returns how many files were written. Individual
/// write failures are logged to stderr and skipped; serialization failures
/// abort the whole backup.
pub async fn backup_all_paths(&self, backup_directory: &str) -> Result<usize, String> {
    let paths = self.path_repo.get_all_paths().await?;

    // Create backup directory with timestamp
    let now = Utc::now();
    let timestamp = now.format("%Y%m%d_%H%M%S");
    let backup_dir = format!("{}/backup_{}", backup_directory, timestamp);

    fs::create_dir_all(&backup_dir).map_err(|e| {
        format!(
            "ERROR: Failed to create backup directory {}: {}",
            backup_dir, e
        )
    })?;

    let mut backed_up_count = 0;

    for path in &paths {
        let filename = format!("{}/path_{}.json", backup_dir, path.id);

        let json_content = serde_json::to_string_pretty(path).map_err(|e| {
            format!("ERROR: Failed to serialize path {} to JSON: {}", path.id, e)
        })?;

        // Best-effort per file: count and report, but keep going on failure.
        match fs::write(&filename, json_content) {
            Ok(()) => {
                backed_up_count += 1;
                println!("Backed up path {} to {}", path.id, filename);
            }
            Err(e) => {
                eprintln!("Failed to backup path {}: {}", path.id, e);
            }
        }
    }

    println!(
        "Backup completed: {}/{} paths backed up to {}",
        backed_up_count,
        paths.len(),
        backup_dir
    );

    Ok(backed_up_count)
}
|
||||
|
||||
/// Get statistics about a JSON file
|
||||
pub fn get_json_file_stats(&self, file_path: &str) -> Result<JsonFileStats, String> {
|
||||
let json_content = fs::read_to_string(file_path)
|
||||
.map_err(|e| format!("ERROR: Failed to read JSON file {}: {}", file_path, e))?;
|
||||
|
||||
let path = self.parse_path_from_json(&json_content)?;
|
||||
|
||||
let total_exercises = path.nodes.iter().map(|n| n.exercises.len()).sum();
|
||||
let exercise_types: std::collections::HashMap<String, usize> = path
|
||||
.nodes
|
||||
.iter()
|
||||
.flat_map(|n| &n.exercises)
|
||||
.fold(std::collections::HashMap::new(), |mut acc, ex| {
|
||||
*acc.entry(ex.ex_type.clone()).or_insert(0) += 1;
|
||||
acc
|
||||
});
|
||||
|
||||
Ok(JsonFileStats {
|
||||
path_id: path.id,
|
||||
title: path.title,
|
||||
node_count: path.nodes.len(),
|
||||
total_exercises,
|
||||
exercise_types,
|
||||
metadata_count: path.metadata.len(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Statistics about a JSON file
#[derive(Debug)]
pub struct JsonFileStats {
    // Id of the path described by the file.
    pub path_id: String,
    // Human-readable path title.
    pub title: String,
    // Number of nodes in the path.
    pub node_count: usize,
    // Total exercises across all nodes.
    pub total_exercises: usize,
    // Exercise count per `ex_type` value.
    pub exercise_types: std::collections::HashMap<String, usize>,
    // Number of metadata records attached to the path.
    pub metadata_count: usize,
}
|
||||
|
||||
impl JsonFileStats {
    /// Print a human-readable summary of these statistics to stdout.
    pub fn print_summary(&self) {
        println!("=== Path Statistics ===");
        println!("ID: {}", self.path_id);
        println!("Title: {}", self.title);
        println!("Nodes: {}", self.node_count);
        println!("Total Exercises: {}", self.total_exercises);
        println!("Metadata Records: {}", self.metadata_count);
        println!("Exercise Types:");
        // One line per exercise type; HashMap iteration order is unspecified.
        for (ex_type, count) in &self.exercise_types {
            println!("  {}: {}", ex_type, count);
        }
    }
}
|
||||
@@ -1,200 +1,35 @@
|
||||
use sqlx::{
|
||||
sqlite::{SqlitePool, SqliteRow},
|
||||
FromRow,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
use sqlx::{sqlite::SqlitePool, FromRow, Row};
|
||||
|
||||
use crate::models::{
|
||||
db_models::{
|
||||
exercise_db::ExerciseDb,
|
||||
node_db::NodeDb,
|
||||
path_db::{MetadataDb, PathDb},
|
||||
},
|
||||
exercise::Exercise,
|
||||
node::Node,
|
||||
path::{Metadata, Path},
|
||||
};
|
||||
use crate::models::{db_models::path_db::PathDb, path::Path};
|
||||
|
||||
use super::{metadata_repository::MetadataRepository, node_repository::NodeRepository};
|
||||
|
||||
pub struct PathRepository<'a> {
|
||||
pub pool: &'a SqlitePool,
|
||||
metadata_repo: MetadataRepository<'a>,
|
||||
node_repo: NodeRepository<'a>,
|
||||
}
|
||||
|
||||
impl<'a> PathRepository<'a> {
|
||||
pub fn new(pool: &'a SqlitePool) -> Self {
|
||||
Self {
|
||||
pool,
|
||||
metadata_repo: MetadataRepository::new(pool),
|
||||
node_repo: NodeRepository::new(pool),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn get_path_by_id(&self, id: i32) -> Result<Path, String> {
|
||||
// Get Path
|
||||
let path_result = sqlx::query("SELECT * FROM path WHERE id = ?")
|
||||
.bind(id)
|
||||
.fetch_all(self.pool)
|
||||
.await;
|
||||
let path_db = self.fetch_path_from_db(id).await?;
|
||||
let path_id = &path_db.id;
|
||||
|
||||
let path_result: Vec<SqliteRow> = match path_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
return Err(format!("ERROR: Failed to query Path db: {} ", e));
|
||||
}
|
||||
};
|
||||
|
||||
if path_result.len() > 1 {
|
||||
return Err(format!("ERROR: Multiple paths for ID {} found", id));
|
||||
} else if path_result.is_empty() {
|
||||
return Err(format!("ERROR: No Path with ID {} found", id));
|
||||
}
|
||||
|
||||
let path_result = match path_result.first() {
|
||||
Some(p) => match PathDb::from_row(p) {
|
||||
Ok(p) => p,
|
||||
Err(e) => {
|
||||
return Err(format!("ERROR: Could not parse Path: {}", e));
|
||||
}
|
||||
},
|
||||
None => return Err(format!("ERROR: No path for ID {} found", id)),
|
||||
};
|
||||
|
||||
// Get Metadata for path
|
||||
let metadata_result = sqlx::query("SELECT * From pathMetadata where pathId = ?")
|
||||
.bind(path_result.id.clone())
|
||||
.fetch_all(self.pool)
|
||||
.await;
|
||||
|
||||
let metadata_result = match metadata_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
return Err(format!("ERROR: Failed to query Metadata db: {}", e));
|
||||
}
|
||||
};
|
||||
|
||||
if metadata_result.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No metadata for path [{:?}] found",
|
||||
path_result
|
||||
));
|
||||
}
|
||||
|
||||
let metadata_result: Result<Vec<MetadataDb>, String> = metadata_result
|
||||
.iter()
|
||||
.map(|row| {
|
||||
MetadataDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Metadata struct: {}", e))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let metadata_result = match metadata_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
// Get nodes for path
|
||||
let node_result = sqlx::query("SELECT * From node where pathId = ?")
|
||||
.bind(path_result.id.clone())
|
||||
.fetch_all(self.pool)
|
||||
.await;
|
||||
|
||||
let node_result = match node_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
return Err(format!("ERROR: Failed to query Node db: {}", e));
|
||||
}
|
||||
};
|
||||
|
||||
if node_result.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No Nodes for path [{:?}] found",
|
||||
path_result
|
||||
));
|
||||
}
|
||||
|
||||
let node_result: Result<Vec<NodeDb>, String> = node_result
|
||||
.iter()
|
||||
.map(|row| {
|
||||
NodeDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Node struct: {}", e))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let node_result = match node_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
// Get exercises for path
|
||||
let exercise_result = sqlx::query("SELECT * From exercise where pathId = ?")
|
||||
.bind(path_result.id.clone())
|
||||
.fetch_all(self.pool)
|
||||
.await;
|
||||
|
||||
let exercise_result = match exercise_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
return Err(format!("ERROR: Failed to query Exercise db: {}", e));
|
||||
}
|
||||
};
|
||||
|
||||
if exercise_result.is_empty() {
|
||||
return Err(format!(
|
||||
"ERROR: No Exercise for path [{:?}] found",
|
||||
path_result
|
||||
));
|
||||
}
|
||||
|
||||
let exercise_result: Result<Vec<ExerciseDb>, String> = exercise_result
|
||||
.iter()
|
||||
.map(|row| {
|
||||
ExerciseDb::from_row(row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))
|
||||
})
|
||||
.collect();
|
||||
|
||||
let exercise_result = match exercise_result {
|
||||
Ok(r) => r,
|
||||
Err(e) => return Err(e),
|
||||
};
|
||||
|
||||
// Convert metadata
|
||||
let metadata: Vec<Metadata> = metadata_result
|
||||
.iter()
|
||||
.map(|m| Metadata {
|
||||
path_id: m.path_id.clone(),
|
||||
version: m.version.clone(),
|
||||
created_at: m.created_at.parse().unwrap(),
|
||||
updated_at: m.updated_at.parse().unwrap(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
// Group exercises by node_id
|
||||
let mut exercises_by_node: HashMap<u32, Vec<Exercise>> = HashMap::new();
|
||||
for exercise_db in exercise_result {
|
||||
let exercise = Exercise {
|
||||
id: exercise_db.id,
|
||||
ex_type: exercise_db.ex_type,
|
||||
content: exercise_db.content,
|
||||
node_id: exercise_db.node_id,
|
||||
};
|
||||
|
||||
exercises_by_node
|
||||
.entry(exercise_db.node_id)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(exercise);
|
||||
}
|
||||
|
||||
// Create nodes with their respective exercises
|
||||
let nodes: Vec<Node> = node_result
|
||||
.iter()
|
||||
.map(|node_db| Node {
|
||||
id: node_db.id,
|
||||
title: node_db.title.clone(),
|
||||
description: node_db.description.clone(),
|
||||
path_id: node_db.path_id.clone(),
|
||||
exercises: exercises_by_node
|
||||
.get(&node_db.id)
|
||||
.cloned()
|
||||
.unwrap_or_else(Vec::new),
|
||||
})
|
||||
.collect();
|
||||
let metadata = self.metadata_repo.get_metadata_by_path_id(path_id).await?;
|
||||
let nodes = self.node_repo.get_nodes_by_path_id(path_id).await?;
|
||||
|
||||
let path = Path {
|
||||
id: path_result.id,
|
||||
title: path_result.title,
|
||||
description: path_result.description,
|
||||
id: path_db.id,
|
||||
title: path_db.title,
|
||||
description: path_db.description,
|
||||
metadata,
|
||||
nodes,
|
||||
};
|
||||
@@ -203,15 +38,12 @@ impl<'a> PathRepository<'a> {
|
||||
}
|
||||
|
||||
pub async fn get_all_paths(&self) -> Result<Vec<Path>, String> {
|
||||
let rows = sqlx::query_as::<_, PathDb>("SELECT * FROM path")
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
let path_rows = self.fetch_all_paths_from_db().await?;
|
||||
let mut paths = Vec::new();
|
||||
|
||||
for path_db in rows {
|
||||
match self.get_path_by_id(path_db.id.parse().unwrap_or(0)).await {
|
||||
for path_db in path_rows {
|
||||
let path_id = path_db.id.parse().unwrap_or(0);
|
||||
match self.get_path_by_id(path_id).await {
|
||||
Ok(path) => paths.push(path),
|
||||
Err(e) => {
|
||||
eprintln!("Warning: Failed to load path {}: {}", path_db.id, e);
|
||||
@@ -224,8 +56,351 @@ impl<'a> PathRepository<'a> {
|
||||
Ok(paths)
|
||||
}
|
||||
|
||||
pub async fn save_path(&self, _path: Path) -> Result<(), String> {
|
||||
// TODO: Implement path saving logic
|
||||
todo!("Implement save_path functionality")
|
||||
/// Find all paths whose title contains `title_pattern` (SQL `LIKE` with
/// `%…%` wildcards).
///
/// Each matching row is expanded into a full `Path` via `get_path_by_id`;
/// paths that fail to load are logged and skipped rather than failing the
/// whole query.
pub async fn get_paths_by_title(&self, title_pattern: &str) -> Result<Vec<Path>, String> {
    let path_rows = sqlx::query_as::<_, PathDb>("SELECT * FROM path WHERE title LIKE ?")
        .bind(format!("%{}%", title_pattern))
        .fetch_all(self.pool)
        .await
        .map_err(|e| format!("ERROR: Failed to query paths by title: {}", e))?;

    let mut paths = Vec::new();

    for path_db in path_rows {
        // NOTE(review): a non-numeric id silently becomes 0 here — confirm
        // that path ids are always numeric strings.
        let path_id = path_db.id.parse().unwrap_or(0);
        match self.get_path_by_id(path_id).await {
            Ok(path) => paths.push(path),
            Err(e) => {
                eprintln!("Warning: Failed to load path {}: {}", path_db.id, e);
                continue;
            }
        }
    }

    Ok(paths)
}
|
||||
|
||||
pub async fn path_exists(&self, id: i32) -> Result<bool, String> {
|
||||
let count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM path WHERE id = ?")
|
||||
.bind(id)
|
||||
.fetch_one(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to check path existence: {}", e))?;
|
||||
|
||||
Ok(count.0 > 0)
|
||||
}
|
||||
|
||||
async fn fetch_path_from_db(&self, id: i32) -> Result<PathDb, String> {
|
||||
let path_row = sqlx::query("SELECT * FROM path WHERE id = ?")
|
||||
.bind(id)
|
||||
.fetch_optional(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query Path db: {}", e))?;
|
||||
|
||||
let path_row = path_row.ok_or_else(|| format!("ERROR: No Path with ID {} found", id))?;
|
||||
|
||||
let path_db = PathDb::from_row(&path_row)
|
||||
.map_err(|e| format!("ERROR: Could not parse Path: {}", e))?;
|
||||
|
||||
Ok(path_db)
|
||||
}
|
||||
|
||||
async fn fetch_all_paths_from_db(&self) -> Result<Vec<PathDb>, String> {
|
||||
sqlx::query_as::<_, PathDb>("SELECT * FROM path")
|
||||
.fetch_all(self.pool)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to query all paths: {}", e))
|
||||
}
|
||||
|
||||
pub async fn save_path(&self, path: Path) -> Result<String, String> {
|
||||
let mut transaction = self
|
||||
.pool
|
||||
.begin()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;
|
||||
|
||||
// Insert the main path record
|
||||
let result = sqlx::query("INSERT INTO path (id, title, description) VALUES (?, ?, ?)")
|
||||
.bind(&path.id)
|
||||
.bind(&path.title)
|
||||
.bind(&path.description)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save path: {}", e))?;
|
||||
|
||||
if result.rows_affected() == 0 {
|
||||
return Err("ERROR: Failed to insert path".to_string());
|
||||
}
|
||||
|
||||
// Save metadata
|
||||
if !path.metadata.is_empty() {
|
||||
for metadata in &path.metadata {
|
||||
sqlx::query("INSERT INTO pathMetadata (pathId, version, created_at, updated_at) VALUES (?, ?, ?, ?)")
|
||||
.bind(&metadata.path_id)
|
||||
.bind(&metadata.version)
|
||||
.bind(metadata.created_at.to_rfc3339())
|
||||
.bind(metadata.updated_at.to_rfc3339())
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save metadata: {}", e))?;
|
||||
}
|
||||
}
|
||||
|
||||
// Save nodes and their exercises
|
||||
if !path.nodes.is_empty() {
|
||||
for node in &path.nodes {
|
||||
// Insert node
|
||||
let node_result = sqlx::query(
|
||||
"INSERT INTO node (title, description, pathId) VALUES (?, ?, ?) RETURNING id",
|
||||
)
|
||||
.bind(&node.title)
|
||||
.bind(&node.description)
|
||||
.bind(&path.id)
|
||||
.fetch_one(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save node: {}", e))?;
|
||||
|
||||
let node_id: u32 = node_result
|
||||
.try_get("id")
|
||||
.map_err(|e| format!("ERROR: Failed to get node ID: {}", e))?;
|
||||
|
||||
// Insert exercises for this node
|
||||
for exercise in &node.exercises {
|
||||
sqlx::query("INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, ?)")
|
||||
.bind(&exercise.ex_type)
|
||||
.bind(&exercise.content)
|
||||
.bind(node_id)
|
||||
.bind(&path.id)
|
||||
.execute(&mut *transaction)
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to save exercise: {}", e))?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
transaction
|
||||
.commit()
|
||||
.await
|
||||
.map_err(|e| format!("ERROR: Failed to commit path transaction: {}", e))?;
|
||||
|
||||
Ok(path.id)
|
||||
}
|
||||
|
||||
    /// Replace an existing path and all of its child records inside a single
    /// transaction.
    ///
    /// Strategy: the `path` row is updated in place, while metadata, nodes and
    /// exercises are fully deleted and re-inserted from the given `Path`
    /// value — so node/exercise row ids are NOT preserved across an update.
    ///
    /// Returns `Err` with an "ERROR: ..." message when the path does not
    /// exist or any statement fails; the uncommitted transaction is rolled
    /// back on drop in that case, leaving the database untouched.
    pub async fn update_path(&self, path: Path) -> Result<(), String> {
        let mut transaction = self
            .pool
            .begin()
            .await
            .map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;

        // Update the main path record
        let result = sqlx::query("UPDATE path SET title = ?, description = ? WHERE id = ?")
            .bind(&path.title)
            .bind(&path.description)
            .bind(&path.id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to update path: {}", e))?;

        // rows_affected() == 0 means no row matched the WHERE clause,
        // i.e. the path id is unknown.
        if result.rows_affected() == 0 {
            return Err(format!("ERROR: No path found with ID {}", path.id));
        }

        // Update metadata - delete existing and insert new
        sqlx::query("DELETE FROM pathMetadata WHERE pathId = ?")
            .bind(&path.id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete existing metadata: {}", e))?;

        for metadata in &path.metadata {
            // Timestamps are serialized as RFC 3339 strings.
            sqlx::query("INSERT INTO pathMetadata (pathId, version, created_at, updated_at) VALUES (?, ?, ?, ?)")
                .bind(&metadata.path_id)
                .bind(&metadata.version)
                .bind(metadata.created_at.to_rfc3339())
                .bind(metadata.updated_at.to_rfc3339())
                .execute(&mut *transaction)
                .await
                .map_err(|e| format!("ERROR: Failed to save updated metadata: {}", e))?;
        }

        // Update nodes and exercises - delete existing and insert new
        // First delete all exercises for this path
        // (exercises reference nodes, so they must be removed first).
        sqlx::query("DELETE FROM exercise WHERE pathId = ?")
            .bind(&path.id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete existing exercises: {}", e))?;

        // Then delete all nodes for this path
        sqlx::query("DELETE FROM node WHERE pathId = ?")
            .bind(&path.id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete existing nodes: {}", e))?;

        // Insert updated nodes and exercises
        for node in &path.nodes {
            // Insert node; RETURNING id yields the freshly assigned row id so
            // this node's exercises can reference it below.
            let node_result = sqlx::query(
                "INSERT INTO node (title, description, pathId) VALUES (?, ?, ?) RETURNING id",
            )
            .bind(&node.title)
            .bind(&node.description)
            .bind(&path.id)
            .fetch_one(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to save updated node: {}", e))?;

            let node_id: u32 = node_result
                .try_get("id")
                .map_err(|e| format!("ERROR: Failed to get updated node ID: {}", e))?;

            // Insert exercises for this node
            for exercise in &node.exercises {
                sqlx::query(
                    "INSERT INTO exercise (ex_type, content, nodeId, pathId) VALUES (?, ?, ?, ?)",
                )
                .bind(&exercise.ex_type)
                .bind(&exercise.content)
                .bind(node_id)
                .bind(&path.id)
                .execute(&mut *transaction)
                .await
                .map_err(|e| format!("ERROR: Failed to save updated exercise: {}", e))?;
            }
        }

        // Make all changes visible atomically.
        transaction
            .commit()
            .await
            .map_err(|e| format!("ERROR: Failed to commit path update transaction: {}", e))?;

        Ok(())
    }
|
||||
|
||||
    /// Delete a path (looked up by numeric id) together with all of its
    /// child records, inside a single transaction.
    ///
    /// Returns `Err` when no `path` row matches `path_id` or when any
    /// statement fails; the uncommitted transaction is rolled back on drop.
    pub async fn delete_path(&self, path_id: i32) -> Result<(), String> {
        let mut transaction = self
            .pool
            .begin()
            .await
            .map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;

        // NOTE(review): child rows are matched via the stringified id while
        // the final `path` delete binds the raw i32 — confirm the pathId
        // column affinity makes both comparisons match the same rows.
        let path_id_str = path_id.to_string();

        // Delete in order: exercises -> nodes -> metadata -> path
        // Delete exercises
        sqlx::query("DELETE FROM exercise WHERE pathId = ?")
            .bind(&path_id_str)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path exercises: {}", e))?;

        // Delete nodes
        sqlx::query("DELETE FROM node WHERE pathId = ?")
            .bind(&path_id_str)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path nodes: {}", e))?;

        // Delete metadata
        sqlx::query("DELETE FROM pathMetadata WHERE pathId = ?")
            .bind(&path_id_str)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path metadata: {}", e))?;

        // Delete path
        let result = sqlx::query("DELETE FROM path WHERE id = ?")
            .bind(path_id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path: {}", e))?;

        // Zero affected rows means the id never existed; bail out without
        // committing so the child deletes above are rolled back too.
        if result.rows_affected() == 0 {
            return Err(format!("ERROR: No path found with ID {}", path_id));
        }

        transaction
            .commit()
            .await
            .map_err(|e| format!("ERROR: Failed to commit path deletion transaction: {}", e))?;

        Ok(())
    }
|
||||
|
||||
    /// Delete a path (looked up by its string id) together with all of its
    /// child records, inside a single transaction.
    ///
    /// Same deletion strategy as [`delete_path`], but binds the id as text
    /// everywhere. Returns `Err` when no `path` row matches `path_id` or any
    /// statement fails; the uncommitted transaction is rolled back on drop.
    pub async fn delete_path_by_string_id(&self, path_id: &str) -> Result<(), String> {
        let mut transaction = self
            .pool
            .begin()
            .await
            .map_err(|e| format!("ERROR: Failed to begin transaction: {}", e))?;

        // Delete in order: exercises -> nodes -> metadata -> path
        // Delete exercises
        sqlx::query("DELETE FROM exercise WHERE pathId = ?")
            .bind(path_id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path exercises: {}", e))?;

        // Delete nodes
        sqlx::query("DELETE FROM node WHERE pathId = ?")
            .bind(path_id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path nodes: {}", e))?;

        // Delete metadata
        sqlx::query("DELETE FROM pathMetadata WHERE pathId = ?")
            .bind(path_id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path metadata: {}", e))?;

        // Delete path
        let result = sqlx::query("DELETE FROM path WHERE id = ?")
            .bind(path_id)
            .execute(&mut *transaction)
            .await
            .map_err(|e| format!("ERROR: Failed to delete path: {}", e))?;

        // Unknown id: abort without committing so every delete rolls back.
        if result.rows_affected() == 0 {
            return Err(format!("ERROR: No path found with ID {}", path_id));
        }

        transaction
            .commit()
            .await
            .map_err(|e| format!("ERROR: Failed to commit path deletion transaction: {}", e))?;

        Ok(())
    }
|
||||
|
||||
pub async fn clone_path(
|
||||
&self,
|
||||
source_path_id: i32,
|
||||
new_path_id: &str,
|
||||
new_title: &str,
|
||||
) -> Result<String, String> {
|
||||
// Get the source path
|
||||
let source_path = self.get_path_by_id(source_path_id).await?;
|
||||
|
||||
// Create new path with updated ID and title
|
||||
let mut new_path = source_path;
|
||||
new_path.id = new_path_id.to_string();
|
||||
new_path.title = new_title.to_string();
|
||||
|
||||
// Update metadata path_id references
|
||||
for metadata in &mut new_path.metadata {
|
||||
metadata.path_id = new_path_id.to_string();
|
||||
}
|
||||
|
||||
// Update node path_id references
|
||||
for node in &mut new_path.nodes {
|
||||
node.path_id = new_path_id.to_string();
|
||||
}
|
||||
|
||||
// Save the cloned path
|
||||
self.save_path(new_path).await
|
||||
}
|
||||
}
|
||||
|
||||
388
src-tauri/src/repositories/repository_manager.rs
Normal file
388
src-tauri/src/repositories/repository_manager.rs
Normal file
@@ -0,0 +1,388 @@
|
||||
use sqlx::sqlite::SqlitePool;
|
||||
|
||||
use super::{
|
||||
exercise_repository::ExerciseRepository, metadata_repository::MetadataRepository,
|
||||
node_repository::NodeRepository, path_repository::PathRepository,
|
||||
};
|
||||
|
||||
/// Repository manager that coordinates access to all repositories
/// and provides a single entry point for database operations.
pub struct RepositoryManager<'a> {
    // Shared connection pool; also borrowed by each repository below.
    pool: &'a SqlitePool,
    // One repository per entity, all backed by the same pool.
    path_repo: PathRepository<'a>,
    metadata_repo: MetadataRepository<'a>,
    node_repo: NodeRepository<'a>,
    exercise_repo: ExerciseRepository<'a>,
}
|
||||
|
||||
impl<'a> RepositoryManager<'a> {
    /// Build a manager over `pool`, constructing one repository per entity.
    pub fn new(pool: &'a SqlitePool) -> Self {
        Self {
            pool,
            path_repo: PathRepository::new(pool),
            metadata_repo: MetadataRepository::new(pool),
            node_repo: NodeRepository::new(pool),
            exercise_repo: ExerciseRepository::new(pool),
        }
    }

    /// Get the path repository
    pub fn paths(&self) -> &PathRepository<'a> {
        &self.path_repo
    }

    /// Get the metadata repository
    pub fn metadata(&self) -> &MetadataRepository<'a> {
        &self.metadata_repo
    }

    /// Get the node repository
    pub fn nodes(&self) -> &NodeRepository<'a> {
        &self.node_repo
    }

    /// Get the exercises repository
    pub fn exercises(&self) -> &ExerciseRepository<'a> {
        &self.exercise_repo
    }

    /// Get the database pool
    pub fn pool(&self) -> &SqlitePool {
        self.pool
    }

    /// Check database health by performing a simple query.
    ///
    /// Returns `Ok(true)` when `SELECT 1` yields a row, `Err` with a
    /// message when the query itself fails.
    pub async fn health_check(&self) -> Result<bool, String> {
        let result = sqlx::query("SELECT 1")
            .fetch_optional(self.pool)
            .await
            .map_err(|e| format!("Database health check failed: {}", e))?;

        Ok(result.is_some())
    }

    /// Begin a database transaction.
    /// This is useful for operations that need to be atomic across multiple repositories.
    pub async fn begin_transaction(&self) -> Result<sqlx::Transaction<'_, sqlx::Sqlite>, String> {
        self.pool
            .begin()
            .await
            .map_err(|e| format!("Failed to begin transaction: {}", e))
    }

    /// Get database statistics.
    ///
    /// Counts the rows of the `path`, `node`, `exercise` and `pathMetadata`
    /// tables with one `COUNT(*)` query each (four sequential round trips).
    pub async fn get_stats(&self) -> Result<DatabaseStats, String> {
        let path_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM path")
            .fetch_one(self.pool)
            .await
            .map_err(|e| format!("Failed to count paths: {}", e))?;

        let node_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM node")
            .fetch_one(self.pool)
            .await
            .map_err(|e| format!("Failed to count nodes: {}", e))?;

        let exercise_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM exercise")
            .fetch_one(self.pool)
            .await
            .map_err(|e| format!("Failed to count exercises: {}", e))?;

        let metadata_count: (i64,) = sqlx::query_as("SELECT COUNT(*) FROM pathMetadata")
            .fetch_one(self.pool)
            .await
            .map_err(|e| format!("Failed to count metadata: {}", e))?;

        Ok(DatabaseStats {
            path_count: path_count.0,
            node_count: node_count.0,
            exercise_count: exercise_count.0,
            metadata_count: metadata_count.0,
        })
    }
}
|
||||
|
||||
/// Database statistics structure.
///
/// Per-table row counts, as collected by `RepositoryManager::get_stats`.
#[derive(Debug, Clone)]
pub struct DatabaseStats {
    pub path_count: i64,
    pub node_count: i64,
    pub exercise_count: i64,
    pub metadata_count: i64,
}
|
||||
|
||||
impl DatabaseStats {
|
||||
pub fn total_records(&self) -> i64 {
|
||||
self.path_count + self.node_count + self.exercise_count + self.metadata_count
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.total_records() == 0
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> RepositoryManager<'a> {
    /// Advanced operations combining multiple repositories

    /// Import a path from JSON string with full validation.
    /// Delegates to `PathJsonUtils`, which is built over the path repository.
    pub async fn import_path_from_json(&self, json_content: &str) -> Result<String, String> {
        let json_utils = super::path_json_utils::PathJsonUtils::new(&self.path_repo);
        json_utils.import_from_json(json_content).await
    }

    /// Export a path to JSON string.
    pub async fn export_path_to_json(&self, path_id: i32) -> Result<String, String> {
        let json_utils = super::path_json_utils::PathJsonUtils::new(&self.path_repo);
        json_utils.export_to_json(path_id).await
    }

    /// Clone a path with all its dependencies.
    /// Thin wrapper over `PathRepository::clone_path`; returns the new id.
    pub async fn clone_path_complete(
        &self,
        source_path_id: i32,
        new_path_id: &str,
        new_title: &str,
    ) -> Result<String, String> {
        self.path_repo
            .clone_path(source_path_id, new_path_id, new_title)
            .await
    }

    /// Get comprehensive path statistics.
    ///
    /// Loads the full path once and derives all numbers in memory: node and
    /// exercise counts, a per-`ex_type` histogram, and the average number of
    /// exercises per node (0.0 when the path has no nodes).
    pub async fn get_path_statistics(&self, path_id: i32) -> Result<PathStatistics, String> {
        let path = self.path_repo.get_path_by_id(path_id).await?;

        let total_exercises = path.nodes.iter().map(|n| n.exercises.len()).sum();
        // Histogram: ex_type -> number of occurrences across all nodes.
        let exercise_types: std::collections::HashMap<String, usize> = path
            .nodes
            .iter()
            .flat_map(|n| &n.exercises)
            .fold(std::collections::HashMap::new(), |mut acc, ex| {
                *acc.entry(ex.ex_type.clone()).or_insert(0) += 1;
                acc
            });

        // Guard against division by zero for node-less paths.
        let avg_exercises_per_node = if path.nodes.is_empty() {
            0.0
        } else {
            total_exercises as f64 / path.nodes.len() as f64
        };

        Ok(PathStatistics {
            path_id: path.id,
            title: path.title,
            description: path.description,
            node_count: path.nodes.len(),
            total_exercises,
            exercise_types,
            metadata_count: path.metadata.len(),
            avg_exercises_per_node,
        })
    }

    /// Validate path integrity across all repositories.
    ///
    /// Returns the list of human-readable issues found (empty = no issues
    /// detected). Only a hard failure while querying yields `Err`.
    pub async fn validate_path_integrity(&self, path_id: i32) -> Result<Vec<String>, String> {
        let mut issues = Vec::new();

        // Check if path exists; a missing path short-circuits all other checks.
        if !self.path_repo.path_exists(path_id).await? {
            issues.push(format!("Path with ID {} does not exist", path_id));
            return Ok(issues);
        }

        let path = self.path_repo.get_path_by_id(path_id).await?;

        // Check metadata consistency: present, and back-referencing this path.
        if path.metadata.is_empty() {
            issues.push("Path has no metadata".to_string());
        } else {
            for metadata in &path.metadata {
                if metadata.path_id != path.id {
                    issues.push(format!(
                        "Metadata path_id '{}' doesn't match path ID '{}'",
                        metadata.path_id, path.id
                    ));
                }
            }
        }

        // Check nodes consistency
        if path.nodes.is_empty() {
            issues.push("Path has no nodes".to_string());
        } else {
            for node in &path.nodes {
                if node.path_id != path.id {
                    issues.push(format!(
                        "Node {} path_id '{}' doesn't match path ID '{}'",
                        node.id, node.path_id, path.id
                    ));
                }

                // Check exercises consistency
                for exercise in &node.exercises {
                    if exercise.node_id != node.id {
                        issues.push(format!(
                            "Exercise {} node_id {} doesn't match node ID {}",
                            exercise.id, exercise.node_id, node.id
                        ));
                    }

                    // Validate exercise content is valid JSON
                    if let Err(e) = serde_json::from_str::<serde_json::Value>(&exercise.content) {
                        issues.push(format!(
                            "Exercise {} has invalid JSON content: {}",
                            exercise.id, e
                        ));
                    }
                }
            }
        }

        Ok(issues)
    }

    /// Bulk operations for multiple paths.
    ///
    /// Runs `validate_path_integrity` over every stored path and returns a
    /// map of path id -> issues; paths with no issues are omitted. Path ids
    /// that don't parse as i32 are reported as "Invalid path ID format".
    pub async fn validate_all_paths(
        &self,
    ) -> Result<std::collections::HashMap<String, Vec<String>>, String> {
        let paths = self.path_repo.get_all_paths().await?;
        let mut results = std::collections::HashMap::new();

        for path in paths {
            if let Ok(path_id) = path.id.parse::<i32>() {
                match self.validate_path_integrity(path_id).await {
                    Ok(issues) => {
                        if !issues.is_empty() {
                            results.insert(path.id, issues);
                        }
                    }
                    Err(e) => {
                        // Validation itself failed: surface the error as an issue
                        // instead of aborting the whole bulk run.
                        results.insert(path.id, vec![format!("Validation failed: {}", e)]);
                    }
                }
            } else {
                results.insert(path.id.clone(), vec!["Invalid path ID format".to_string()]);
            }
        }

        Ok(results)
    }

    /// Search paths by content.
    ///
    /// Case-insensitive substring search over title (+10 relevance),
    /// description (+5), node titles (+3), node descriptions (+2) and
    /// exercise content (+1). Paths with score 0 are dropped; results are
    /// sorted by descending relevance.
    pub async fn search_paths(&self, query: &str) -> Result<Vec<SearchResult>, String> {
        let paths = self.path_repo.get_all_paths().await?;
        let mut results = Vec::new();
        let query_lower = query.to_lowercase();

        for path in paths {
            let mut relevance_score = 0;
            let mut matching_content = Vec::new();

            // Check title
            if path.title.to_lowercase().contains(&query_lower) {
                relevance_score += 10;
                matching_content.push(format!("Title: {}", path.title));
            }

            // Check description
            if path.description.to_lowercase().contains(&query_lower) {
                relevance_score += 5;
                matching_content.push(format!("Description: {}", path.description));
            }

            // Check nodes
            for node in &path.nodes {
                if node.title.to_lowercase().contains(&query_lower) {
                    relevance_score += 3;
                    matching_content.push(format!("Node: {}", node.title));
                }

                if node.description.to_lowercase().contains(&query_lower) {
                    relevance_score += 2;
                    matching_content.push(format!("Node description: {}", node.description));
                }

                // Check exercises
                for exercise in &node.exercises {
                    if exercise.content.to_lowercase().contains(&query_lower) {
                        relevance_score += 1;
                        matching_content
                            .push(format!("Exercise ({}): {}", exercise.ex_type, exercise.id));
                    }
                }
            }

            if relevance_score > 0 {
                results.push(SearchResult {
                    path_id: path.id,
                    title: path.title,
                    relevance_score,
                    matching_content,
                });
            }
        }

        // Sort by relevance score (descending)
        results.sort_by(|a, b| b.relevance_score.cmp(&a.relevance_score));

        Ok(results)
    }
}
|
||||
|
||||
/// Comprehensive path statistics.
///
/// Produced by `RepositoryManager::get_path_statistics`.
#[derive(Debug, Clone)]
pub struct PathStatistics {
    pub path_id: String,
    pub title: String,
    pub description: String,
    pub node_count: usize,
    pub total_exercises: usize,
    // Histogram: exercise type -> number of occurrences.
    pub exercise_types: std::collections::HashMap<String, usize>,
    pub metadata_count: usize,
    // 0.0 when the path has no nodes.
    pub avg_exercises_per_node: f64,
}
|
||||
|
||||
impl PathStatistics {
|
||||
pub fn print_detailed_summary(&self) {
|
||||
println!("=== Detailed Path Statistics ===");
|
||||
println!("ID: {}", self.path_id);
|
||||
println!("Title: {}", self.title);
|
||||
println!("Description: {}", self.description);
|
||||
println!("Nodes: {}", self.node_count);
|
||||
println!("Total Exercises: {}", self.total_exercises);
|
||||
println!(
|
||||
"Average Exercises per Node: {:.2}",
|
||||
self.avg_exercises_per_node
|
||||
);
|
||||
println!("Metadata Records: {}", self.metadata_count);
|
||||
println!("Exercise Types:");
|
||||
for (ex_type, count) in &self.exercise_types {
|
||||
println!(
|
||||
" {}: {} ({:.1}%)",
|
||||
ex_type,
|
||||
count,
|
||||
(*count as f64 / self.total_exercises as f64) * 100.0
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Search result for path content search.
///
/// Produced by `RepositoryManager::search_paths`.
#[derive(Debug, Clone)]
pub struct SearchResult {
    pub path_id: String,
    pub title: String,
    // Weighted sum of matches: title 10, description 5, node title 3,
    // node description 2, exercise content 1.
    pub relevance_score: i32,
    // Human-readable snippets describing where the query matched.
    pub matching_content: Vec<String>,
}
|
||||
|
||||
impl SearchResult {
|
||||
pub fn print_summary(&self) {
|
||||
println!("=== Search Result ===");
|
||||
println!("Path: {} - {}", self.path_id, self.title);
|
||||
println!("Relevance Score: {}", self.relevance_score);
|
||||
println!("Matching Content:");
|
||||
for content in &self.matching_content {
|
||||
println!(" - {}", content);
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user