Added database models and models for continuous use. Also changed get_all_paths accordingly. Nothing is tested yet!

2025-11-01 20:47:04 +01:00
parent d02c1fa314
commit fdf335b346
13 changed files with 280 additions and 51 deletions
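
For orientation before the file diffs: the new db_models structs (PathDb, NodeDb, ExerciseDb, MetadataDb) mirror the SQLite rows, and PathRepository assembles them into the domain Path, Node and Exercise types. Below is a minimal sketch of how the reworked get_all_paths might be driven; the surrounding function, the database URL and the import path of PathRepository are illustrative assumptions, not part of this commit.

use sqlx::sqlite::SqlitePoolOptions;

// Sketch only: PathRepository and the domain models are assumed to be in scope;
// the database URL is a placeholder, not something this commit introduces.
async fn list_paths() -> Result<(), String> {
    let pool = SqlitePoolOptions::new()
        .connect("sqlite:app.db")
        .await
        .map_err(|e| e.to_string())?;
    let repo = PathRepository { pool: &pool };
    // get_all_paths now loads every path row and hydrates it via get_path_by_id.
    for path in repo.get_all_paths().await? {
        println!(
            "{}: {} nodes, {} metadata rows",
            path.title,
            path.nodes.len(),
            path.metadata.len()
        );
    }
    Ok(())
}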

View File

@@ -3,10 +3,10 @@
"devenv": {
"locked": {
"dir": "src/modules",
"lastModified": 1761343822,
"lastModified": 1761922975,
"owner": "cachix",
"repo": "devenv",
"rev": "679d2951cee2d09da3c732d00b320ce752d21ee0",
"rev": "c9f0b47815a4895fadac87812de8a4de27e0ace1",
"type": "github"
},
"original": {
@@ -19,10 +19,10 @@
"flake-compat": {
"flake": false,
"locked": {
"lastModified": 1747046372,
"lastModified": 1761588595,
"owner": "edolstra",
"repo": "flake-compat",
"rev": "9100a0f413b0c601e0533d1d94ffd501ce2e7885",
"rev": "f387cd2afec9419c8ee37694406ca490c3f34ee5",
"type": "github"
},
"original": {
@@ -74,10 +74,10 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1758532697,
"lastModified": 1761313199,
"owner": "cachix",
"repo": "devenv-nixpkgs",
"rev": "207a4cb0e1253c7658c6736becc6eb9cace1f25f",
"rev": "d1c30452ebecfc55185ae6d1c983c09da0c274ff",
"type": "github"
},
"original": {

View File

@@ -1,2 +1,2 @@
DROP TABLE pathVersions;
DROP TABLE path;
DROP TABLE pathMetadata;
DROP TABLE path;

View File

@@ -1,12 +1,14 @@
CREATE TABLE path (
id integer primary key,
id text primary key,
title text,
description text,
versions text
);
CREATE TABLE pathVersions (
pathId integer references path(id),
versionNumber string,
CREATE TABLE pathMetadata (
pathId text references path(id),
versionNumber text,
createdAt text,
updatedAt text,
primary key (pathId, versionNumber)
);
);
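
With the path id now text and versions tracked in pathMetadata instead of the old pathVersions table, rows could be seeded as in the sketch below. This is illustrative only, assuming path keeps id, title and description as its insertable columns; the id, version and timestamp values are invented.

// Illustrative only: populates the revised schema with made-up values.
async fn seed_example(pool: &sqlx::SqlitePool) -> Result<(), sqlx::Error> {
    sqlx::query("INSERT INTO path (id, title, description) VALUES (?, ?, ?)")
        .bind("rust-basics")
        .bind("Rust Basics")
        .bind("An introductory learning path")
        .execute(pool)
        .await?;
    // RFC 3339 strings keep the createdAt/updatedAt columns parseable as DateTime<Utc>.
    sqlx::query(
        "INSERT INTO pathMetadata (pathId, versionNumber, createdAt, updatedAt) \
         VALUES (?, ?, ?, ?)",
    )
    .bind("rust-basics")
    .bind("1.0.0")
    .bind("2025-11-01T00:00:00Z")
    .bind("2025-11-01T00:00:00Z")
    .execute(pool)
    .await?;
    Ok(())
}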

View File

@@ -1,4 +1,3 @@
// Learn more about Tauri commands at https://tauri.app/develop/calling-rust/
use sqlx::{migrate::MigrateDatabase, sqlite::SqlitePoolOptions, Pool, Sqlite};
use tauri::{App, Manager};

View File

@@ -0,0 +1,7 @@
#[derive(Debug, sqlx::FromRow)]
pub struct ExerciseDb {
pub id: u16,
pub node_id: u32,
pub ex_type: String,
pub content: String,
}

View File

@@ -0,0 +1,3 @@
pub mod path_db;
pub mod node_db;
pub mod exercise_db;

View File

@@ -0,0 +1,7 @@
#[derive(Debug, sqlx::FromRow)]
pub struct NodeDb {
pub id: u32,
pub title: String,
pub description: String,
pub path_id: String
}

View File

@@ -0,0 +1,17 @@
use chrono::{DateTime, Utc};
#[derive(sqlx::FromRow, Debug)]
pub struct PathDb {
pub id: String,
pub title: String,
pub description: String,
}
#[derive(Debug, sqlx::FromRow)]
pub struct MetadataDb {
pub path_id : String,
pub version: String,
pub created_at: String,
pub updated_at: String,
}

View File

@@ -1,6 +1,7 @@
#[derive(Debug, Clone)]
pub struct Exercise {
id: u16,
ex_type: String,
content: String,
node_id: u32
pub id: u16,
pub ex_type: String,
pub content: String,
pub node_id: u32,
}

View File

@@ -1,3 +1,5 @@
pub mod path;
pub mod node;
pub mod exercise;
pub mod db_models;

View File

@@ -1,7 +1,10 @@
#[derive(Debug, sqlx::FromRow)]
pub struct Node {
id: u32,
title: String,
description: String,
path_id: String
use crate::models::exercise::Exercise;
#[derive(Debug)]
pub struct Node{
pub id: u32,
pub title: String,
pub description: String,
pub path_id: String,
pub exercises: Vec<Exercise>,
}

View File

@@ -1,21 +1,21 @@
use chrono::{DateTime, Utc};
use sqlx::sqlite::SqliteRow;
use sqlx::Row;
use crate::models::node::Node;
#[derive(sqlx::FromRow, Debug)]
#[derive(Debug)]
pub struct Path {
id: String,
title: String,
description: String,
nodes: Vec<Node>,
metadata: Metadata,
pub id: String,
pub title: String,
pub description: String,
pub metadata: Vec<Metadata>,
pub nodes: Vec<Node>,
}
#[derive(Debug, sqlx::FromRow)]
#[derive(Debug)]
pub struct Metadata {
versions: Vec<String>,
created_at: DateTime<Utc>,
updated_at: DateTime<Utc>,
pub path_id : String,
pub version: String,
pub created_at: DateTime<Utc>,
pub updated_at: DateTime<Utc>,
}

View File

@@ -1,6 +1,19 @@
use sqlx::{sqlite::SqlitePool, FromRow};
use sqlx::{
sqlite::{SqlitePool, SqliteRow},
FromRow,
};
use std::collections::HashMap;
use crate::models::path::Path;
use crate::models::{
db_models::{
exercise_db::ExerciseDb,
node_db::NodeDb,
path_db::{MetadataDb, PathDb},
},
exercise::Exercise,
node::Node,
path::{Metadata, Path},
};
pub struct PathRepository<'a> {
pub pool: &'a SqlitePool,
@@ -8,36 +21,211 @@ pub struct PathRepository<'a> {
impl<'a> PathRepository<'a> {
pub async fn get_path_by_id(&self, id: i32) -> Result<Path, String> {
let result = sqlx::query("SELECT * FROM path WHERE id = ?")
// Get Path
let path_result = sqlx::query("SELECT * FROM path WHERE id = ?")
.bind(id)
.fetch_all(self.pool)
.await
.map_err(|e| format!("ERROR: Failed to query Path db: {} ", e))
.unwrap();
.await;
if result.len() > 1 {
let path_result: Vec<SqliteRow> = match path_result {
Ok(r) => r,
Err(e) => {
return Err(format!("ERROR: Failed to query Path db: {} ", e));
}
};
if path_result.len() > 1 {
return Err(format!("ERROR: Multiple paths for ID {} found", id));
} else if result.is_empty(){
} else if path_result.is_empty() {
return Err(format!("ERROR: No Path with ID {} found", id));
}
let path = match result.first() {
Some(p) => match Path::from_row(p) {
Ok(p) => {p},
Err(e) => {return Err(format!("ERROR: Could not parse Path: {}", e));},
},
let path_result = match path_result.first() {
Some(p) => match PathDb::from_row(p) {
Ok(p) => p,
Err(e) => {
return Err(format!("ERROR: Could not parse Path: {}", e));
}
},
None => return Err(format!("ERROR: No path for ID {} found", id)),
};
// Get Metadata for path
let metadata_result = sqlx::query("SELECT * From pathMetadata where pathId = ?")
.bind(path_result.id.clone())
.fetch_all(self.pool)
.await;
let metadata_result = match metadata_result {
Ok(r) => r,
Err(e) => {
return Err(format!("ERROR: Failed to query Metadata db: {}", e));
}
};
if metadata_result.is_empty() {
return Err(format!(
"ERROR: No metadata for path [{:?}] found",
path_result
));
}
let metadata_result: Result<Vec<MetadataDb>, String> = metadata_result
.iter()
.map(|row| {
MetadataDb::from_row(row)
.map_err(|e| format!("ERROR: Could not parse Metadata struct: {}", e))
})
.collect();
let metadata_result = match metadata_result {
Ok(r) => r,
Err(e) => return Err(e),
};
// Get nodes for path
let node_result = sqlx::query("SELECT * From node where pathId = ?")
.bind(path_result.id.clone())
.fetch_all(self.pool)
.await;
let node_result = match node_result {
Ok(r) => r,
Err(e) => {
return Err(format!("ERROR: Failed to query Node db: {}", e));
}
};
if node_result.is_empty() {
return Err(format!(
"ERROR: No Nodes for path [{:?}] found",
path_result
));
}
let node_result: Result<Vec<NodeDb>, String> = node_result
.iter()
.map(|row| {
NodeDb::from_row(row)
.map_err(|e| format!("ERROR: Could not parse Node struct: {}", e))
})
.collect();
let node_result = match node_result {
Ok(r) => r,
Err(e) => return Err(e),
};
// Get exercises for path
let exercise_result = sqlx::query("SELECT * From exercise where pathId = ?")
.bind(path_result.id.clone())
.fetch_all(self.pool)
.await;
let exercise_result = match exercise_result {
Ok(r) => r,
Err(e) => {
return Err(format!("ERROR: Failed to query Exercise db: {}", e));
}
};
if exercise_result.is_empty() {
return Err(format!(
"ERROR: No Exercise for path [{:?}] found",
path_result
));
}
let exercise_result: Result<Vec<ExerciseDb>, String> = exercise_result
.iter()
.map(|row| {
ExerciseDb::from_row(row)
.map_err(|e| format!("ERROR: Could not parse Exercise struct: {}", e))
})
.collect();
let exercise_result = match exercise_result {
Ok(r) => r,
Err(e) => return Err(e),
};
// Convert metadata
let metadata: Vec<Metadata> = metadata_result
.iter()
.map(|m| Metadata {
path_id: m.path_id.clone(),
version: m.version.clone(),
created_at: m.created_at.parse().unwrap(),
updated_at: m.updated_at.parse().unwrap(),
})
.collect();
// Group exercises by node_id
let mut exercises_by_node: HashMap<u32, Vec<Exercise>> = HashMap::new();
for exercise_db in exercise_result {
let exercise = Exercise {
id: exercise_db.id,
ex_type: exercise_db.ex_type,
content: exercise_db.content,
node_id: exercise_db.node_id,
};
exercises_by_node
.entry(exercise_db.node_id)
.or_insert_with(Vec::new)
.push(exercise);
}
// Create nodes with their respective exercises
let nodes: Vec<Node> = node_result
.iter()
.map(|node_db| Node {
id: node_db.id,
title: node_db.title.clone(),
description: node_db.description.clone(),
path_id: node_db.path_id.clone(),
exercises: exercises_by_node
.get(&node_db.id)
.cloned()
.unwrap_or_else(Vec::new),
})
.collect();
let path = Path {
id: path_result.id,
title: path_result.title,
description: path_result.description,
metadata,
nodes,
};
Ok(path)
}
pub async fn get_all_paths(&self) -> Result<Vec<Path>, String> {
let rows = sqlx::query_as::<_, Path>("SELECT * FROM paths")
let rows = sqlx::query_as::<_, PathDb>("SELECT * FROM path")
.fetch_all(self.pool)
.await
.map_err(|e| e.to_string())?;
Ok(rows)
let mut paths = Vec::new();
for path_db in rows {
match self.get_path_by_id(path_db.id.parse().unwrap_or(0)).await {
Ok(path) => paths.push(path),
Err(e) => {
eprintln!("Warning: Failed to load path {}: {}", path_db.id, e);
// Continue with other paths instead of failing completely
continue;
}
}
}
Ok(paths)
}
pub async fn save_path(&self, _path: Path) -> Result<(), String> {
// TODO: Implement path saving logic
todo!("Implement save_path functionality")
}
}
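
save_path is still a todo!() above. Purely as a sketch of one possible shape, and not what this commit implements, the path row and its metadata could be upserted inside a transaction; nodes and exercises are left out for brevity, and the helper name and SQL are assumptions.

// Sketch only (sqlx 0.7+ executor syntax): upsert the path row and its
// metadata in one transaction. Not part of this commit.
pub async fn save_path_sketch(pool: &sqlx::SqlitePool, path: &Path) -> Result<(), String> {
    let mut tx = pool.begin().await.map_err(|e| e.to_string())?;
    sqlx::query(
        "INSERT INTO path (id, title, description) VALUES (?, ?, ?) \
         ON CONFLICT(id) DO UPDATE SET title = excluded.title, description = excluded.description",
    )
    .bind(&path.id)
    .bind(&path.title)
    .bind(&path.description)
    .execute(&mut *tx)
    .await
    .map_err(|e| e.to_string())?;
    for m in &path.metadata {
        sqlx::query(
            "INSERT OR REPLACE INTO pathMetadata (pathId, versionNumber, createdAt, updatedAt) \
             VALUES (?, ?, ?, ?)",
        )
        .bind(&m.path_id)
        .bind(&m.version)
        .bind(m.created_at.to_rfc3339())
        .bind(m.updated_at.to_rfc3339())
        .execute(&mut *tx)
        .await
        .map_err(|e| e.to_string())?;
    }
    tx.commit().await.map_err(|e| e.to_string())
}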