From 72fb3b11ee870f406b28aa2b2c77561e43dab199 Mon Sep 17 00:00:00 2001
From: "Elf M. Sternberg"
Date: Mon, 26 Oct 2020 18:54:56 -0700
Subject: [PATCH] FEAT Delete & Update note is now complete.

Well, as complete as it could be without proper automated testing. I
think there'll be some more testing soon, as it doesn't make sense for
it to hang out so blatantly like this.

Both a fmt and clippy pass have shaken all the lint off, and right now
it builds without warnings or lints. Wheee!
---
 server/nm-store/docs/storage.md               |  46 +
 server/nm-store/src/errors.rs                 |   1 -
 server/nm-store/src/lib.rs                    |   3 +-
 server/nm-store/src/reference_parser.rs       |  19 +-
 server/nm-store/src/row_structs.rs            | 111 --
 .../nm-store/src/sql/initialize_database.sql  |   2 +-
 server/nm-store/src/store.rs                  | 982 ++++++++++--------
 server/nm-store/src/structs.rs                | 162 ++-
 8 files changed, 709 insertions(+), 617 deletions(-)
 create mode 100644 server/nm-store/docs/storage.md
 delete mode 100644 server/nm-store/src/row_structs.rs

diff --git a/server/nm-store/docs/storage.md b/server/nm-store/docs/storage.md
new file mode 100644
index 0000000..da38bde
--- /dev/null
+++ b/server/nm-store/docs/storage.md
@@ -0,0 +1,46 @@
+# Storage layer for Notesmachine
+
+This library implements the core functionality of Notesmachine and
+maps that functionality onto a storage layer. There's a bit of
+intermingling in here which can't be helped, although it may make sense
+in the future to separate the decomposition of the note content into a
+higher layer.
+
+Notesmachine storage consists of two kinds of items: Zettel and Kasten,
+which are German for "Note" and "Box". Here are the basic rules:
+
+- Boxes have titles (and date metadata)
+- Notes have content and a type (and date metadata)
+- Notes are stored in boxes
+  - Notes are positioned with respect to other notes.
+  - There are two positions:
+    - Siblings, creating lists
+    - Children, creating trees like this one
+- Notes may have references (pointers) to other boxes
+- Notes may be moved around
+- Notes may be deleted
+- Boxes may be deleted
+- When a box is renamed, every reference to that box is auto-edited to
+  reflect the change. If a box is renamed to match an existing box, the
+  notes in both boxes are merged.
+
+Note-to-note relationships form trees, and are kept in a SQL database of
+(`parent_id`, `child_id`, `position`, `relationship_type`). The
+`position` is a monotonic index on the parent (that is, every pair
+(`parent_id`, `position`) must be unique). The `relationship_type` is
+an enum and can specify that the relationship is *original*,
+*embedding*, or *referencing*. An embedded or referenced note may be
+read/write or read-only with respect to the original, but there is only
+one original note at any time.
+
+Note-to-box relationships form a graph, and are kept in the SQL database
+as a collection of *edges* from the note to the box (and naturally
+vice-versa).
+
+- Decision: When an original note is deleted, do all references and
+  embeddings also get deleted, or is the oldest one elevated to be a new
+  "original"? Or is that something the user may choose?
+
+- Decision: Should the merging issue be handled at this layer, or would
+  it make sense to move this to a higher layer, and only provide the
+  hooks for it here?
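
The invariant described above, that a (`parent_id`, `position`) pair never repeats under one parent, is maintained by shifting siblings before every insert and after every removal. The sketch below illustrates the insert half in the style of the `store.rs` helpers later in this patch; it assumes the table and column names that code actually uses (`note_relationships`, `parent_id`, `note_id`, `position`, `nature`) rather than the `child_id`/`relationship_type` naming in the prose above.

use sqlx::{sqlite::Sqlite, Executor};

// Shift every child at or after `position` one slot to the right so a new
// note can be inserted without violating the (parent_id, position) rule.
// A sketch modelled on make_room_for_new_note in store.rs, not the shipped code.
async fn open_gap_for_child<'a, E>(executor: E, parent_id: i64, position: i64) -> sqlx::Result<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let open_gap_sql = concat!(
        "UPDATE note_relationships ",
        "SET position = position + 1 ",
        "WHERE position >= ? AND parent_id = ?;"
    );

    sqlx::query(open_gap_sql)
        .bind(position)
        .bind(parent_id)
        .execute(executor)
        .await
        .map(|_| ())
}

Deletion runs the mirror-image update (`position - 1` for every sibling past the hole), which is what `close_hole_for_deleted_note` does further down in `store.rs`.
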
diff --git a/server/nm-store/src/errors.rs b/server/nm-store/src/errors.rs
index 26031f2..f42d6da 100644
--- a/server/nm-store/src/errors.rs
+++ b/server/nm-store/src/errors.rs
@@ -1,4 +1,3 @@
-use sqlx;
 use thiserror::Error;
 
 /// All the ways looking up objects can fail
diff --git a/server/nm-store/src/lib.rs b/server/nm-store/src/lib.rs
index 0ce5da3..5ff46d6 100644
--- a/server/nm-store/src/lib.rs
+++ b/server/nm-store/src/lib.rs
@@ -1,8 +1,7 @@
 mod errors;
-mod row_structs;
+mod reference_parser;
 mod store;
 mod structs;
-mod reference_parser;
 
 pub use crate::errors::NoteStoreError;
 pub use crate::store::NoteStore;
diff --git a/server/nm-store/src/reference_parser.rs b/server/nm-store/src/reference_parser.rs
index 28060d5..58d4811 100644
--- a/server/nm-store/src/reference_parser.rs
+++ b/server/nm-store/src/reference_parser.rs
@@ -37,7 +37,7 @@ pub(crate) fn find_links(document: &str) -> Vec<String> {
         }
 
         match &node.data.borrow().value {
-            &NodeValue::Text(ref text) => Some(
+            NodeValue::Text(ref text) => Some(
                 RE_REFERENCES
                     .captures_iter(text)
                     .map(|t| String::from_utf8_lossy(&t.get(1).unwrap().as_bytes()).to_string())
@@ -68,19 +68,16 @@ fn recase(title: &str) -> String {
     RE_PASS3.replace_all(&pass, " ").trim().to_string()
 }
 
-fn build_page_titles(references: &Vec<String>) -> Vec<String> {
+fn build_page_titles(references: &[String]) -> Vec<String> {
     references
         .iter()
-        .map(|s| {
-            let c = s.chars().nth(0);
-            match c {
-                Some('#') => recase(s),
-                Some('[') => s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string(),
-                Some(_) => s.clone(),
-                _ => "".to_string(),
-            }
+        .map(|s| match s.chars().next() {
+            Some('#') => recase(s),
+            Some('[') => s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string(),
+            Some(_) => s.clone(),
+            _ => "".to_string(),
         })
-        .filter(|s| s.len() > 0)
+        .filter(|s| !s.is_empty())
         .collect()
 }
 
diff --git a/server/nm-store/src/row_structs.rs b/server/nm-store/src/row_structs.rs
deleted file mode 100644
index f9dad66..0000000
--- a/server/nm-store/src/row_structs.rs
+++ /dev/null
@@ -1,111 +0,0 @@
-use chrono::{DateTime, Utc};
-use derive_builder::Builder;
-use serde::{Deserialize, Serialize};
-use sqlx::{self, FromRow};
-
-#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
-pub struct RawPage {
-    pub id: i64,
-    pub slug: String,
-    pub title: String,
-    pub note_id: i64,
-    pub creation_date: DateTime<Utc>,
-    pub updated_date: DateTime<Utc>,
-    pub lastview_date: DateTime<Utc>,
-    pub deleted_date: Option<DateTime<Utc>>,
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
-pub struct RawNote {
-    pub id: i64,
-    pub uuid: String,
-    pub parent_id: i64,
-    pub parent_uuid: String,
-    pub content: String,
-    pub position: i64,
-    pub notetype: String,
-    pub creation_date: DateTime<Utc>,
-    pub updated_date: DateTime<Utc>,
-    pub lastview_date: DateTime<Utc>,
-    pub deleted_date: Option<DateTime<Utc>>,
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
-pub struct NewPage {
-    pub slug: String,
-    pub title: String,
-    pub note_id: i64,
-    #[builder(default = r#"chrono::Utc::now()"#)]
-    pub creation_date: DateTime<Utc>,
-    #[builder(default = r#"chrono::Utc::now()"#)]
-    pub updated_date: DateTime<Utc>,
-    #[builder(default = r#"chrono::Utc::now()"#)]
-    pub lastview_date: DateTime<Utc>,
-    #[builder(default = r#"None"#)]
-    pub deleted_date: Option<DateTime<Utc>>,
-}
-
-#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
-pub struct NewNote {
-    #[builder(default = r#""".to_string()"#)]
-    pub uuid: String,
-    pub content: String,
-    #[builder(default = r#""note".to_string()"#)]
-    pub notetype: String,
-    #[builder(default = r#"chrono::Utc::now()"#)]
pub creation_date: DateTime, - #[builder(default = r#"chrono::Utc::now()"#)] - pub updated_date: DateTime, - #[builder(default = r#"chrono::Utc::now()"#)] - pub lastview_date: DateTime, - #[builder(default = r#"None"#)] - pub deleted_date: Option>, -} - -#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] -pub(crate) struct JustSlugs { - pub slug: String, -} - -#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] -pub(crate) struct JustTitles { - title: String, -} - -#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] -pub(crate) struct JustId { - pub id: i64, -} - -#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] -pub(crate) struct PageTitles { - pub id: i64, - pub title: String, -} - -#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] -pub(crate) struct NoteRelationship { - pub parent_id: i64, - pub note_id: i64, - pub position: i64, - pub nature: String, -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn can_build_new_note() { - let now = chrono::Utc::now(); - let newnote = NewNoteBuilder::default() - .uuid("foo".to_string()) - .content("bar".to_string()) - .build() - .unwrap(); - assert!((newnote.creation_date - now).num_minutes() < 1); - assert!((newnote.updated_date - now).num_minutes() < 1); - assert!((newnote.lastview_date - now).num_minutes() < 1); - assert!(newnote.deleted_date.is_none()); - } -} diff --git a/server/nm-store/src/sql/initialize_database.sql b/server/nm-store/src/sql/initialize_database.sql index 933ff9a..9d4b6e1 100644 --- a/server/nm-store/src/sql/initialize_database.sql +++ b/server/nm-store/src/sql/initialize_database.sql @@ -1,8 +1,8 @@ DROP TABLE IF EXISTS notes; DROP TABLE IF EXISTS note_relationships; DROP TABLE IF EXISTS pages; -DROP TABLE IF EXISTS favorites; DROP TABLE IF EXISTS page_relationships; +DROP TABLE IF EXISTS favorites; CREATE TABLE notes ( id INTEGER PRIMARY KEY AUTOINCREMENT, diff --git a/server/nm-store/src/store.rs b/server/nm-store/src/store.rs index 0f23063..6040922 100644 --- a/server/nm-store/src/store.rs +++ b/server/nm-store/src/store.rs @@ -1,22 +1,19 @@ use crate::errors::NoteStoreError; -use crate::row_structs::{ - JustId, JustSlugs, NewNote, NewNoteBuilder, NewPage, NewPageBuilder, NoteRelationship, RawNote, - PageTitles, - RawPage, -}; use crate::reference_parser::build_references; -use friendly_id; +use crate::structs::{ + JustId, JustSlugs, NewNote, NewNoteBuilder, NewPage, NewPageBuilder, NoteRelationship, PageTitles, RawNote, + RawPage, RowCount, +}; use lazy_static::lazy_static; use regex::Regex; -use std::collections::HashSet; use shrinkwraprs::Shrinkwrap; use slug::slugify; -use sqlx; use sqlx::{ - sqlite::{Sqlite, SqlitePool, SqliteRow}, - Done, Executor, Row, + sqlite::{Sqlite, SqlitePool, SqliteRow}, + Done, Executor, Row, }; use std::collections::HashMap; +use std::collections::HashSet; use std::sync::Arc; #[derive(Shrinkwrap, Copy, Clone)] @@ -36,175 +33,224 @@ type NoteResult = core::result::Result; type SqlResult = sqlx::Result; impl NoteStore { - pub async fn new(url: &str) -> NoteResult { - let pool = SqlitePool::connect(url).await?; - Ok(NoteStore(Arc::new(pool))) - } + pub async fn new(url: &str) -> NoteResult { + let pool = SqlitePool::connect(url).await?; + Ok(NoteStore(Arc::new(pool))) + } - // Erase all the data in the database and restore it - // to its original empty form. Do not use unless you - // really, really want that to happen. 
- pub async fn reset_database(&self) -> NoteResult<()> { - reset_database(&*self.0) - .await - .map_err(NoteStoreError::DBError) - } + // Erase all the data in the database and restore it + // to its original empty form. Do not use unless you + // really, really want that to happen. + pub async fn reset_database(&self) -> NoteResult<()> { + reset_database(&*self.0).await.map_err(NoteStoreError::DBError) + } - /// Fetch page by slug - /// - /// Supports the use case of the user navigating to a known place - /// via a bookmark or other URL. Since the title isn't clear from - /// the slug, the slug is insufficient to generate a new page, so - /// this use case says that in the event of a failure to find the - /// requested page, return a basic NotFound. - pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(RawPage, Vec)> { - // let select_note_collection_for_root = include_str!("sql/select_note_collection_for_root.sql"); - let mut tx = self.0.begin().await?; - let page = select_page_by_slug(&mut tx, slug).await?; - let note_id = page.note_id; - let notes = select_note_collection_from_root(&mut tx, note_id).await?; - tx.commit().await?; - Ok((page, notes)) - } + /// Fetch page by slug + /// + /// Supports the use case of the user navigating to a known place + /// via a bookmark or other URL. Since the title isn't clear from + /// the slug, the slug is insufficient to generate a new page, so + /// this use case says that in the event of a failure to find the + /// requested page, return a basic NotFound. + pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(RawPage, Vec)> { + // let select_note_collection_for_root = include_str!("sql/select_note_collection_for_root.sql"); + let mut tx = self.0.begin().await?; + let page = select_page_by_slug(&mut tx, slug).await?; + let note_id = page.note_id; + let notes = select_note_collection_from_root(&mut tx, note_id).await?; + tx.commit().await?; + Ok((page, notes)) + } - /// Fetch page by title - /// - /// Supports the use case of the user navigating to a page via - /// the page's formal title. Since the title is the key reference - /// of the system, if no page with that title is found, a page with - /// that title is generated automatically. - pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(RawPage, Vec)> { - let mut tx = self.0.begin().await?; - let (page, notes) = match select_page_by_title(&mut tx, title).await { - Ok(page) => { - let note_id = page.note_id; - ( - page, - select_note_collection_from_root(&mut tx, note_id).await?, - ) - } - Err(sqlx::Error::RowNotFound) => { - let page = { - let new_root_note = create_unique_root_note(); - let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?; - let new_page_slug = generate_slug(&mut tx, title).await?; - let new_page = create_new_page_for(&title, &new_page_slug, new_root_note_id); - let _ = insert_one_new_page(&mut tx, &new_page).await?; - select_page_by_title(&mut tx, &title).await? - }; - let note_id = page.note_id; - ( - page, - select_note_collection_from_root(&mut tx, note_id).await?, - ) - } - Err(e) => return Err(NoteStoreError::DBError(e)), - }; - tx.commit().await?; - Ok((page, notes)) - } + /// Fetch page by title + /// + /// Supports the use case of the user navigating to a page via + /// the page's formal title. Since the title is the key reference + /// of the system, if no page with that title is found, a page with + /// that title is generated automatically. 
+ pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(RawPage, Vec)> { + let mut tx = self.0.begin().await?; + let (page, notes) = match select_page_by_title(&mut tx, title).await { + Ok(page) => { + let note_id = page.note_id; + (page, select_note_collection_from_root(&mut tx, note_id).await?) + } + Err(sqlx::Error::RowNotFound) => { + let page = { + let new_root_note = create_unique_root_note(); + let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?; + let new_page_slug = generate_slug(&mut tx, title).await?; + let new_page = create_new_page_for(&title, &new_page_slug, new_root_note_id); + let _ = insert_one_new_page(&mut tx, &new_page).await?; + select_page_by_title(&mut tx, &title).await? + }; + let note_id = page.note_id; + (page, select_note_collection_from_root(&mut tx, note_id).await?) + } + Err(e) => return Err(NoteStoreError::DBError(e)), + }; + tx.commit().await?; + Ok((page, notes)) + } - // TODO: Make sure the position is sane. - /// Insert a note as the child of an existing note, at a set position. - pub async fn insert_nested_note( - &self, - note: &NewNote, - parent_note_uuid: &str, - position: i64, - ) -> NoteResult { - let mut new_note = note.clone(); - new_note.uuid = friendly_id::create(); - let references = build_references(¬e.content); - let mut tx = self.0.begin().await?; + // TODO: Make sure the position is sane. + /// Insert a note as the child of an existing note, at a set position. + pub async fn insert_nested_note( + &self, + note: &NewNote, + parent_note_uuid: &str, + position: i64, + ) -> NoteResult { + let mut new_note = note.clone(); + new_note.uuid = friendly_id::create(); + let references = build_references(¬e.content); + let mut tx = self.0.begin().await?; - // Start by building the note and putting it into its relationship. - let parent_id = select_note_id_for_uuid(&mut tx, parent_note_uuid).await?; - let new_note_id = insert_one_new_note(&mut tx, &new_note).await?; - let _ = make_room_for_new_note(&mut tx, parent_id, position).await?; - let _ = insert_note_note_relationship(&mut tx, parent_id, new_note_id, position).await?; + // Start by building the note and putting it into its relationship. + let parent_id: ParentId = select_note_id_for_uuid(&mut tx, parent_note_uuid).await?; + let parent_max_position = assert_max_child_position_for_note(&mut tx, parent_id).await?; + let position = if position > parent_max_position { + parent_max_position + 1 + } else { + position + }; + let new_note_id = insert_one_new_note(&mut tx, &new_note).await?; + let _ = make_room_for_new_note(&mut tx, parent_id, position).await?; + let _ = insert_note_to_note_relationship(&mut tx, parent_id, new_note_id, position, "note").await?; - // From the references, make lists of pages that exist, and pages - // that do not. - let found_references = find_all_references_for(&mut tx, &references).await?; - let new_references = diff_references(&references, &found_references); - let mut known_reference_ids: Vec = Vec::new(); + // From the references, make lists of pages that exist, and pages + // that do not. 
+ let found_references = find_all_page_references_for(&mut tx, &references).await?; + let new_references = diff_references(&references, &found_references); + let mut known_reference_ids: Vec = Vec::new(); - // Create the pages that don't exist - for one_reference in new_references.iter() { - let new_root_note = create_unique_root_note(); - let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?; - let new_page_slug = generate_slug(&mut tx, &one_reference).await?; - let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id); - known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?) - }; + // Create the pages that don't exist + for one_reference in new_references.iter() { + let new_root_note = create_unique_root_note(); + let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?; + let new_page_slug = generate_slug(&mut tx, &one_reference).await?; + let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id); + known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?) + } - // And associate the note with all the pages. - known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect()); - let _ = insert_note_page_references(&mut tx, new_note_id, &known_reference_ids).await?; + // And associate the note with all the pages. + known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect()); + let _ = insert_note_to_page_relationships(&mut tx, new_note_id, &known_reference_ids).await?; - tx.commit().await?; - Ok(new_note.uuid) - } + tx.commit().await?; + Ok(new_note.uuid) + } - // TODO: Make sure the new position is sane. - /// Move a note from one location to another. - pub async fn move_note( - &self, - note_uuid: &str, - old_parent_uuid: &str, - new_parent_uuid: &str, - new_position: i64, - ) -> NoteResult<()> { - let sample = vec![note_uuid, old_parent_uuid, new_parent_uuid]; - let mut tx = self.0.begin().await?; + // This doesn't do anything with the references, as those are + // dependent entirely on the *content*, and not the *position*, of + // the note and the referenced page. + // + // TODO: Ensure the position is sane. + /// Move a note from one location to another. + pub async fn move_note( + &self, + note_uuid: &str, + old_parent_uuid: &str, + new_parent_uuid: &str, + new_position: i64, + ) -> NoteResult<()> { + let all_uuids = vec![note_uuid, old_parent_uuid, new_parent_uuid]; + let mut tx = self.0.begin().await?; - // This is one of the few cases where we we're getting IDs for - // notes, but the nature of the ID isn't known at this time. - // This has to be handled manually, in the next paragraph - // below. - let found_id_vec = bulk_select_ids_for_note_uuids(&mut tx, &sample).await?; - let found_ids: HashMap = found_id_vec.into_iter().collect(); - if found_ids.len() != 3 { - return Err(NoteStoreError::NotFound); - } + // This is one of the few cases where we we're getting IDs for + // notes, but the nature of the ID isn't known at this time. + // This has to be handled manually, in the next paragraph + // below. 
+ let found_id_vec = bulk_select_ids_for_note_uuids(&mut tx, &all_uuids).await?; + let found_ids: HashMap = found_id_vec.into_iter().collect(); + if found_ids.len() != 3 { + return Err(NoteStoreError::NotFound); + } - let old_parent_id = ParentId(*found_ids.get(old_parent_uuid).unwrap()); - let new_parent_id = ParentId(*found_ids.get(new_parent_uuid).unwrap()); - let note_id = NoteId(*found_ids.get(note_uuid).unwrap()); + let old_parent_id = ParentId(*found_ids.get(old_parent_uuid).unwrap()); + let new_parent_id = ParentId(*found_ids.get(new_parent_uuid).unwrap()); + let note_id = NoteId(*found_ids.get(note_uuid).unwrap()); - let old_note_position = get_note_note_relationship(&mut tx, old_parent_id, note_id) - .await? - .position; + let old_note = get_note_to_note_relationship(&mut tx, old_parent_id, note_id).await?; + let old_note_position = old_note.position; + let old_note_nature = &old_note.nature; - let _ = delete_note_note_relationship(&mut tx, old_parent_id, note_id).await?; - let _ = close_hole_for_deleted_note(&mut tx, old_parent_id, old_note_position).await?; - let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?; - let _ = - insert_note_note_relationship(&mut tx, new_parent_id, note_id, new_position).await?; - tx.commit().await?; - Ok(()) - } + let _ = delete_note_to_note_relationship(&mut tx, old_parent_id, note_id).await?; + let _ = close_hole_for_deleted_note(&mut tx, old_parent_id, old_note_position).await?; + let parent_max_position = assert_max_child_position_for_note(&mut tx, new_parent_id).await?; + let new_position = if new_position > parent_max_position { + parent_max_position + 1 + } else { + new_position + }; + let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?; + let _ = + insert_note_to_note_relationship(&mut tx, new_parent_id, note_id, new_position, old_note_nature).await?; + tx.commit().await?; + Ok(()) + } - /// Embed or reference a note from a different location. - pub async fn reference_or_embed_note( - &self, - note_uuid: &str, - new_parent_uuid: &str, - new_position: i64, - new_nature: &str, - ) -> NoteResult<()> { - todo!() - } + /// Embed or reference a note from a different location. + pub async fn reference_or_embed_note( + &self, + note_uuid: &str, + new_parent_uuid: &str, + new_position: i64, + new_nature: &str, + ) -> NoteResult<()> { + let mut tx = self.0.begin().await?; + let existing_note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0); + let new_parent_id: ParentId = select_note_id_for_uuid(&mut tx, new_parent_uuid).await?; + let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?; + let _ = insert_note_to_note_relationship(&mut tx, new_parent_id, existing_note_id, new_position, new_nature) + .await?; + tx.commit().await?; + Ok(()) + } - /// Delete a note - pub async fn delete_note(&self, note_uuid: &str, note_parent_uuid: &str) -> NoteResult<()> { - todo!() - } + /// Delete a note + pub async fn delete_note(&self, note_uuid: &str, note_parent_uuid: &str) -> NoteResult<()> { + let mut tx = self.0.begin().await?; + let condemned_note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0); + let note_parent_id: ParentId = select_note_id_for_uuid(&mut tx, note_parent_uuid).await?; + let _ = delete_note_to_note_relationship(&mut tx, note_parent_id, condemned_note_id); + if count_existing_note_relationships(&mut tx, condemned_note_id).await? 
== 0 { + let _ = delete_note_to_page_relationships(&mut tx, condemned_note_id).await?; + let _ = delete_note(&mut tx, condemned_note_id).await?; + } + tx.commit().await?; + Ok(()) + } - /// Update a note's content - pub async fn update_note_content(&self, note_uuid: &str, content: &str) -> NoteResult<()> { - todo!() - } + /// Update a note's content + pub async fn update_note_content(&self, note_uuid: &str, content: &str) -> NoteResult<()> { + let references = build_references(&content); + + let mut tx = self.0.begin().await?; + + let note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0); + let _ = update_note_content(&mut tx, note_id, &content).await?; + + let found_references = find_all_page_references_for(&mut tx, &references).await?; + let new_references = diff_references(&references, &found_references); + let mut known_reference_ids: Vec = Vec::new(); + + // Create the pages that don't exist + for one_reference in new_references.iter() { + let new_root_note = create_unique_root_note(); + let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?; + let new_page_slug = generate_slug(&mut tx, &one_reference).await?; + let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id); + known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?) + } + + // And associate the note with all the pages. + known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect()); + let _ = insert_note_to_page_relationships(&mut tx, note_id, &known_reference_ids).await?; + + tx.commit().await?; + Ok(()) + } } // ___ _ _ // | _ \_ _(_)_ ____ _| |_ ___ @@ -219,158 +265,153 @@ impl NoteStore { async fn reset_database<'a, E>(executor: E) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let initialize_sql = include_str!("sql/initialize_database.sql"); - sqlx::query(initialize_sql) - .execute(executor) - .await - .map(|_| ()) + let initialize_sql = include_str!("sql/initialize_database.sql"); + sqlx::query(initialize_sql).execute(executor).await.map(|_| ()) } async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let select_one_page_by_slug_sql = concat!( - "SELECT id, title, slug, note_id, creation_date, updated_date, ", - "lastview_date, deleted_date FROM pages WHERE slug=?;" - ); - Ok(sqlx::query_as(&select_one_page_by_slug_sql) - .bind(&slug) - .fetch_one(executor) - .await?) + let select_one_page_by_slug_sql = concat!( + "SELECT id, title, slug, note_id, creation_date, updated_date, ", + "lastview_date, deleted_date FROM pages WHERE slug=?;" + ); + Ok(sqlx::query_as(&select_one_page_by_slug_sql) + .bind(&slug) + .fetch_one(executor) + .await?) } async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let select_one_page_by_title_sql = concat!( - "SELECT id, title, slug, note_id, creation_date, updated_date, ", - "lastview_date, deleted_date FROM pages WHERE title=?;" - ); - Ok(sqlx::query_as(&select_one_page_by_title_sql) - .bind(&title) - .fetch_one(executor) - .await?) + let select_one_page_by_title_sql = concat!( + "SELECT id, title, slug, note_id, creation_date, updated_date, ", + "lastview_date, deleted_date FROM pages WHERE title=?;" + ); + Ok(sqlx::query_as(&select_one_page_by_title_sql) + .bind(&title) + .fetch_one(executor) + .await?) 
} async fn select_note_id_for_uuid<'a, E>(executor: E, uuid: &str) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let select_note_id_for_uuid_sql = "SELECT id FROM notes WHERE uuid = ?"; - let id: JustId = sqlx::query_as(&select_note_id_for_uuid_sql) - .bind(&uuid) - .fetch_one(executor) - .await?; - Ok(ParentId(id.id)) + let select_note_id_for_uuid_sql = "SELECT id FROM notes WHERE uuid = ?"; + let id: JustId = sqlx::query_as(&select_note_id_for_uuid_sql) + .bind(&uuid) + .fetch_one(executor) + .await?; + Ok(ParentId(id.id)) } -async fn make_room_for_new_note<'a, E>( - executor: E, - parent_id: ParentId, - position: i64, -) -> SqlResult<()> +async fn make_room_for_new_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let make_room_for_new_note_sql = concat!( - "UPDATE note_relationships ", - "SET position = position + 1 ", - "WHERE position >= ? and parent_id = ?;" - ); + let make_room_for_new_note_sql = concat!( + "UPDATE note_relationships ", + "SET position = position + 1 ", + "WHERE position >= ? and parent_id = ?;" + ); - sqlx::query(make_room_for_new_note_sql) - .bind(&position) - .bind(&*parent_id) - .execute(executor) - .await - .map(|_| ()) + sqlx::query(make_room_for_new_note_sql) + .bind(&position) + .bind(&*parent_id) + .execute(executor) + .await + .map(|_| ()) } -async fn insert_note_note_relationship<'a, E>( - executor: E, - parent_id: ParentId, - note_id: NoteId, - position: i64, +async fn insert_note_to_note_relationship<'a, E>( + executor: E, + parent_id: ParentId, + note_id: NoteId, + position: i64, + nature: &str, ) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let insert_note_note_relationship_sql = concat!( - "INSERT INTO note_relationships (parent_id, note_id, position, nature) ", - "values (?, ?, ?, ?)" - ); + let insert_note_to_note_relationship_sql = concat!( + "INSERT INTO note_relationships (parent_id, note_id, position, nature) ", + "values (?, ?, ?, ?)" + ); - sqlx::query(insert_note_note_relationship_sql) - .bind(&*parent_id) - .bind(&*note_id) - .bind(&position) - .bind("note") - .execute(executor) - .await - .map(|_| ()) + sqlx::query(insert_note_to_note_relationship_sql) + .bind(&*parent_id) + .bind(&*note_id) + .bind(&position) + .bind(&nature) + .execute(executor) + .await + .map(|_| ()) } async fn select_note_collection_from_root<'a, E>(executor: E, root: i64) -> SqlResult> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let select_note_collection_from_root_sql = - include_str!("sql/select_note_collection_from_root.sql"); - Ok(sqlx::query_as(&select_note_collection_from_root_sql) - .bind(&root) - .fetch_all(executor) - .await?) + let select_note_collection_from_root_sql = include_str!("sql/select_note_collection_from_root.sql"); + Ok(sqlx::query_as(&select_note_collection_from_root_sql) + .bind(&root) + .fetch_all(executor) + .await?) 
} async fn insert_one_new_note<'a, E>(executor: E, note: &NewNote) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let insert_one_note_sql = concat!( - "INSERT INTO notes ( ", - " uuid, ", - " content, ", - " notetype, ", - " creation_date, ", - " updated_date, ", - " lastview_date) ", - "VALUES (?, ?, ?, ?, ?, ?);" - ); + let insert_one_note_sql = concat!( + "INSERT INTO notes ( ", + " uuid, ", + " content, ", + " notetype, ", + " creation_date, ", + " updated_date, ", + " lastview_date) ", + "VALUES (?, ?, ?, ?, ?, ?);" + ); - Ok(NoteId( - sqlx::query(insert_one_note_sql) - .bind(¬e.uuid) - .bind(¬e.content) - .bind(¬e.notetype) - .bind(¬e.creation_date) - .bind(¬e.updated_date) - .bind(¬e.lastview_date) - .execute(executor) - .await? - .last_insert_rowid(), - )) + Ok(NoteId( + sqlx::query(insert_one_note_sql) + .bind(¬e.uuid) + .bind(¬e.content) + .bind(¬e.notetype) + .bind(¬e.creation_date) + .bind(¬e.updated_date) + .bind(¬e.lastview_date) + .execute(executor) + .await? + .last_insert_rowid(), + )) } -fn find_maximal_slug(slugs: &Vec) -> Option { - lazy_static! { - static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap(); - } +// Given a possible slug, find the slug with the highest +// uniquification number, and return that number, if any. +fn find_maximal_slug(slugs: &[JustSlugs]) -> Option { + lazy_static! { + static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap(); + } - if slugs.len() == 0 { - return None; - } + if slugs.is_empty() { + return None; + } - let mut slug_counters: Vec = slugs - .iter() - .filter_map(|slug| RE_CAP_NUM.captures(&slug.slug)) - .map(|cap| cap.get(1).unwrap().as_str().parse::().unwrap()) - .collect(); - slug_counters.sort_unstable(); - slug_counters.pop() + let mut slug_counters: Vec = slugs + .iter() + .filter_map(|slug| RE_CAP_NUM.captures(&slug.slug)) + .map(|cap| cap.get(1).unwrap().as_str().parse::().unwrap()) + .collect(); + slug_counters.sort_unstable(); + slug_counters.pop() } // Given an initial string and an existing collection of slugs, @@ -378,221 +419,284 @@ fn find_maximal_slug(slugs: &Vec) -> Option { // collection. async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - lazy_static! { - static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap(); - } + lazy_static! 
{ + static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap(); + } - let initial_slug = slugify(title); - let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, ""); - let slug_finder_sql = "SELECT slug FROM pages WHERE slug LIKE '?%';"; - let similar_slugs: Vec = sqlx::query_as(&slug_finder_sql) - .bind(&*sample_slug) - .fetch_all(executor) - .await?; - let maximal_slug = find_maximal_slug(&similar_slugs); - match maximal_slug { - None => Ok(initial_slug), - Some(max_slug) => Ok(format!("{}-{}", initial_slug, max_slug + 1)), - } + let initial_slug = slugify(title); + let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, ""); + let slug_finder_sql = "SELECT slug FROM pages WHERE slug LIKE '?%';"; + let similar_slugs: Vec = sqlx::query_as(&slug_finder_sql) + .bind(&*sample_slug) + .fetch_all(executor) + .await?; + let maximal_slug = find_maximal_slug(&similar_slugs); + match maximal_slug { + None => Ok(initial_slug), + Some(max_slug) => Ok(format!("{}-{}", initial_slug, max_slug + 1)), + } } async fn insert_one_new_page<'a, E>(executor: E, page: &NewPage) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let insert_one_page_sql = concat!( - "INSERT INTO pages ( ", - " slug, ", - " title, ", - " note_id, ", - " creation_date, ", - " updated_date, ", - " lastview_date) ", - "VALUES (?, ?, ?, ?, ?, ?);" - ); + let insert_one_page_sql = concat!( + "INSERT INTO pages ( ", + " slug, ", + " title, ", + " note_id, ", + " creation_date, ", + " updated_date, ", + " lastview_date) ", + "VALUES (?, ?, ?, ?, ?, ?);" + ); - Ok(PageId( - sqlx::query(insert_one_page_sql) - .bind(&page.slug) - .bind(&page.title) - .bind(&page.note_id) - .bind(&page.creation_date) - .bind(&page.updated_date) - .bind(&page.lastview_date) - .execute(executor) - .await? - .last_insert_rowid(), - )) + Ok(PageId( + sqlx::query(insert_one_page_sql) + .bind(&page.slug) + .bind(&page.title) + .bind(&page.note_id) + .bind(&page.creation_date) + .bind(&page.updated_date) + .bind(&page.lastview_date) + .execute(executor) + .await? + .last_insert_rowid(), + )) } -async fn bulk_select_ids_for_note_uuids<'a, E>( - executor: E, - ids: &Vec<&str>, -) -> SqlResult> +async fn insert_note_to_page_relationships<'a, E>( + executor: E, + note_id: NoteId, + references: &[PageId], +) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let bulk_select_ids_for_note_uuids_sql = "SELECT uuid, id FROM notes WHERE uuid IN (" - .to_string() - + &["?"].repeat(ids.len()).join(",") - + &");".to_string(); + let insert_note_page_references_sql = "INSERT INTO page_relationships (note_id, page_id) VALUES ".to_string() + + &["(?, ?)"].repeat(references.len()).join(", ") + + &";".to_string(); - let mut request = sqlx::query(&bulk_select_ids_for_note_uuids_sql); - for id in ids.iter() { - request = request.bind(id); - } - Ok(request - .try_map(|row: SqliteRow| { - let l = row.try_get::(0)?; - let r = row.try_get::(1)?; - Ok((l, r)) - }) - .fetch_all(executor) - .await? - .into_iter() - .collect()) + let mut request = sqlx::query(&insert_note_page_references_sql); + for reference in references { + request = request.bind(*note_id).bind(**reference); + } + + request.execute(executor).await.map(|_| ()) } -async fn get_note_note_relationship<'a, E>( - executor: E, - parent_id: ParentId, - note_id: NoteId, +// For a given collection of uuids, retrieve the internal ID used by +// the database. 
+async fn bulk_select_ids_for_note_uuids<'a, E>(executor: E, ids: &[&str]) -> SqlResult> +where + E: Executor<'a, Database = Sqlite>, +{ + let bulk_select_ids_for_note_uuids_sql = "SELECT uuid, id FROM notes WHERE uuid IN (".to_string() + + &["?"].repeat(ids.len()).join(",") + + &");".to_string(); + + let mut request = sqlx::query(&bulk_select_ids_for_note_uuids_sql); + for id in ids.iter() { + request = request.bind(id); + } + Ok(request + .try_map(|row: SqliteRow| { + let l = row.try_get::(0)?; + let r = row.try_get::(1)?; + Ok((l, r)) + }) + .fetch_all(executor) + .await? + .into_iter() + .collect()) +} + +// Used by move_note to identify the single note to note relationship +// by the original parent and child pair. Used mostly to find the +// position for recalculation, to create a new gap or close an old +// one. +async fn get_note_to_note_relationship<'a, E>( + executor: E, + parent_id: ParentId, + note_id: NoteId, ) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let get_note_note_relationship_sql = concat!( - "SELECT parent_id, note_id, position, nature ", - "FROM note_relationships ", - "WHERE parent_id = ? and note_id = ? ", - "LIMIT 1" - ); - sqlx::query_as(get_note_note_relationship_sql) - .bind(&*parent_id) - .bind(&*note_id) - .fetch_one(executor) - .await + let get_note_to_note_relationship_sql = concat!( + "SELECT parent_id, note_id, position, nature ", + "FROM note_relationships ", + "WHERE parent_id = ? and note_id = ? ", + "LIMIT 1" + ); + sqlx::query_as(get_note_to_note_relationship_sql) + .bind(&*parent_id) + .bind(&*note_id) + .fetch_one(executor) + .await } -async fn delete_note_note_relationship<'a, E>( - executor: E, - parent_id: ParentId, - note_id: NoteId, -) -> SqlResult<()> +async fn delete_note_to_note_relationship<'a, E>(executor: E, parent_id: ParentId, note_id: NoteId) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let delete_note_note_relationship_sql = concat!( - "DELETE FROM note_relationships ", - "WHERE parent_id = ? and note_id = ? " - ); + let delete_note_to_note_relationship_sql = concat!( + "DELETE FROM note_relationships ", + "WHERE parent_id = ? and note_id = ? " + ); - let count = sqlx::query(delete_note_note_relationship_sql) - .bind(&*parent_id) - .bind(&*note_id) - .execute(executor) - .await? - .rows_affected(); + let count = sqlx::query(delete_note_to_note_relationship_sql) + .bind(&*parent_id) + .bind(&*note_id) + .execute(executor) + .await? + .rows_affected(); - match count { - 1 => Ok(()), - _ => Err(sqlx::Error::RowNotFound), - } + match count { + 1 => Ok(()), + _ => Err(sqlx::Error::RowNotFound), + } } -async fn close_hole_for_deleted_note<'a, E>( - executor: E, - parent_id: ParentId, - position: i64, -) -> SqlResult<()> +async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let close_hole_for_deleted_note_sql = concat!( - "UPDATE note_relationships ", - "SET position = position - 1 ", - "WHERE position > ? and parent_id = ?;" - ); + let delete_note_to_page_relationships_sql = concat!("DELETE FROM page_relationships ", "WHERE note_id = ? 
"); - sqlx::query(close_hole_for_deleted_note_sql) - .bind(&position) - .bind(&*parent_id) - .execute(executor) - .await - .map(|_| ()) + let _ = sqlx::query(delete_note_to_page_relationships_sql) + .bind(&*note_id) + .execute(executor) + .await?; + Ok(()) } -async fn find_all_references_for<'a, E>( - executor: E, - references: &Vec, -) -> SqlResult> +async fn delete_note<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()> where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let find_all_references_for_sql = "SELECT id, title FROM pages WHERE title IN (" - .to_string() + - &["?"].repeat(references.len()).join(",") + - &");".to_string(); + let delete_note_sql = concat!("DELETE FROM notes WHERE note_id = ?"); - let mut request = sqlx::query_as(&find_all_references_for_sql); - for id in references.iter() { - request = request.bind(id); - } - request - .fetch_all(executor) - .await + let count = sqlx::query(delete_note_sql) + .bind(&*note_id) + .execute(executor) + .await? + .rows_affected(); + + match count { + 1 => Ok(()), + _ => Err(sqlx::Error::RowNotFound), + } } -async fn insert_note_page_references<'a, E>( - executor: E, - note_id: NoteId, - references: &Vec, -) -> SqlResult<()> +async fn count_existing_note_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult where - E: Executor<'a, Database = Sqlite>, + E: Executor<'a, Database = Sqlite>, { - let insert_note_page_references_sql = - "INSERT INTO note_page_references (note_id, page_id) VALUES ".to_string() + - &["(?, ?)"].repeat(references.len()).join(", ") + - &";".to_string(); + let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM page_relationships WHERE note_id = ?"; - let mut request = sqlx::query(&insert_note_page_references_sql); - for reference in references { - request = request.bind(&*note_id).bind(**reference); - } + let count: RowCount = sqlx::query_as(count_existing_note_relationships_sql) + .bind(&*note_id) + .fetch_one(executor) + .await?; - request - .execute(executor) - .await - .map(|_| ()) + Ok(count.count) +} + +async fn assert_max_child_position_for_note<'a, E>(executor: E, note_id: ParentId) -> SqlResult +where + E: Executor<'a, Database = Sqlite>, +{ + let assert_max_child_position_for_note_sql = + "SELECT MAX(position) as count FROM note_relationships WHERE parent_id = ?"; + + let count: RowCount = sqlx::query_as(assert_max_child_position_for_note_sql) + .bind(&*note_id) + .fetch_one(executor) + .await?; + + Ok(count.count) +} + +// After removing a note, recalculate the position of all notes under +// the parent note, such that there order is now completely +// sequential. +async fn close_hole_for_deleted_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()> +where + E: Executor<'a, Database = Sqlite>, +{ + let close_hole_for_deleted_note_sql = concat!( + "UPDATE note_relationships ", + "SET position = position - 1 ", + "WHERE position > ? 
and parent_id = ?;" + ); + + sqlx::query(close_hole_for_deleted_note_sql) + .bind(&position) + .bind(&*parent_id) + .execute(executor) + .await + .map(|_| ()) +} + +async fn find_all_page_references_for<'a, E>(executor: E, references: &[String]) -> SqlResult> +where + E: Executor<'a, Database = Sqlite>, +{ + let find_all_references_for_sql = "SELECT id, title FROM pages WHERE title IN (".to_string() + + &["?"].repeat(references.len()).join(",") + + &");".to_string(); + + let mut request = sqlx::query_as(&find_all_references_for_sql); + for id in references.iter() { + request = request.bind(id); + } + request.fetch_all(executor).await +} + +async fn update_note_content<'a, E>(executor: E, note_id: NoteId, content: &str) -> SqlResult<()> +where + E: Executor<'a, Database = Sqlite>, +{ + let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?"; + let count = sqlx::query(update_note_content_sql) + .bind(content) + .bind(&*note_id) + .execute(executor) + .await? + .rows_affected(); + + match count { + 1 => Ok(()), + _ => Err(sqlx::Error::RowNotFound), + } } fn create_unique_root_note() -> NewNote { - NewNoteBuilder::default() - .uuid(friendly_id::create()) - .content("".to_string()) - .notetype("root".to_string()) - .build() - .unwrap() + NewNoteBuilder::default() + .uuid(friendly_id::create()) + .content("".to_string()) + .notetype("root".to_string()) + .build() + .unwrap() } fn create_new_page_for(title: &str, slug: &str, note_id: NoteId) -> NewPage { - NewPageBuilder::default() - .slug(slug.to_string()) - .title(title.to_string()) - .note_id(*note_id) - .build() - .unwrap() + NewPageBuilder::default() + .slug(slug.to_string()) + .title(title.to_string()) + .note_id(*note_id) + .build() + .unwrap() } // Given the references supplied, and the references found in the datastore, // return a list of the references not found in the datastore. -fn diff_references(references: &Vec, found_references: &Vec) -> Vec { - let all: HashSet = references.iter().cloned().collect(); - let found: HashSet = found_references.iter().map(|r| r.title.clone()).collect(); - all.difference(&found).cloned().collect() +fn diff_references(references: &[String], found_references: &[PageTitles]) -> Vec { + let all: HashSet = references.iter().cloned().collect(); + let found: HashSet = found_references.iter().map(|r| r.title.clone()).collect(); + all.difference(&found).cloned().collect() } - - diff --git a/server/nm-store/src/structs.rs b/server/nm-store/src/structs.rs index 858618f..63b3df5 100644 --- a/server/nm-store/src/structs.rs +++ b/server/nm-store/src/structs.rs @@ -1,58 +1,116 @@ use chrono::{DateTime, Utc}; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; use sqlx::{self, FromRow}; -// // A Resource is either content or a URL to content that the -// // user embeds in a note. TODO: I have no idea how to do this yet, -// // but I'll figure it out. -// #[derive(Clone, Serialize, Deserialize, Debug)] -// pub struct Resource { -// pub id: String, -// pub content: String, -// } -// -// // A Breadcrumb is a component of a reference. Every element should -// // be clickable, although in practice what's going to happen is that -// // the user will be sent to the *page* with that note, then *scrolled* -// // to that note via anchor. -// #[derive(Clone, Debug)] -// pub struct Breadcrumb { -// pub note_id: String, -// pub summary: String, -// } -// -// // A Note is the heart of our system. 
It is a single object that has -// // a place in our system; it has a parent, but it also has embedded -// // references that allow it to navigate through a web of related -// // objects. It may have children. *AT THIS LAYER*, though, it is -// // returned as an array. It is up to the -// #[derive(Clone, Debug)] -// pub struct Note { -// pub id: String, -// pub parent_id: String, -// pub content: String, -// pub resources: Vec, -// pub note_type: String, // Describes the relationship to the parent note. -// pub created: DateTime, -// pub updated: DateTime, -// pub viewed: DateTime, -// pub deleted: Option>, -// } -// -// pub struct Reference { -// pub page_id: String, -// pub page_title: String, -// pub reference_summary_titles: Vec, -// pub reference_summary: String, -// } - -pub struct Page { +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub struct RawPage { + pub id: i64, pub slug: String, pub title: String, - // pub notes: Vec, // The actual notes on this page. - // pub references: Vec, // All other notes that reference this page. - // pub unlinked_references: Vec, - pub created: DateTime, - pub updated: DateTime, - pub viewed: DateTime, - pub deleted: Option>, + pub note_id: i64, + pub creation_date: DateTime, + pub updated_date: DateTime, + pub lastview_date: DateTime, + pub deleted_date: Option>, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub struct RawNote { + pub id: i64, + pub uuid: String, + pub parent_id: i64, + pub parent_uuid: String, + pub content: String, + pub position: i64, + pub notetype: String, + pub creation_date: DateTime, + pub updated_date: DateTime, + pub lastview_date: DateTime, + pub deleted_date: Option>, +} + +#[derive(Clone, Serialize, Deserialize, Debug, Builder)] +pub struct NewPage { + pub slug: String, + pub title: String, + pub note_id: i64, + #[builder(default = r#"chrono::Utc::now()"#)] + pub creation_date: DateTime, + #[builder(default = r#"chrono::Utc::now()"#)] + pub updated_date: DateTime, + #[builder(default = r#"chrono::Utc::now()"#)] + pub lastview_date: DateTime, + #[builder(default = r#"None"#)] + pub deleted_date: Option>, +} + +#[derive(Clone, Serialize, Deserialize, Debug, Builder)] +pub struct NewNote { + #[builder(default = r#""".to_string()"#)] + pub uuid: String, + pub content: String, + #[builder(default = r#""note".to_string()"#)] + pub notetype: String, + #[builder(default = r#"chrono::Utc::now()"#)] + pub creation_date: DateTime, + #[builder(default = r#"chrono::Utc::now()"#)] + pub updated_date: DateTime, + #[builder(default = r#"chrono::Utc::now()"#)] + pub lastview_date: DateTime, + #[builder(default = r#"None"#)] + pub deleted_date: Option>, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct JustSlugs { + pub slug: String, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct JustTitles { + title: String, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct JustId { + pub id: i64, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct PageTitles { + pub id: i64, + pub title: String, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct NoteRelationship { + pub parent_id: i64, + pub note_id: i64, + pub position: i64, + pub nature: String, +} + +#[derive(Clone, Serialize, Deserialize, Debug, FromRow)] +pub(crate) struct RowCount { + pub count: i64, +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn can_build_new_note() { + let now = 
chrono::Utc::now();
+        let newnote = NewNoteBuilder::default()
+            .uuid("foo".to_string())
+            .content("bar".to_string())
+            .build()
+            .unwrap();
+        assert!((newnote.creation_date - now).num_minutes() < 1);
+        assert!((newnote.updated_date - now).num_minutes() < 1);
+        assert!((newnote.lastview_date - now).num_minutes() < 1);
+        assert!(newnote.deleted_date.is_none());
+    }
 }
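
As a closing orientation, here is a minimal usage sketch of the operations this patch completes, written against the API re-exported from `lib.rs`. The crate name (`nm_store`), the database URL, the Tokio runtime, and the UUID values are assumptions made for illustration; only `NoteStore::new`, `update_note_content`, and `delete_note` come from the code above.

// Hypothetical driver; the crate name, runtime, URL, and UUIDs are
// illustrative assumptions, not part of this patch.
use nm_store::{NoteStore, NoteStoreError};

#[tokio::main]
async fn main() -> Result<(), NoteStoreError> {
    let store = NoteStore::new("sqlite://notes.db").await?;

    // Rewriting a note's content re-resolves any [[Page]] or #Hashtag
    // references in the new text, creating pages that don't exist yet.
    store
        .update_note_content("some-note-uuid", "Moved the notes to [[Some Page]]")
        .await?;

    // Detach the note from its parent; once no relationship uses the note
    // any more, the note row and its page links are removed as well.
    store.delete_note("some-note-uuid", "parent-note-uuid").await?;

    Ok(())
}

The delete path matches the intent of `docs/storage.md`: a note only disappears once the last relationship pointing at it is gone.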