Compare commits

No commits in common. "canon" and "reboot-20201004" have entirely different histories.

canon ... reboot-20201004
@@ -1,6 +0,0 @@
-/target
-Cargo.lock
-*#
-.#*
-*~
-
@@ -1,2 +0,0 @@
-[workspace]
-members = ["server/*"]
@@ -1,11 +0,0 @@
-[[source]]
-url = "https://pypi.python.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[packages]
-
-[dev-packages]
-
-[requires]
-python_version = "2.7"
@@ -1 +0,0 @@
-cognitive-complexity-threshold = 9
@@ -1,5 +0,0 @@
-[ ] Add RelationshipKind to Notes passed out
-[ ] Add KastenKind to Backreferences passed out
-[ ] Provide the array of note references (the 'cycle' manager) to make
-    mapping from Vec->Tree easier.
-
@@ -1 +0,0 @@
-Content analysis
@@ -1,10 +0,0 @@
-The thing of it is, we have two kinds of notes:
-
-1. This layer of the system will handle broken/missing position issues.
-2. The client layer of the system will ensure that a parent is provided.
-3. The notes retrieved via the CTE have information and parenting and
-   location.
-4. Notes put *into* the system have parent and location provided
-   separately.
-5. Clients do not specify the ids of notes put into the system.
-6. Retrieval by slug must test for is-a-box.
@@ -1,101 +0,0 @@
-DROP TABLE IF EXISTS notes;
-DROP TABLE IF EXISTS note_relationships;
-DROP TABLE IF EXISTS pages;
-DROP TABLE IF EXISTS page_relationships;
-DROP TABLE IF EXISTS favorites;
-
-CREATE TABLE notes (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    uuid TEXT NOT NULL UNIQUE,
-    notetype TEXT
-);
-
-CREATE TABLE pages (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    slug text NOT NULL UNIQUE,
-    note_id INTEGER,
-    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-CREATE TABLE note_relationships (
-    note_id INTEGER NOT NULL,
-    parent_id INTEGER NOT NULL,
-    position INTEGER NOT NULL,
-    nature TEXT NOT NULL,
-    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION,
-    FOREIGN KEY (parent_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-CREATE TABLE page_relationships (
-    note_id INTEGER NOT NULL,
-    page_id INTEGER NOT NULL,
-    FOREIGN KEY (note_id) references notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION,
-    FOREIGN KEY (page_id) references pages (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-INSERT INTO notes (id, uuid, notetype) VALUES
-    (1, "U1", "root"),
-    (2, "U2", "note"),
-    (3, "U3", "note"),
-    (4, "U4", "note"),
-    (5, "U5", "note"),
-    (6, "U6", "note"),
-    (7, "U7", "note"),
-    (8, "U8", "note");
-
-INSERT INTO note_relationships (note_id, parent_id, position, nature) VALUES
-    (2, 1, 1, "note"),
-    (3, 1, 2, "note"),
-    (4, 3, 1, "note"),
-    (5, 3, 2, "note"),
-    (6, 5, 1, "note"),
-    (7, 4, 1, "note"),
-    (8, 7, 1, "note");
-
-SELECT id, uuid, parent_id, parent_uuid, notetype
-FROM (
-    WITH RECURSIVE parents (
-        id, uuid, parent_id, parent_uuid, notetype, cycle
-    )
-    AS (
-        SELECT notes.id, notes.uuid,
-               note_parents.id,
-               note_parents.uuid,
-               notes.notetype,
-               ','||notes.id||','
-        FROM notes
-        INNER JOIN note_relationships
-            ON notes.id = note_relationships.note_id
-            AND notes.notetype = 'note'
-        INNER JOIN notes as note_parents
-            ON note_parents.id = note_relationships.parent_id
-        WHERE notes.id = 4
-
-        UNION
-        SELECT DISTINCT notes.id,
-               notes.uuid,
-               next_parent.id,
-               next_parent.uuid,
-               notes.notetype,
-               parents.cycle||notes.id||','
-        FROM notes
-        INNER JOIN parents
-            ON parents.parent_id = notes.id
-        LEFT JOIN note_relationships
-            ON note_relationships.note_id = notes.id
-        LEFT JOIN notes as next_parent
-            ON next_parent.id = note_relationships.parent_id
-        WHERE parents.cycle NOT LIKE '%,'||notes.id||',%'
-    )
-
-    SELECT * FROM parents);
-
--- Possible outcomes depending on the 'WHERE notes.id' clause in the root SELECT (first value is passed in):
--- 8, 7, 4, 3, 1
--- 6, 5, 3, 1
--- 2, 1
--- 4, 3, 1
--- 1 should result in no return (root notes aren't returned by themselves; they exist only so that
--- valid note reversals can find their parent page objects.)
@@ -1,88 +0,0 @@
-DROP TABLE IF EXISTS notes;
-DROP TABLE IF EXISTS note_relationships;
-DROP TABLE IF EXISTS pages;
-DROP TABLE IF EXISTS page_relationships;
-DROP TABLE IF EXISTS favorites;
-
-CREATE TABLE notes (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    uuid TEXT NOT NULL UNIQUE,
-    notetype TEXT
-);
-
-CREATE TABLE pages (
-    id INTEGER PRIMARY KEY AUTOINCREMENT,
-    slug text NOT NULL UNIQUE,
-    note_id INTEGER,
-    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-CREATE TABLE note_relationships (
-    note_id INTEGER NOT NULL,
-    parent_id INTEGER NOT NULL,
-    position INTEGER NOT NULL,
-    nature TEXT NOT NULL,
-    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION,
-    FOREIGN KEY (parent_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-CREATE TABLE page_relationships (
-    note_id INTEGER NOT NULL,
-    page_id INTEGER NOT NULL,
-    FOREIGN KEY (note_id) references notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION,
-    FOREIGN KEY (page_id) references pages (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-);
-
-INSERT INTO notes (id, uuid, notetype) VALUES
-    (1, "U1", "root"),
-    (2, "U2", "note"),
-    (3, "U3", "note"),
-    (4, "U4", "note"),
-    (5, "U5", "note"),
-    (6, "U6", "note"),
-    (7, "U7", "note"),
-    (8, "U8", "note");
-
-INSERT INTO note_relationships (note_id, parent_id, position, nature) VALUES
-    (2, 1, 1, "note"),
-    (3, 1, 2, "note"),
-    (4, 3, 1, "note"),
-    (5, 3, 2, "note"),
-    (6, 5, 1, "note"),
-    (7, 4, 1, "note"),
-    (8, 7, 1, "note");
-
-SELECT id, uuid, parent_id, parent_uuid, position, notetype
-FROM (
-    WITH RECURSIVE notetree (
-        id, uuid, parent_id, parent_uuid, position, notetype, cycle
-    )
-    AS (
-        SELECT notes.id, notes.uuid,
-               notes.id AS parent_id,
-               notes.uuid AS parent_uuid,
-               0, notes.notetype, ','||notes.id||','
-        FROM notes
-        WHERE notes.id = 1 AND notes.notetype = "root"
-
-        UNION
-        SELECT notes.id, notes.uuid,
-               notetree.id AS parent_id,
-               notetree.uuid AS parent_uuid,
-               note_relationships.position,
-               notes.notetype,
-               notetree.cycle||notes.id||','
-        FROM notes
-        INNER JOIN note_relationships
-            ON notes.id = note_relationships.note_id
-        INNER JOIN notetree
-            ON note_relationships.parent_id = notetree.id
-        WHERE notetree.cycle NOT LIKE '%,'||notes.id||',%'
-        ORDER BY note_relationships.position
-    )
-
-    SELECT * from notetree);
-
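Both scrapped experiments above return the nested note set as a flat list of rows that carry each note's parent id and position. For orientation only, here is a minimal Rust sketch of the Vec-to-Tree fold that the deleted TODO file alludes to; it is not part of either branch, and the FlatNote and TreeNote names are invented for illustration.

// Hypothetical sketch only -- not code from either branch.
use std::collections::HashMap;

struct FlatNote { id: i64, parent_id: i64, position: i64, content: String }
struct TreeNote { id: i64, content: String, children: Vec<TreeNote> }

// Fold the flat rows (in the shape the recursive CTE returns them) into a
// tree rooted at `root_id`, preserving each parent's position order.
fn build_tree(root_id: i64, rows: &[FlatNote]) -> TreeNote {
    let mut children: HashMap<i64, Vec<&FlatNote>> = HashMap::new();
    for row in rows.iter().filter(|r| r.id != root_id) {
        children.entry(row.parent_id).or_default().push(row);
    }
    for list in children.values_mut() {
        list.sort_by_key(|r| r.position);
    }
    fn attach(id: i64, content: &str, children: &HashMap<i64, Vec<&FlatNote>>) -> TreeNote {
        let kids = children
            .get(&id)
            .map(|list| list.iter().map(|r| attach(r.id, &r.content, children)).collect())
            .unwrap_or_default();
        TreeNote { id, content: content.to_string(), children: kids }
    }
    let root_content = rows.iter().find(|r| r.id == root_id).map(|r| r.content.as_str()).unwrap_or("");
    attach(root_id, root_content, &children)
}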
@@ -8,9 +8,6 @@ pub enum NoteStoreError {
     #[error("Invalid Note Structure")]
     InvalidNoteStructure(String),
 
-    /// The requested kasten or note was not found. As much as
-    /// possible, this should be preferred to a
-    /// sqlx::Error::RowNotFound.
     #[error("Not found")]
     NotFound,
 
@@ -1,11 +1,10 @@
 mod errors;
-mod parser;
+mod reference_parser;
 mod store;
 mod structs;
 
 pub use crate::errors::NoteStoreError;
 pub use crate::store::NoteStore;
-pub use crate::structs::{Note, NoteKind, NoteRelationship, PageRelationship};
 
 #[cfg(test)]
 mod tests {
@@ -22,14 +21,14 @@ mod tests {
         storagepool
     }
 
-    // Request for the page by slug. If the page exists, return it.
-    // If the page doesn't, return NotFound
-    //
+    // Request for the page by slug.
+    // If the page exists, return it. If the page doesn't, return NotFound
     #[tokio::test(threaded_scheduler)]
     async fn fetching_unfound_page_by_slug_works() {
         let storagepool = fresh_inmemory_database().await;
-        let foundpage = storagepool.get_page_by_slug("nonexistent-page").await;
-        assert!(foundpage.is_err());
+        let unfoundpage = storagepool.get_page_by_slug("nonexistent-page").await;
+        assert!(unfoundpage.is_err());
     }
 
     // Request for the page by title. If the page exists, return it.
@@ -44,22 +43,23 @@ mod tests {
         let newpageresult = storagepool.get_page_by_title(&title).await;
 
         assert!(newpageresult.is_ok(), "{:?}", newpageresult);
-        let (newpages, _) = newpageresult.unwrap();
+        let (newpage, newnotes) = newpageresult.unwrap();
 
-        assert_eq!(newpages.len(), 1);
-        let newpage = newpages.iter().next().unwrap();
+        assert_eq!(newpage.title, title, "{:?}", newpage.title);
+        assert_eq!(newpage.slug, "nonexistent-page");
 
+        assert_eq!(newnotes.len(), 1);
+        assert_eq!(newnotes[0].notetype, "root");
+        assert_eq!(newpage.note_id, newnotes[0].id);
 
-        assert_eq!(newpage.content, title, "{:?}", newpage.content);
-        assert_eq!(newpage.id, "nonexistent-page");
-        assert_eq!(newpage.kind, NoteKind::Page);
         assert!((newpage.creation_date - now).num_minutes() < 1);
         assert!((newpage.updated_date - now).num_minutes() < 1);
         assert!((newpage.lastview_date - now).num_minutes() < 1);
         assert!(newpage.deleted_date.is_none());
     }
 
-    fn make_new_note(content: &str) -> structs::NewNote {
-        structs::NewNoteBuilder::default()
+    fn make_new_note(content: &str) -> row_structs::NewNote {
+        row_structs::NewNoteBuilder::default()
             .content(content.to_string())
             .build()
             .unwrap()
@@ -70,46 +70,38 @@ mod tests {
         let title = "Nonexistent Page";
         let storagepool = fresh_inmemory_database().await;
         let newpageresult = storagepool.get_page_by_title(&title).await;
+        let (_newpage, newnotes) = newpageresult.unwrap();
 
-        assert!(newpageresult.is_ok(), "{:?}", newpageresult);
-        let (newpages, _) = newpageresult.unwrap();
-        assert_eq!(newpages.len(), 1);
-        let root = &newpages[0];
+        let root = &newnotes[0];
 
-        // root <- 1 <- 3
-        //      <- 2 <- 4
 
         let note1 = make_new_note("1");
-        let note1_id = storagepool.add_note(&note1, &root.id, Some(0)).await;
-        assert!(note1_id.is_ok(), "{:?}", note1_id);
-        let note1_id = note1_id.unwrap();
+        let note1_uuid = storagepool.insert_nested_note(&note1, &root.uuid, 0).await;
+        assert!(note1_uuid.is_ok(), "{:?}", note1_uuid);
+        let note1_uuid = note1_uuid.unwrap();
 
         let note2 = make_new_note("2");
-        let note2_id = storagepool.add_note(&note2, &root.id, Some(0)).await;
-        assert!(note2_id.is_ok(), "{:?}", note2_id);
-        let note2_id = note2_id.unwrap();
+        let note2_uuid = storagepool.insert_nested_note(&note2, &root.uuid, 0).await;
+        assert!(note2_uuid.is_ok(), "{:?}", note2_uuid);
+        let note2_uuid = note2_uuid.unwrap();
 
         let note3 = make_new_note("3");
-        let note3_id = storagepool.add_note(&note3, &note1_id, Some(0)).await;
-        assert!(note3_id.is_ok(), "{:?}", note3_id);
-        let _note3_id = note3_id.unwrap();
+        let note3_uuid = storagepool.insert_nested_note(&note3, &note1_uuid, 0).await;
+        assert!(note3_uuid.is_ok(), "{:?}", note3_uuid);
+        let note3_uuid = note3_uuid.unwrap();
 
         let note4 = make_new_note("4");
-        let note4_id = storagepool.add_note(&note4, &note2_id, Some(0)).await;
-        assert!(note4_id.is_ok(), "{:?}", note4_id);
-        let _note4_id = note4_id.unwrap();
+        let note4_uuid = storagepool.insert_nested_note(&note4, &note2_uuid, 0).await;
+        assert!(note4_uuid.is_ok(), "{:?}", note4_uuid);
+        let note4_uuid = note4_uuid.unwrap();
 
         let newpageresult = storagepool.get_page_by_title(&title).await;
-        assert!(newpageresult.is_ok(), "{:?}", newpageresult);
-        let (newpages, _) = newpageresult.unwrap();
+        let (newpage, newnotes) = newpageresult.unwrap();
 
-        assert_eq!(newpages.len(), 5);
-        let newroot = newpages.iter().next().unwrap();
+        assert_eq!(newpage.title, title, "{:?}", newpage.title);
+        assert_eq!(newpage.slug, "nonexistent-page");
 
-        assert_eq!(newroot.content, title, "{:?}", newroot.content);
-        assert_eq!(newroot.id, "nonexistent-page");
-        assert_eq!(newpages[1].parent_id, Some(newroot.id.clone()));
-        assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone()));
+        assert_eq!(newnotes.len(), 5);
+        assert_eq!(newnotes[0].notetype, "root");
+        assert_eq!(newpage.note_id, newnotes[0].id);
     }
 }
 
@@ -1,59 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-//! # Storage layer for Notesmachine
-//!
-//! This library implements the core functionality of Notesmachine and
-//! describes that functionality to a storage layer. There's a bit of
-//! intermingling in here which can't be helped, although it may make
-//! sense in the future to separate the decomposition of the note
-//! content into a higher layer.
-//!
-//! Notesmachine storage notes consist of two items: Note and Kasten.
-//! This distinction is somewhat arbitrary, as structurally these two
-//! items are stored in the same table.
-//!
-//! - Boxes have titles (and date metadata)
-//! - Notes have content and a type (and date metadata)
-//! - Notes are stored in boxes
-//! - Notes are positioned with respect to other notes.
-//! - There are two positions:
-//!   - Siblings, creating lists
-//!   - Children, creating trees like this one
-//! - Notes may have references (pointers) to other boxes
-//! - Notes may be moved around
-//! - Notes may be deleted
-//! - Boxes may be deleted
-//! - When a box is renamed, every reference to that box is auto-edited to
-//!   reflect the change. If a box is renamed to match an existing box, the
-//!   notes in both boxes are merged.
-//!
-//! Note-to-note relationships form trees, and are kept in a SQL database of
-//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
-//! `position` is a monotonic index on the parent (that is, every pair
-//! (`parent_id`, `position`) must be unique). The `relationship_type` is
-//! an enum and can specify that the relationship is *original*,
-//! *embedding*, or *referencing*. An embedded or referenced note may be
-//! read/write or read-only with respect to the original, but there is only
-//! one original note at any time.
-//!
-//! Note-to-box relationships form a graph, and are kept in the SQL database
-//! as a collection of *edges* from the note to the box (and naturally
-//! vice-versa).
-//!
-//! - Decision: When an original note is deleted, do all references and
-//!   embeddings also get deleted, or is the oldest one elevated to be a new
-//!   "original"? Or is that something the user may choose?
-//!
-//! - Decision: Should the merging issue be handled at this layer, or would
-//!   it make sense to move this to a higher layer, and only provide the
-//!   hooks for it here?
-//!
-
-mod references;
-use references::{build_page_titles, find_links};
-
-pub(crate) fn build_references(content: &str) -> Vec<String> {
-    build_page_titles(&find_links(content))
-}
@@ -4,7 +4,7 @@ use lazy_static::lazy_static;
 use regex::bytes::Regex as BytesRegex;
 use regex::Regex;
 
-struct Finder(pub Vec<String>);
+pub struct Finder(pub Vec<String>);
 
 impl Finder {
     pub fn new() -> Self {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub(super) fn find_links(document: &str) -> Vec<String> {
|
/// Given a content block, return a list of all the page references found
|
||||||
|
/// within the block. The references may need further massaging.
|
||||||
|
pub(crate) fn find_links(document: &str) -> Vec<String> {
|
||||||
let arena = Arena::new();
|
let arena = Arena::new();
|
||||||
let mut finder = Finder::new();
|
let mut finder = Finder::new();
|
||||||
let root = parse_document(&arena, document, &ComrakOptions::default());
|
let root = parse_document(&arena, document, &ComrakOptions::default());
|
||||||
|
@ -38,9 +40,7 @@ pub(super) fn find_links(document: &str) -> Vec<String> {
|
||||||
NodeValue::Text(ref text) => Some(
|
NodeValue::Text(ref text) => Some(
|
||||||
RE_REFERENCES
|
RE_REFERENCES
|
||||||
.captures_iter(text)
|
.captures_iter(text)
|
||||||
.filter_map(|t| t.get(1))
|
.map(|t| String::from_utf8_lossy(&t.get(1).unwrap().as_bytes()).to_string())
|
||||||
.map(|t| String::from_utf8_lossy(t.as_bytes()).to_string())
|
|
||||||
.filter(|s| !s.is_empty())
|
|
||||||
.collect(),
|
.collect(),
|
||||||
),
|
),
|
||||||
_ => None,
|
_ => None,
|
||||||
|
@@ -50,60 +50,41 @@ pub(super) fn find_links(document: &str) -> Vec<String> {
     finder.0
 }
 
-// This function is for the camel and snake case handlers.
 fn recase(title: &str) -> String {
     lazy_static! {
-        // Take every word that has a pattern of a capital letter
-        // followed by a lower case, and put a space between the
-        // capital and anything that precedes it.
-        // TODO: Make Unicode aware.
         static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
-
-        // Take every instance of a lower case letter or number,
-        // followed by a capital letter, and put a space between them.
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
         static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
-
-        // Take every instance of a word suffixed by a number and put
-        // a space between them.
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
-        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
-
-        // Take every instance of the one-or-more-of the symbols listed, and
-        // replace them with a space. This function is Unicode-irrelevant,
-        // although there is a list of symbols in the backreference parser
-        // that may disagree.
-        // TODO: Examine backreference parser and determine if this is
-        // sufficient.
+        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[a-z])(?P<n>\d)").unwrap();
         static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
     }
 
     // This should panic if misused, so... :-)
     let pass = title.to_string();
     let pass = pass.strip_prefix("#").unwrap();
 
     let pass = RE_PASS1.replace_all(&pass, "$s $n");
     let pass = RE_PASS4.replace_all(&pass, "$s $n");
     let pass = RE_PASS2.replace_all(&pass, "$s $n");
     RE_PASS3.replace_all(&pass, " ").trim().to_string()
 }
 
-pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
+fn build_page_titles(references: &[String]) -> Vec<String> {
     references
         .iter()
-        .filter_map(|s| match s.chars().next() {
-            Some('#') => Some(recase(s)),
-            Some('[') => Some(s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string()),
-            Some(_) => Some(s.clone()),
-            _ => None,
+        .map(|s| match s.chars().next() {
+            Some('#') => recase(s),
+            Some('[') => s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string(),
+            Some(_) => s.clone(),
+            _ => "".to_string(),
         })
-        .filter(|s| !s.is_empty())
+        .filter(|s| s.is_empty())
         .collect()
 }
 
+pub(crate) fn build_references(content: &str) -> Vec<String> {
+    build_page_titles(&find_links(content))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
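For a sense of what the regex passes in recase() above do in sequence, here is an illustrative trace; it is not a test from the repository, and the input string is hypothetical.

// Illustrative only -- expected behavior implied by the passes above,
// for a made-up input "#CamelCase2021_tag":
//   strip leading '#'                 : "CamelCase2021_tag"
//   RE_PASS1 (split CamelCase)        : "Camel Case2021_tag"
//   RE_PASS4 (split letter/digit)     : "Camel Case 2021_tag"
//   RE_PASS3 (separators to spaces)   : "Camel Case 2021 tag"
assert_eq!(recase("#CamelCase2021_tag"), "Camel Case 2021 tag");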
@@ -7,15 +7,34 @@ DROP TABLE IF EXISTS favorites;
 CREATE TABLE notes (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
     uuid TEXT NOT NULL UNIQUE,
+    content TEXT NULL,
     notetype TEXT,
-)
+    creation_date DATETIME NOT NULL,
+    updated_date DATETIME NOT NULL,
+    lastview_date DATETIME NOT NULL,
+    deleted_date DATETIME NULL
+);
 
+CREATE INDEX notes_uuids ON notes (uuid);
+
 CREATE TABLE pages (
     id INTEGER PRIMARY KEY AUTOINCREMENT,
+    title text NOT NULL UNIQUE,
     slug text NOT NULL UNIQUE,
     note_id INTEGER,
+    creation_date DATETIME NOT NULL,
+    updated_date DATETIME NOT NULL,
+    lastview_date DATETIME NOT NULL,
+    deleted_date DATETIME NULL,
     FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION
-)
+);
 
+CREATE INDEX pages_slugs ON pages (slug);
+
+CREATE TABLE favorites (
+    id INTEGER PRIMARY KEY AUTOINCREMENT,
+    position INTEGER NOT NULL
+);
+
 CREATE TABLE note_relationships (
     note_id INTEGER NOT NULL,
@@ -32,5 +51,3 @@ CREATE TABLE page_relationships (
     FOREIGN KEY (note_id) references notes (id) ON DELETE NO ACTION ON UPDATE NO ACTION,
     FOREIGN KEY (page_id) references pages (id) ON DELETE NO ACTION ON UPDATE NO ACTION
 );
-
-
@@ -0,0 +1,8 @@
+INSERT INTO notes (
+    uuid,
+    content,
+    notetype,
+    creation_date,
+    updated_date,
+    lastview_date)
+VALUES (?, ?, ?, ?, ?, ?);
@@ -0,0 +1,8 @@
+INSERT INTO pages (
+    slug,
+    title,
+    note_id,
+    creation_date,
+    updated_date,
+    lastview_date)
+VALUES (?, ?, ?, ?, ?, ?);
@@ -0,0 +1,86 @@
+-- This is undoubtedly one of the more complex bits of code I've
+-- written recently, and I do wish there had been macros because
+-- there's a lot of hand-written, copy-pasted code here around the
+-- basic content of a note; it would have been nice to be able to DRY
+-- that out.
+
+-- This expression creates a table, 'notetree', that contains all of
+-- the notes nested under a page. Each entry in the table includes
+-- the note's parent's internal and external ids so that applications
+-- can build an actual tree out of a vec of these things.
+
+-- TODO: Extensive testing to validate that the nodes are delivered
+-- *in nesting order* to the client.
+
+SELECT
+    id,
+    uuid,
+    parent_id,
+    parent_uuid,
+    content,
+    position,
+    notetype,
+    creation_date,
+    updated_date,
+    lastview_date,
+    deleted_date
+
+FROM (
+
+    WITH RECURSIVE notetree(
+        id,
+        uuid,
+        parent_id,
+        parent_uuid,
+        content,
+        position,
+        notetype,
+        creation_date,
+        updated_date,
+        lastview_date,
+        deleted_date,
+        cycle) AS
+
+    -- ROOT expression
+    (SELECT
+        notes.id,
+        notes.uuid,
+        notes.id AS parent_id,
+        notes.uuid AS parent_uuid,
+        notes.content,
+        0, -- Root notes are always in position 0
+        notes.notetype,
+        notes.creation_date,
+        notes.updated_date,
+        notes.lastview_date,
+        notes.deleted_date,
+        ','||notes.id||',' -- Cycle monitor
+    FROM notes
+    WHERE notes.id = ? AND notes.notetype = "root"
+
+    -- RECURSIVE expression
+    UNION SELECT
+        notes.id,
+        notes.uuid,
+        notetree.id AS parent_id,
+        notetree.uuid AS parent_uuid,
+        notes.content,
+        note_relationships.position,
+        notes.notetype,
+        notes.creation_date,
+        notes.updated_date,
+        notes.lastview_date,
+        notes.deleted_date,
+        notetree.cycle||notes.id||','
+    FROM notes
+    INNER JOIN note_relationships ON notes.id = note_relationships.note_id
+    -- For a given ID in the level of notetree in *this* recursion,
+    -- we want each note's branches one level down.
+    INNER JOIN notetree ON note_relationships.parent_id = notetree.id
+    -- And we want to make sure there are no cycles. There shouldn't
+    -- be; we're supposed to prevent those. But you never know.
+    WHERE
+        notetree.cycle NOT LIKE '%,'||notes.id||',%'
+    ORDER BY note_relationships.position)
+
+SELECT * from notetree);
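A minimal sketch of how this query file might be driven through sqlx, mirroring select_note_collection_from_root in the new store.rs further down; that RawNote derives sqlx::FromRow in structs.rs is an assumption here, not something shown in the diff.

// Sketch only, under the assumptions noted above.
async fn load_note_tree(pool: &sqlx::SqlitePool, root_note_id: i64) -> sqlx::Result<Vec<RawNote>> {
    let sql = include_str!("sql/select_note_collection_from_root.sql");
    sqlx::query_as::<_, RawNote>(sql)
        .bind(root_note_id) // fills the single `?` in the ROOT expression
        .fetch_all(pool)
        .await
}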
@@ -0,0 +1 @@
+SELECT id, uuid, content, notetype, creation_date, updated_date, lastview_date, deleted_date FROM notes WHERE uuid=?;
@@ -0,0 +1 @@
+SELECT id, title, slug, note_id, creation_date, updated_date, lastview_date, deleted_date FROM pages WHERE slug=?;
@@ -0,0 +1,3 @@
+UPDATE notes
+SET content = ?, updated_date = ?, lastview_date = ?
+WHERE uuid = ?;
@@ -0,0 +1,702 @@
+use crate::errors::NoteStoreError;
+use crate::reference_parser::build_references;
+use crate::structs::{
+    JustId, JustSlugs, NewNote, NewNoteBuilder, NewPage, NewPageBuilder, NoteRelationship, PageTitles, RawNote,
+    RawPage, RowCount,
+};
+use lazy_static::lazy_static;
+use regex::Regex;
+use shrinkwraprs::Shrinkwrap;
+use slug::slugify;
+use sqlx::{
+    sqlite::{Sqlite, SqlitePool, SqliteRow},
+    Done, Executor, Row,
+};
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::sync::Arc;
+
+#[derive(Shrinkwrap, Copy, Clone)]
+struct PageId(i64);
+
+#[derive(Shrinkwrap, Copy, Clone)]
+struct NoteId(i64);
+
+#[derive(Shrinkwrap, Copy, Clone)]
+struct ParentId(i64);
+
+/// A handle to our Sqlite database.
+#[derive(Clone, Debug)]
+pub struct NoteStore(Arc<SqlitePool>);
+
+type NoteResult<T> = core::result::Result<T, NoteStoreError>;
+type SqlResult<T> = sqlx::Result<T>;
+
+impl NoteStore {
+    pub async fn new(url: &str) -> NoteResult<Self> {
+        let pool = SqlitePool::connect(url).await?;
+        Ok(NoteStore(Arc::new(pool)))
+    }
+
+    // Erase all the data in the database and restore it
+    // to its original empty form. Do not use unless you
+    // really, really want that to happen.
+    pub async fn reset_database(&self) -> NoteResult<()> {
+        reset_database(&*self.0).await.map_err(NoteStoreError::DBError)
+    }
+
+    /// Fetch page by slug
+    ///
+    /// Supports the use case of the user navigating to a known place
+    /// via a bookmark or other URL. Since the title isn't clear from
+    /// the slug, the slug is insufficient to generate a new page, so
+    /// this use case says that in the event of a failure to find the
+    /// requested page, return a basic NotFound.
+    pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(RawPage, Vec<RawNote>)> {
+        // let select_note_collection_for_root = include_str!("sql/select_note_collection_for_root.sql");
+        let mut tx = self.0.begin().await?;
+        let page = select_page_by_slug(&mut tx, slug).await?;
+        let note_id = page.note_id;
+        let notes = select_note_collection_from_root(&mut tx, note_id).await?;
+        tx.commit().await?;
+        Ok((page, notes))
+    }
+
+    /// Fetch page by title
+    ///
+    /// Supports the use case of the user navigating to a page via
+    /// the page's formal title. Since the title is the key reference
+    /// of the system, if no page with that title is found, a page with
+    /// that title is generated automatically.
+    pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(RawPage, Vec<RawNote>)> {
+        let mut tx = self.0.begin().await?;
+        let (page, notes) = match select_page_by_title(&mut tx, title).await {
+            Ok(page) => {
+                let note_id = page.note_id;
+                (page, select_note_collection_from_root(&mut tx, note_id).await?)
+            }
+            Err(sqlx::Error::RowNotFound) => {
+                let page = {
+                    let new_root_note = create_unique_root_note();
+                    let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?;
+                    let new_page_slug = generate_slug(&mut tx, title).await?;
+                    let new_page = create_new_page_for(&title, &new_page_slug, new_root_note_id);
+                    let _ = insert_one_new_page(&mut tx, &new_page).await?;
+                    select_page_by_title(&mut tx, &title).await?
+                };
+                let note_id = page.note_id;
+                (page, select_note_collection_from_root(&mut tx, note_id).await?)
+            }
+            Err(e) => return Err(NoteStoreError::DBError(e)),
+        };
+        tx.commit().await?;
+        Ok((page, notes))
+    }
+
+    // TODO: Make sure the position is sane.
+    /// Insert a note as the child of an existing note, at a set position.
+    pub async fn insert_nested_note(
+        &self,
+        note: &NewNote,
+        parent_note_uuid: &str,
+        position: i64,
+    ) -> NoteResult<String> {
+        let mut new_note = note.clone();
+        new_note.uuid = friendly_id::create();
+        let references = build_references(&note.content);
+        let mut tx = self.0.begin().await?;
+
+        // Start by building the note and putting it into its relationship.
+        let parent_id: ParentId = select_note_id_for_uuid(&mut tx, parent_note_uuid).await?;
+        let parent_max_position = assert_max_child_position_for_note(&mut tx, parent_id).await?;
+        let position = if position > parent_max_position {
+            parent_max_position + 1
+        } else {
+            position
+        };
+        let new_note_id = insert_one_new_note(&mut tx, &new_note).await?;
+        let _ = make_room_for_new_note(&mut tx, parent_id, position).await?;
+        let _ = insert_note_to_note_relationship(&mut tx, parent_id, new_note_id, position, "note").await?;
+
+        // From the references, make lists of pages that exist, and pages
+        // that do not.
+        let found_references = find_all_page_references_for(&mut tx, &references).await?;
+        let new_references = diff_references(&references, &found_references);
+        let mut known_reference_ids: Vec<PageId> = Vec::new();
+
+        // Create the pages that don't exist
+        for one_reference in new_references.iter() {
+            let new_root_note = create_unique_root_note();
+            let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?;
+            let new_page_slug = generate_slug(&mut tx, &one_reference).await?;
+            let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id);
+            known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?)
+        }
+
+        // And associate the note with all the pages.
+        known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect());
+        let _ = insert_note_to_page_relationships(&mut tx, new_note_id, &known_reference_ids).await?;
+
+        tx.commit().await?;
+        Ok(new_note.uuid)
+    }
+
+    // This doesn't do anything with the references, as those are
+    // dependent entirely on the *content*, and not the *position*, of
+    // the note and the referenced page.
+    //
+    // TODO: Ensure the position is sane.
+    /// Move a note from one location to another.
+    pub async fn move_note(
+        &self,
+        note_uuid: &str,
+        old_parent_uuid: &str,
+        new_parent_uuid: &str,
+        new_position: i64,
+    ) -> NoteResult<()> {
+        let all_uuids = vec![note_uuid, old_parent_uuid, new_parent_uuid];
+        let mut tx = self.0.begin().await?;
+
+        // This is one of the few cases where we're getting IDs for
+        // notes, but the nature of the ID isn't known at this time.
+        // This has to be handled manually, in the next paragraph
+        // below.
+        let found_id_vec = bulk_select_ids_for_note_uuids(&mut tx, &all_uuids).await?;
+        let found_ids: HashMap<String, i64> = found_id_vec.into_iter().collect();
+        if found_ids.len() != 3 {
+            return Err(NoteStoreError::NotFound);
+        }
+
+        let old_parent_id = ParentId(*found_ids.get(old_parent_uuid).unwrap());
+        let new_parent_id = ParentId(*found_ids.get(new_parent_uuid).unwrap());
+        let note_id = NoteId(*found_ids.get(note_uuid).unwrap());
+
+        let old_note = get_note_to_note_relationship(&mut tx, old_parent_id, note_id).await?;
+        let old_note_position = old_note.position;
+        let old_note_nature = &old_note.nature;
+
+        let _ = delete_note_to_note_relationship(&mut tx, old_parent_id, note_id).await?;
+        let _ = close_hole_for_deleted_note(&mut tx, old_parent_id, old_note_position).await?;
+        let parent_max_position = assert_max_child_position_for_note(&mut tx, new_parent_id).await?;
+        let new_position = if new_position > parent_max_position {
+            parent_max_position + 1
+        } else {
+            new_position
+        };
+        let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?;
+        let _ =
+            insert_note_to_note_relationship(&mut tx, new_parent_id, note_id, new_position, old_note_nature).await?;
+        tx.commit().await?;
+        Ok(())
+    }
+
+    /// Embed or reference a note from a different location.
+    pub async fn reference_or_embed_note(
+        &self,
+        note_uuid: &str,
+        new_parent_uuid: &str,
+        new_position: i64,
+        new_nature: &str,
+    ) -> NoteResult<()> {
+        let mut tx = self.0.begin().await?;
+        let existing_note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0);
+        let new_parent_id: ParentId = select_note_id_for_uuid(&mut tx, new_parent_uuid).await?;
+        let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?;
+        let _ = insert_note_to_note_relationship(&mut tx, new_parent_id, existing_note_id, new_position, new_nature)
+            .await?;
+        tx.commit().await?;
+        Ok(())
+    }
+
+    /// Delete a note
+    pub async fn delete_note(&self, note_uuid: &str, note_parent_uuid: &str) -> NoteResult<()> {
+        let mut tx = self.0.begin().await?;
+        let condemned_note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0);
+        let note_parent_id: ParentId = select_note_id_for_uuid(&mut tx, note_parent_uuid).await?;
+        let _ = delete_note_to_note_relationship(&mut tx, note_parent_id, condemned_note_id);
+        if count_existing_note_relationships(&mut tx, condemned_note_id).await? == 0 {
+            let _ = delete_note_to_page_relationships(&mut tx, condemned_note_id).await?;
+            let _ = delete_note(&mut tx, condemned_note_id).await?;
+        }
+        tx.commit().await?;
+        Ok(())
+    }
+
+    /// Update a note's content
+    pub async fn update_note_content(&self, note_uuid: &str, content: &str) -> NoteResult<()> {
+        let references = build_references(&content);
+
+        let mut tx = self.0.begin().await?;
+
+        let note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0);
+        let _ = update_note_content(&mut tx, note_id, &content).await?;
+
+        let found_references = find_all_page_references_for(&mut tx, &references).await?;
+        let new_references = diff_references(&references, &found_references);
+        let mut known_reference_ids: Vec<PageId> = Vec::new();
+
+        // Create the pages that don't exist
+        for one_reference in new_references.iter() {
+            let new_root_note = create_unique_root_note();
+            let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?;
+            let new_page_slug = generate_slug(&mut tx, &one_reference).await?;
+            let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id);
+            known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?)
+        }
+
+        // And associate the note with all the pages.
+        known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect());
+        let _ = insert_note_to_page_relationships(&mut tx, note_id, &known_reference_ids).await?;
+
+        tx.commit().await?;
+        Ok(())
+    }
+}
+
+//  ___     _          _
+// | _ \_ _(_)_ ____ _| |_ ___
+// |  _/ '_| \ V / _` |  _/ -_)
+// |_| |_| |_|\_/\__,_|\__\___|
+//
+
+// I'm putting a lot of faith in Rust's ability to inline stuff. I'm
+// sure this is okay. But really, this lets the API be clean and
+// coherent and easily readable, and hides away the gnarliness of some
+// of the SQL queries.
+
+async fn reset_database<'a, E>(executor: E) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let initialize_sql = include_str!("sql/initialize_database.sql");
+    sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
+}
+
+async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<RawPage>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let select_one_page_by_slug_sql = concat!(
+        "SELECT id, title, slug, note_id, creation_date, updated_date, ",
+        "lastview_date, deleted_date FROM pages WHERE slug=?;"
+    );
+    Ok(sqlx::query_as(&select_one_page_by_slug_sql)
+        .bind(&slug)
+        .fetch_one(executor)
+        .await?)
+}
+
+async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<RawPage>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let select_one_page_by_title_sql = concat!(
+        "SELECT id, title, slug, note_id, creation_date, updated_date, ",
+        "lastview_date, deleted_date FROM pages WHERE title=?;"
+    );
+    Ok(sqlx::query_as(&select_one_page_by_title_sql)
+        .bind(&title)
+        .fetch_one(executor)
+        .await?)
+}
+
+async fn select_note_id_for_uuid<'a, E>(executor: E, uuid: &str) -> SqlResult<ParentId>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let select_note_id_for_uuid_sql = "SELECT id FROM notes WHERE uuid = ?";
+    let id: JustId = sqlx::query_as(&select_note_id_for_uuid_sql)
+        .bind(&uuid)
+        .fetch_one(executor)
+        .await?;
+    Ok(ParentId(id.id))
+}
+
+async fn make_room_for_new_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let make_room_for_new_note_sql = concat!(
+        "UPDATE note_relationships ",
+        "SET position = position + 1 ",
+        "WHERE position >= ? and parent_id = ?;"
+    );
+
+    sqlx::query(make_room_for_new_note_sql)
+        .bind(&position)
+        .bind(&*parent_id)
+        .execute(executor)
+        .await
+        .map(|_| ())
+}
+
+async fn insert_note_to_note_relationship<'a, E>(
+    executor: E,
+    parent_id: ParentId,
+    note_id: NoteId,
+    position: i64,
+    nature: &str,
+) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let insert_note_to_note_relationship_sql = concat!(
+        "INSERT INTO note_relationships (parent_id, note_id, position, nature) ",
+        "values (?, ?, ?, ?)"
+    );
+
+    sqlx::query(insert_note_to_note_relationship_sql)
+        .bind(&*parent_id)
+        .bind(&*note_id)
+        .bind(&position)
+        .bind(&nature)
+        .execute(executor)
+        .await
+        .map(|_| ())
+}
+
+async fn select_note_collection_from_root<'a, E>(executor: E, root: i64) -> SqlResult<Vec<RawNote>>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let select_note_collection_from_root_sql = include_str!("sql/select_note_collection_from_root.sql");
+    Ok(sqlx::query_as(&select_note_collection_from_root_sql)
+        .bind(&root)
+        .fetch_all(executor)
+        .await?)
+}
+
+async fn insert_one_new_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<NoteId>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let insert_one_note_sql = concat!(
+        "INSERT INTO notes ( ",
+        "    uuid, ",
+        "    content, ",
+        "    notetype, ",
+        "    creation_date, ",
+        "    updated_date, ",
+        "    lastview_date) ",
+        "VALUES (?, ?, ?, ?, ?, ?);"
+    );
+
+    Ok(NoteId(
+        sqlx::query(insert_one_note_sql)
+            .bind(&note.uuid)
+            .bind(&note.content)
+            .bind(&note.notetype)
+            .bind(&note.creation_date)
+            .bind(&note.updated_date)
+            .bind(&note.lastview_date)
+            .execute(executor)
+            .await?
+            .last_insert_rowid(),
+    ))
+}
+
+// Given a possible slug, find the slug with the highest
+// uniquification number, and return that number, if any.
+fn find_maximal_slug(slugs: &[JustSlugs]) -> Option<u32> {
+    lazy_static! {
+        static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
+    }
+
+    if slugs.is_empty() {
+        return None;
+    }
+
+    let mut slug_counters: Vec<u32> = slugs
+        .iter()
+        .filter_map(|slug| RE_CAP_NUM.captures(&slug.slug))
+        .map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
+        .collect();
+    slug_counters.sort_unstable();
+    slug_counters.pop()
+}
+
+// Given an initial string and an existing collection of slugs,
+// generate a new slug that does not conflict with the current
+// collection.
+async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    lazy_static! {
+        static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
+    }
+
+    let initial_slug = slugify(title);
+    let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
+    let slug_finder_sql = "SELECT slug FROM pages WHERE slug LIKE '?%';";
+    let similar_slugs: Vec<JustSlugs> = sqlx::query_as(&slug_finder_sql)
+        .bind(&*sample_slug)
+        .fetch_all(executor)
+        .await?;
+    let maximal_slug = find_maximal_slug(&similar_slugs);
+    match maximal_slug {
+        None => Ok(initial_slug),
+        Some(max_slug) => Ok(format!("{}-{}", initial_slug, max_slug + 1)),
+    }
+}
+
+async fn insert_one_new_page<'a, E>(executor: E, page: &NewPage) -> SqlResult<PageId>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let insert_one_page_sql = concat!(
+        "INSERT INTO pages ( ",
+        "    slug, ",
+        "    title, ",
+        "    note_id, ",
+        "    creation_date, ",
+        "    updated_date, ",
+        "    lastview_date) ",
+        "VALUES (?, ?, ?, ?, ?, ?);"
+    );
+
+    Ok(PageId(
+        sqlx::query(insert_one_page_sql)
+            .bind(&page.slug)
+            .bind(&page.title)
+            .bind(&page.note_id)
+            .bind(&page.creation_date)
+            .bind(&page.updated_date)
+            .bind(&page.lastview_date)
+            .execute(executor)
+            .await?
+            .last_insert_rowid(),
+    ))
+}
+
+async fn insert_note_to_page_relationships<'a, E>(
+    executor: E,
+    note_id: NoteId,
+    references: &[PageId],
+) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let insert_note_page_references_sql = "INSERT INTO page_relationships (note_id, page_id) VALUES ".to_string()
+        + &["(?, ?)"].repeat(references.len()).join(", ")
+        + &";".to_string();
+
+    let mut request = sqlx::query(&insert_note_page_references_sql);
+    for reference in references {
+        request = request.bind(*note_id).bind(**reference);
+    }
+
+    request.execute(executor).await.map(|_| ())
+}
+
+// For a given collection of uuids, retrieve the internal ID used by
+// the database.
+async fn bulk_select_ids_for_note_uuids<'a, E>(executor: E, ids: &[&str]) -> SqlResult<Vec<(String, i64)>>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let bulk_select_ids_for_note_uuids_sql = "SELECT uuid, id FROM notes WHERE uuid IN (".to_string()
+        + &["?"].repeat(ids.len()).join(",")
+        + &");".to_string();
+
+    let mut request = sqlx::query(&bulk_select_ids_for_note_uuids_sql);
+    for id in ids.iter() {
+        request = request.bind(id);
+    }
+    Ok(request
+        .try_map(|row: SqliteRow| {
+            let l = row.try_get::<String, _>(0)?;
+            let r = row.try_get::<i64, _>(1)?;
+            Ok((l, r))
+        })
+        .fetch_all(executor)
+        .await?
+        .into_iter()
+        .collect())
+}
+
+// Used by move_note to identify the single note to note relationship
+// by the original parent and child pair. Used mostly to find the
+// position for recalculation, to create a new gap or close an old
+// one.
+async fn get_note_to_note_relationship<'a, E>(
+    executor: E,
+    parent_id: ParentId,
+    note_id: NoteId,
+) -> SqlResult<NoteRelationship>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let get_note_to_note_relationship_sql = concat!(
+        "SELECT parent_id, note_id, position, nature ",
+        "FROM note_relationships ",
+        "WHERE parent_id = ? and note_id = ? ",
+        "LIMIT 1"
+    );
+    sqlx::query_as(get_note_to_note_relationship_sql)
+        .bind(&*parent_id)
+        .bind(&*note_id)
+        .fetch_one(executor)
+        .await
+}
+
+async fn delete_note_to_note_relationship<'a, E>(executor: E, parent_id: ParentId, note_id: NoteId) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let delete_note_to_note_relationship_sql = concat!(
+        "DELETE FROM note_relationships ",
+        "WHERE parent_id = ? and note_id = ? "
+    );
+
+    let count = sqlx::query(delete_note_to_note_relationship_sql)
+        .bind(&*parent_id)
+        .bind(&*note_id)
+        .execute(executor)
+        .await?
+        .rows_affected();
+
+    match count {
+        1 => Ok(()),
+        _ => Err(sqlx::Error::RowNotFound),
+    }
+}
+
+async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let delete_note_to_page_relationships_sql = concat!("DELETE FROM page_relationships ", "WHERE note_id = ? ");
+
+    let _ = sqlx::query(delete_note_to_page_relationships_sql)
+        .bind(&*note_id)
+        .execute(executor)
+        .await?;
+    Ok(())
+}
+
+async fn delete_note<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let delete_note_sql = concat!("DELETE FROM notes WHERE note_id = ?");
+
+    let count = sqlx::query(delete_note_sql)
+        .bind(&*note_id)
+        .execute(executor)
+        .await?
+        .rows_affected();
+
+    match count {
+        1 => Ok(()),
+        _ => Err(sqlx::Error::RowNotFound),
+    }
+}
+
+async fn count_existing_note_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<i64>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM page_relationships WHERE note_id = ?";
+
+    let count: RowCount = sqlx::query_as(count_existing_note_relationships_sql)
+        .bind(&*note_id)
+        .fetch_one(executor)
+        .await?;
+
+    Ok(count.count)
+}
+
+async fn assert_max_child_position_for_note<'a, E>(executor: E, note_id: ParentId) -> SqlResult<i64>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let assert_max_child_position_for_note_sql =
+        "SELECT MAX(position) as count FROM note_relationships WHERE parent_id = ?";
+
+    let count: RowCount = sqlx::query_as(assert_max_child_position_for_note_sql)
+        .bind(&*note_id)
+        .fetch_one(executor)
+        .await?;
+
+    Ok(count.count)
+}
+
+// After removing a note, recalculate the position of all notes under
+// the parent note, such that their order is now completely
+// sequential.
+async fn close_hole_for_deleted_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let close_hole_for_deleted_note_sql = concat!(
+        "UPDATE note_relationships ",
+        "SET position = position - 1 ",
+        "WHERE position > ? and parent_id = ?;"
+    );
+
+    sqlx::query(close_hole_for_deleted_note_sql)
+        .bind(&position)
+        .bind(&*parent_id)
+        .execute(executor)
+        .await
+        .map(|_| ())
+}
+
+async fn find_all_page_references_for<'a, E>(executor: E, references: &[String]) -> SqlResult<Vec<PageTitles>>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let find_all_references_for_sql = "SELECT id, title FROM pages WHERE title IN (".to_string()
+        + &["?"].repeat(references.len()).join(",")
+        + &");".to_string();
+
+    let mut request = sqlx::query_as(&find_all_references_for_sql);
+    for id in references.iter() {
+        request = request.bind(id);
+    }
+    request.fetch_all(executor).await
+}
+
+async fn update_note_content<'a, E>(executor: E, note_id: NoteId, content: &str) -> SqlResult<()>
+where
+    E: Executor<'a, Database = Sqlite>,
+{
+    let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
+    let count = sqlx::query(update_note_content_sql)
+        .bind(content)
+        .bind(&*note_id)
+        .execute(executor)
+        .await?
+        .rows_affected();
+
+    match count {
+        1 => Ok(()),
+        _ => Err(sqlx::Error::RowNotFound),
+    }
+}
+
+fn create_unique_root_note() -> NewNote {
+    NewNoteBuilder::default()
+        .uuid(friendly_id::create())
+        .content("".to_string())
+        .notetype("root".to_string())
+        .build()
+        .unwrap()
+}
+
+fn create_new_page_for(title: &str, slug: &str, note_id: NoteId) -> NewPage {
+    NewPageBuilder::default()
+        .slug(slug.to_string())
+        .title(title.to_string())
+        .note_id(*note_id)
+        .build()
+        .unwrap()
+}
+
+// Given the references supplied, and the references found in the datastore,
+// return a list of the references not found in the datastore.
+fn diff_references(references: &[String], found_references: &[PageTitles]) -> Vec<String> {
+    let all: HashSet<String> = references.iter().cloned().collect();
+    let found: HashSet<String> = found_references.iter().map(|r| r.title.clone()).collect();
+    all.difference(&found).cloned().collect()
+}
@ -1,254 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Page.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//!   - Siblings, creating lists
//!   - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//!   reflect the change. If a box is renamed to match an existing box, the
//!   notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//!   embeddings also get deleted, or is the oldest one elevated to be a new
//!   "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//!   it make sense to move this to a higher layer, and only provide the
//!   hooks for it here?
//!
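//
// Editor's sketch, not part of the original file: the note-to-note table
// described above, written out for a hypothetical three-note tree. Every
// (`parent_id`, `position`) pair is unique, so each parent's children form
// an ordered list and the rows as a whole form a tree:
//
//   (parent_id, child_id, position, relationship_type)
//   ("root",    "a",      1,        "original")
//   ("root",    "b",      2,        "original")
//   ("a",       "c",      1,        "original")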
#![allow(clippy::len_zero)]
use crate::errors::NoteStoreError;
use crate::parser::build_references;
use crate::store::private::*;
use crate::structs::*;
use sqlx::sqlite::SqlitePool;
use std::sync::Arc;

/// A handle to our Sqlite database.
#[derive(Clone, Debug)]
pub struct NoteStore(Arc<SqlitePool>);

pub type NoteResult<T> = core::result::Result<T, NoteStoreError>;

// After wrestling for a while with the fact that 'box' is a reserved
// word in Rust, I decided to just go with Note (note) and Page
// (box).

impl NoteStore {
    /// Initializes a new instance of the note store. Note that the
    /// note store holds an Arc internally; this code is (I think)
    /// safe to Send.
    pub async fn new(url: &str) -> NoteResult<Self> {
        let pool = SqlitePool::connect(url).await?;
        Ok(NoteStore(Arc::new(pool)))
    }
    /// Erase all the data in the database and restore it
    /// to its original empty form. Do not use unless you
    /// really, really want that to happen.
    pub async fn reset_database(&self) -> NoteResult<()> {
        reset_database(&*self.0).await.map_err(NoteStoreError::DBError)
    }

    /// Fetch page by slug
    ///
    /// Supports the use case of the user navigating to a known place
    /// via a bookmark or other URL. Since the title isn't clear from
    /// the slug, the slug is insufficient to generate a new page, so
    /// this use case says that in the event of a failure to find the
    /// requested page, return a basic NotFound.
    pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
        let page = select_page_by_slug(&*self.0, slug).await?;
        if page.is_empty() {
            return Err(NoteStoreError::NotFound);
        }

        let note_id = &page[0].id;
        let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
        Ok((page, backreferences))
    }

    /// Fetch page by title
    ///
    /// The most common use case: the user is navigating by requesting
    /// a page. The page either exists or it doesn't. If it
    /// doesn't, we go out and make it. Since we know it doesn't exist,
    /// we also know no backreferences to it exist, so in that case you
    /// get back two empty vecs.
    pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
        if title.is_empty() {
            return Err(NoteStoreError::NotFound);
        }

        let page = select_page_by_title(&*self.0, title).await?;
        if page.len() > 0 {
            let note_id = &page[0].id;
            let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
            return Ok((page, backreferences));
        }

        // Sanity check!
        let references = build_references(&title);
        if references.len() > 0 {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Titles may not contain nested references.".to_string(),
            ));
        }

        let mut tx = self.0.begin().await?;
        let slug = generate_slug(&mut tx, title).await?;
        let page = create_page(&title, &slug);
        insert_note(&mut tx, &page).await?;
        tx.commit().await?;

        Ok((vec![Note::from(page)], vec![]))
    }

    pub async fn add_note(&self, note: &NewNote, parent_id: &str, location: Option<i64>) -> NoteResult<String> {
        let kind = RelationshipKind::Direct;
        let new_id = self.insert_note(note, parent_id, location, kind).await?;
        Ok(new_id)
    }

    /// Move a note from one location to another.
    pub async fn move_note(
        &self,
        note_id: &str,
        old_parent_id: &str,
        new_parent_id: &str,
        new_location: i64,
    ) -> NoteResult<()> {
        let mut tx = self.0.begin().await?;

        let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
        let old_note_location = old_note.location;
        let old_note_kind = old_note.kind;

        delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
        close_hole_for_deleted_note_relationship(&mut tx, &old_parent_id, old_note_location).await?;
        let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?;
        make_room_for_new_note_relationship(&mut tx, &new_parent_id, new_location).await?;
        insert_note_to_note_relationship(&mut tx, &new_parent_id, &note_id, new_location, &old_note_kind).await?;
        tx.commit().await?;
        Ok(())
    }

    /// Updates a note's content. Completely rebuilds the note's
    /// outgoing edge reference list every time.
    pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
        let references = build_references(&content);
        let mut tx = self.0.begin().await?;
        update_note_content(&mut tx, &note_id, &content).await?;
        delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?;
        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
        insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids).await?;
        tx.commit().await?;
        Ok(())
    }

    /// Deletes a note. If the note's relationship drops to zero, all
    /// references from that note to pages are also deleted.
    pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
        let mut tx = self.0.begin().await?;

        let note_id = note_id.to_string();
        let parent_id = note_parent_id.to_string();

        if parent_id != note_id {
            delete_note_to_note_relationship(&mut tx, &parent_id, &note_id).await?;
        }
        // The big one: if zero parents report having an interest in this note, then it,
        // *and any sub-relationships*, go away.
        if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
            delete_note_to_page_relationships(&mut tx, &note_id).await?;
            delete_note(&mut tx, &note_id).await?;
        }
        tx.commit().await?;
        Ok(())
    }
}

// The Private stuff

impl NoteStore {
    // Pretty much the most dangerous function in our system. Has to
    // have ALL the error checking.
    async fn insert_note(
        &self,
        note: &NewNote,
        parent_id: &str,
        location: Option<i64>,
        kind: RelationshipKind,
    ) -> NoteResult<String> {
        if let Some(location) = location {
            if location < 0 {
                return Err(NoteStoreError::InvalidNoteStructure(
                    "Add note: A negative location is not valid.".to_string(),
                ));
            }
        }

        if parent_id.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: A parent note ID is required.".to_string(),
            ));
        }

        if note.id.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: Your note should have an id already".to_string(),
            ));
        }

        if note.content.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: Empty notes are not supported.".to_string(),
            ));
        }

        let references = build_references(&note.content);

        let mut tx = self.0.begin().await?;
        let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?;
        insert_note(&mut tx, &note).await?;
        make_room_for_new_note_relationship(&mut tx, &parent_id, location).await?;
        insert_note_to_note_relationship(&mut tx, &parent_id, &note.id, location, &kind).await?;
        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
        insert_bulk_note_to_page_relationships(&mut tx, &note.id, &known_reference_ids).await?;
        tx.commit().await?;
        Ok(note.id.to_string())
    }
}

@ -1,58 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Kasten.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//!   - Siblings, creating lists
//!   - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//!   reflect the change. If a box is renamed to match an existing box, the
//!   notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//!   embeddings also get deleted, or is the oldest one elevated to be a new
//!   "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//!   it make sense to move this to a higher layer, and only provide the
//!   hooks for it here?
//!

mod api;
mod private;

pub use crate::store::api::NoteResult;
pub use crate::store::api::NoteStore;

@ -1,594 +0,0 @@
use crate::structs::*;
use lazy_static::lazy_static;
use regex::Regex;
use slug::slugify;
use sqlx::{sqlite::Sqlite, Acquire, Done, Executor, Transaction};
use std::cmp;
use std::collections::HashSet;

type SqlResult<T> = sqlx::Result<T>;

//  ___     _          _
// | _ \_ _(_)_ ____ _| |_ ___
// |  _/ '_| \ V / _` |  _/ -_)
// |_| |_| |_|\_/\__,_|\__\___|
//

// I'm putting a lot of faith in Rust's ability to inline stuff. I'm
// sure this is okay. But really, this lets the API be clean and
// coherent and easily readable, and hides away the gnarliness of some
// of the SQL queries.

lazy_static! {
    static ref SELECT_PAGE_BY_TITLE_SQL: String = str::replace(
        include_str!("sql/select_notes_by_parameter.sql"),
        "QUERYPARAMETER",
        "notes.content"
    );
}

lazy_static! {
    static ref SELECT_PAGE_BY_ID_SQL: String = str::replace(
        include_str!("sql/select_notes_by_parameter.sql"),
        "QUERYPARAMETER",
        "notes.id"
    );
}

lazy_static! {
    static ref SELECT_NOTES_BACKREFERENCING_PAGE_SQL: &'static str =
        include_str!("sql/select_notes_backreferencing_page.sql");
}

//  ___             _
// | _ \___ ___ ___| |_
// |   / -_|_-</ -_)  _|
// |_|_\___/__/\___|\__|
//

pub(crate) async fn reset_database<'a, E>(executor: E) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let initialize_sql = include_str!("sql/initialize_database.sql");
    sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
}

//  ___    _      _      _  __        _
// | __|__| |_ __| |_   | |/ /__ _ __| |_ ___ _ _
// | _/ -_)  _/ _| ' \  | ' </ _` (_-<  _/ -_) ' \
// |_|\___|\__\__|_||_| |_|\_\__,_/__/\__\___|_||_|
//

// The next three functions are essentially the same, although the internal
// SQL operations are quite different between the first two and the last.
async fn select_object_by_query<'a, E>(executor: E, query: &str, field: &str) -> SqlResult<Vec<Note>>
where
    E: Executor<'a, Database = Sqlite>,
{
    let r: Vec<RowNote> = sqlx::query_as(query).bind(field).fetch_all(executor).await?;
    Ok(r.into_iter().map(Note::from).collect())
}

// Select the requested page via its id. This is fairly rare;
// pages should usually be picked up via their title, but if you're
// navigating to an instance, this is how you specify the page in a
// URL. The return value is an array of Note objects; it is the
// responsibility of client code to restructure these into a tree-like
// object.
//
// Recommended: Clients should update the URL whenever changing
// page.
pub(crate) async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<Vec<Note>>
where
    E: Executor<'a, Database = Sqlite>,
{
    select_object_by_query(executor, &SELECT_PAGE_BY_ID_SQL, &slug).await
}
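
// Editor's sketch, not part of the original file: one way a client might
// fold the flat Vec<Note> returned above into parent -> children buckets,
// using each note's parent_id (the root arrives with parent_id == None).
// The exact client-side tree type is left open here.
#[allow(dead_code)]
fn group_notes_by_parent(notes: &[Note]) -> std::collections::HashMap<Option<String>, Vec<&Note>> {
    let mut children: std::collections::HashMap<Option<String>, Vec<&Note>> = Default::default();
    for note in notes {
        // Notes are delivered in location order, so each bucket stays ordered.
        children.entry(note.parent_id.clone()).or_default().push(note);
    }
    children
}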

// Fetch the page by title. The return value is an array of Note
// objects; it is the responsibility of client code to restructure
// these into a tree-like object.
pub(crate) async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<Vec<Note>>
where
    E: Executor<'a, Database = Sqlite>,
{
    select_object_by_query(executor, &SELECT_PAGE_BY_TITLE_SQL, &title).await
}

// Fetch all backreferences to a page. The return value is an array
// of arrays, and inside each array is a list from a root page to
// the note that references the given page. Clients may choose how
// they want to display that collection.
pub(crate) async fn select_backreferences_for_page<'a, E>(executor: E, page_id: &str) -> SqlResult<Vec<Note>>
where
    E: Executor<'a, Database = Sqlite>,
{
    select_object_by_query(executor, &SELECT_NOTES_BACKREFERENCING_PAGE_SQL, &page_id).await
}

//  ___                 _      ___             _  _     _
// |_ _|_ _  ___ ___ _ _| |_   / _ \ _ _  ___  | \| |___| |_ ___
//  | || ' \(_-</ -_) '_|  _| | (_) | ' \/ -_) | .` / _ \  _/ -_)
// |___|_||_/__/\___|_|  \__|  \___/|_||_\___| |_|\_\___/\__\___|
//

// Inserts a single note into the notes table. That is all.
pub(crate) async fn insert_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<String>
where
    E: Executor<'a, Database = Sqlite>,
{
    let insert_one_note_sql = concat!(
        "INSERT INTO notes (id, content, kind, ",
        " creation_date, updated_date, lastview_date) ",
        "VALUES (?, ?, ?, ?, ?, ?);"
    );

    sqlx::query(insert_one_note_sql)
        .bind(&note.id)
        .bind(&note.content)
        .bind(note.kind.to_string())
        .bind(&note.creation_date)
        .bind(&note.updated_date)
        .bind(&note.lastview_date)
        .execute(executor)
        .await?;
    Ok(note.id.clone())
}

// Inserts a batch of notes into the notes table. That is all.
pub(crate) async fn insert_bulk_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    if notes.is_empty() {
        return Ok(());
    }

    let insert_pattern = "(?, ?, ?, ?, ?, ?)".to_string();
    let insert_bulk_notes_sql = "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) VALUES "
        .to_string()
        + &[insert_pattern.as_str()].repeat(notes.len()).join(", ")
        + &";".to_string();

    let mut request = sqlx::query(&insert_bulk_notes_sql);
    for note in notes {
        request = request
            .bind(&note.id)
            .bind(&note.content)
            .bind(note.kind.to_string())
            .bind(&note.creation_date)
            .bind(&note.updated_date)
            .bind(&note.lastview_date);
    }
    request.execute(executor).await.map(|_| ())
}

//  ___      _ _    _   _  __        _
// | _ )_  _(_) |__| | | |/ /__ _ __| |_ ___ _ _
// | _ \ || | | / _` | | ' </ _` (_-<  _/ -_) ' \
// |___/\_,_|_|_\__,_| |_|\_\__,_/__/\__\___|_||_|
//

// Given a possible slug, find the slug with the highest
// uniquification number, and return that number, if any.
pub(crate) fn find_maximal_slug_number(slugs: &[JustId]) -> Option<u32> {
    lazy_static! {
        static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
    }

    if slugs.is_empty() {
        return None;
    }

    let mut slug_counters: Vec<u32> = slugs
        .iter()
        .filter_map(|slug| RE_CAP_NUM.captures(&slug.id))
        .map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
        .collect();
    slug_counters.sort_unstable();
    slug_counters.pop()
}

// Given an initial string and an existing collection of slugs,
// generate a new slug that does not conflict with the current
// collection. Right now we're using the slugify operation, which...
// isn't all that.
pub(crate) async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
where
    E: Executor<'a, Database = Sqlite>,
{
    lazy_static! {
        static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
        static ref SLUG_FINDER_SQL: String = format!(
"SELECT id FROM notes WHERE kind = '{}' AND id LIKE '?%';",
|
|
||||||
            NoteKind::Page.to_string()
        );
    }

    let initial_slug = slugify(title);
    let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
    let similar_slugs: Vec<JustId> = sqlx::query_as(&SLUG_FINDER_SQL)
        .bind(&*sample_slug)
        .fetch_all(executor)
        .await?;
    let maximal_slug_number = find_maximal_slug_number(&similar_slugs);
    Ok(match maximal_slug_number {
        None => initial_slug,
        Some(slug_number) => format!("{}-{}", initial_slug, slug_number + 1),
    })
}
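
// Editor's sketch, not part of the original file: the expected interplay of
// the two helpers above, with hypothetical rows. "my-note" and "my-note-4"
// already exist, so the highest suffix is 4 and the next generated slug
// would be "my-note-5".
#[cfg(test)]
mod slug_sketch {
    use super::*;

    #[test]
    fn finds_the_highest_uniquification_suffix() {
        let existing = vec![
            JustId { id: "my-note".to_string() },
            JustId { id: "my-note-4".to_string() },
        ];
        assert_eq!(find_maximal_slug_number(&existing), Some(4));
    }
}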

// A helper function: given a title and a slug, create a PageType
// note.
pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
    NewNoteBuilder::default()
        .id(slug.to_string())
        .content(title.to_string())
        .kind(NoteKind::Page)
        .build()
        .unwrap()
}

//  _   _          _      _          ___             _  _     _
// | | | |_ __  __| |__ _| |_ ___   / _ \ _ _  ___  | \| |___| |_ ___
// | |_| | '_ \/ _` / _` |  _/ -_) | (_) | ' \/ -_) | .` / _ \  _/ -_)
//  \___/| .__/\__,_\__,_|\__\___|  \___/|_||_\___| |_|\_\___/\__\___|
//       |_|

pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: &str, content: &str) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let update_note_content_sql = "UPDATE notes SET content = ? WHERE id = ?";
    let count = sqlx::query(update_note_content_sql)
        .bind(content)
        .bind(note_id)
        .execute(executor)
        .await?
        .rows_affected();

    match count {
        1 => Ok(()),
        _ => Err(sqlx::Error::RowNotFound),
    }
}

//  ___    _      _      ___             _  _     _         ___     _      _   _             _    _
// | __|__| |_ __| |_   / _ \ _ _  ___  | \| |___| |_ ___  | _ \___| |__ _| |_(_)___ _ _  __| |_ (_)_ __
// | _/ -_)  _/ _| ' \ | (_) | ' \/ -_) | .` / _ \  _/ -_) |   / -_) / _` |  _| / _ \ ' \(_-< ' \| | '_ \
// |_|\___|\__\__|_||_| \___/|_||_\___| |_|\_\___/\__\___| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/
//                                                                                                 |_|

pub(crate) async fn select_note_to_note_relationship<'a, E>(
    executor: E,
    parent_id: &str,
    note_id: &str,
) -> SqlResult<NoteRelationship>
where
    E: Executor<'a, Database = Sqlite>,
{
    let get_note_to_note_relationship_sql = concat!(
        "SELECT parent_id, note_id, location, kind ",
        "FROM note_relationships ",
        "WHERE parent_id = ? and note_id = ? ",
        "LIMIT 1"
    );
    let s: NoteRelationshipRow = sqlx::query_as(get_note_to_note_relationship_sql)
        .bind(parent_id)
        .bind(note_id)
        .fetch_one(executor)
        .await?;
    Ok(NoteRelationship::from(s))
}

//  _  _     _         _         _  _     _         ___     _      _   _             _    _
// | \| |___| |_ ___  | |_ ___  | \| |___| |_ ___  | _ \___| |__ _| |_(_)___ _ _  __| |_ (_)_ __ ___
// | .` / _ \  _/ -_) |  _/ _ \ | .` / _ \  _/ -_) |   / -_) / _` |  _| / _ \ ' \(_-< ' \| | '_ (_-<
// |_|\_\___/\__\___|  \__\___/ |_|\_\___/\__\___| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/__/
//                                                                                          |_|

pub(crate) async fn insert_note_to_note_relationship<'a, E>(
    executor: E,
    parent_id: &str,
    note_id: &str,
    location: i64,
    kind: &RelationshipKind,
) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let insert_note_to_note_relationship_sql = concat!(
        "INSERT INTO note_relationships (parent_id, note_id, location, kind) ",
        "values (?, ?, ?, ?)"
    );

    sqlx::query(insert_note_to_note_relationship_sql)
        .bind(parent_id)
        .bind(note_id)
        .bind(&location)
        .bind(kind.to_string())
        .execute(executor)
        .await?;
    Ok(())
}

pub(crate) async fn make_room_for_new_note_relationship<'a, E>(
    executor: E,
    parent_id: &str,
    location: i64,
) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let make_room_for_new_note_sql = concat!(
        "UPDATE note_relationships ",
        "SET location = location + 1 ",
        "WHERE location >= ? and parent_id = ?;"
    );

    sqlx::query(make_room_for_new_note_sql)
        .bind(&location)
        .bind(parent_id)
        .execute(executor)
        .await?;
    Ok(())
}

pub(crate) async fn determine_max_child_location_for_note<'a, E>(
    executor: E,
    note_id: &str,
    comp_loc: Option<i64>,
) -> SqlResult<i64>
where
    E: Executor<'a, Database = Sqlite>,
{
    let row_count = assert_max_child_location_for_note(executor, note_id).await? + 1;
    Ok(match comp_loc {
        Some(location) => cmp::min(row_count, location),
        None => row_count,
    })
}

pub(crate) async fn assert_max_child_location_for_note<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
where
    E: Executor<'a, Database = Sqlite>,
{
    let assert_max_child_location_for_note_sql =
        "SELECT MAX(location) AS count FROM note_relationships WHERE parent_id = ?;";

    let count: RowCount = sqlx::query_as(assert_max_child_location_for_note_sql)
        .bind(note_id)
        .fetch_one(executor)
        .await?;

    Ok(count.count)
}

//  _  _     _         _         _  __        _            ___     _      _   _             _    _
// | \| |___| |_ ___  | |_ ___  | |/ /__ _ __| |_ ___ _ _  | _ \___| |__ _| |_(_)___ _ _  __| |_ (_)_ __ ___
// | .` / _ \  _/ -_) |  _/ _ \ | ' </ _` (_-<  _/ -_) ' \ |   / -_) / _` |  _| / _ \ ' \(_-< ' \| | '_ (_-<
// |_|\_\___/\__\___|  \__\___/ |_|\_\__,_/__/\__\___|_||_| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/__/
//                                                                                                   |_|

pub(crate) async fn insert_bulk_note_to_page_relationships<'a, E>(
    executor: E,
    note_id: &str,
    references: &[String],
) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    if references.is_empty() {
        return Ok(());
    }

    let insert_pattern = format!("(?, ?, '{}')", PageRelationshipKind::Page.to_string());
    let insert_note_page_references_sql = "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES "
        .to_string()
        + &[insert_pattern.as_str()].repeat(references.len()).join(", ")
        + &";".to_string();

    let mut request = sqlx::query(&insert_note_page_references_sql);
    for reference in references {
        request = request.bind(note_id).bind(reference);
    }

    request.execute(executor).await.map(|_| ())
}

pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let delete_note_to_page_relationship_sql = "DELETE FROM note_page_relationships WHERE note_id = ?;";
    sqlx::query(delete_note_to_page_relationship_sql)
        .bind(note_id)
        .execute(executor)
        .await?;
    Ok(())
}

// Given the references supplied, and the references found in the datastore,
// return a list of the references not found in the datastore.
pub(crate) fn diff_references(references: &[String], found_references: &[PageTitle]) -> Vec<String> {
    let all: HashSet<String> = references.iter().cloned().collect();
    let found: HashSet<String> = found_references.iter().map(|r| r.content.clone()).collect();
    all.difference(&found).cloned().collect()
}
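
// Editor's sketch, not part of the original file: diff_references with
// hypothetical data. "Plans" is already in the datastore, so only "Ideas"
// comes back as a reference that still needs a page created for it.
#[allow(dead_code)]
fn diff_references_example() -> Vec<String> {
    let wanted = vec!["Plans".to_string(), "Ideas".to_string()];
    let found = vec![PageTitle {
        id: "plans".to_string(),
        content: "Plans".to_string(),
    }];
    diff_references(&wanted, &found)
}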

//   ___                     _     _         _  __        _            ___     _      _   _             _    _
//  / __|___ _ _  _ _  ___ _| |_  | |_ ___  | |/ /__ _ __| |_ ___ _ _  | _ \___| |__ _| |_(_)___ _ _  __| |_ (_)_ __ ___
// | (__/ _ \ ' \| ' \/ -_) _|  _| |  _/ _ \ | ' </ _` (_-<  _/ -_) ' \ |   / -_) / _` |  _| / _ \ ' \(_-< ' \| | '_ (_-<
//  \___\___/_||_|_||_\___\__|\__|  \__\___/ |_|\_\__,_/__/\__\___|_||_| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/__/
//                                                                                                               |_|

// Returns all the (Id, title) pairs found in the database out of a
// list of titles. Used by insert_note and update_note_content to
// find the ids of all the references in a given document.
pub(crate) async fn find_all_page_from_list_of_references<'a, E>(
    executor: E,
    references: &[String],
) -> SqlResult<Vec<PageTitle>>
where
    E: Executor<'a, Database = Sqlite>,
{
    if references.is_empty() {
        return Ok(vec![]);
    }

    lazy_static! {
        static ref SELECT_ALL_REFERENCES_FOR_SQL_BASE: String = format!(
            "SELECT id, content FROM notes WHERE kind = '{}' AND content IN (",
            NoteKind::Page.to_string()
        );
    }

    let find_all_references_for_sql =
        SELECT_ALL_REFERENCES_FOR_SQL_BASE.to_string() + &["?"].repeat(references.len()).join(",") + &");".to_string();

    let mut request = sqlx::query_as(&find_all_references_for_sql);
    for id in references.iter() {
        request = request.bind(id);
    }
    request.fetch_all(executor).await
}

//  ___      _     _
// |   \ ___| |___| |_ ___
// | |) / -_) / -_)  _/ -_)
// |___/\___|_\___|\__\___|
//

pub(crate) async fn delete_note_to_note_relationship<'a, E>(
    executor: E,
    parent_id: &str,
    note_id: &str,
) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let delete_note_to_note_relationship_sql = concat!(
        "DELETE FROM note_relationships ",
        "WHERE parent_id = ? and note_id = ? "
    );

    let count = sqlx::query(delete_note_to_note_relationship_sql)
        .bind(parent_id)
        .bind(note_id)
        .execute(executor)
        .await?
        .rows_affected();

    match count {
        1 => Ok(()),
        _ => Err(sqlx::Error::RowNotFound),
    }
}

pub(crate) async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    lazy_static! {
        static ref DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL: String = format!(
            "DELETE FROM note_relationships WHERE kind in ('{}', '{}') AND parent_id = ?;",
            PageRelationshipKind::Page.to_string(),
            PageRelationshipKind::Unacked.to_string()
        );
    }

    sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL)
        .bind(note_id)
        .execute(executor)
        .await?;
    Ok(())
}

pub(crate) async fn delete_note<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let delete_note_sql = "DELETE FROM notes WHERE id = ?";

    let count = sqlx::query(delete_note_sql)
        .bind(note_id)
        .execute(executor)
        .await?
        .rows_affected();

    match count {
        1 => Ok(()),
        _ => Err(sqlx::Error::RowNotFound),
    }
}

// After removing a note, recalculate the position of all notes under
// the parent note, such that their order is now completely
// sequential.
pub(crate) async fn close_hole_for_deleted_note_relationship<'a, E>(
    executor: E,
    parent_id: &str,
    location: i64,
) -> SqlResult<()>
where
    E: Executor<'a, Database = Sqlite>,
{
    let close_hole_for_deleted_note_sql = concat!(
        "UPDATE note_relationships ",
        "SET location = location - 1 ",
        "WHERE location > ? and parent_id = ?;"
    );

    sqlx::query(close_hole_for_deleted_note_sql)
        .bind(&location)
        .bind(parent_id)
        .execute(executor)
        .await?;
    Ok(())
}

// Given a list of references found in the content, generate the
// references that do not previously exist, returning all found
// references. NOTE: The function signature for this is for a
// transaction, and uses a nested transaction.
pub(crate) async fn validate_or_generate_all_found_references(
    txi: &mut Transaction<'_, Sqlite>,
    references: &[String],
) -> SqlResult<Vec<String>> {
    let mut tx = txi.begin().await?;

    let found_references = find_all_page_from_list_of_references(&mut tx, &references).await?;
    let new_references = diff_references(&references, &found_references);
    let mut new_page: Vec<NewNote> = vec![];
    for one_reference in new_references.iter() {
        let slug = generate_slug(&mut tx, one_reference).await?;
        new_page.push(create_page(&one_reference, &slug));
    }
    insert_bulk_notes(&mut tx, &new_page).await?;

    let mut all_reference_ids: Vec<String> = found_references.iter().map(|r| r.id.clone()).collect();
    all_reference_ids.append(&mut new_page.iter().map(|r| r.id.clone()).collect());
    tx.commit().await?;
    Ok(all_reference_ids)
}

//  __  __ _
// |  \/  (_)___ __
// | |\/| | (_-</ _|
// |_|  |_|_/__/\__|
//

// The dreaded miscellaneous!

pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
where
    E: Executor<'a, Database = Sqlite>,
{
    let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
    let count: RowCount = sqlx::query_as(&count_existing_note_relationships_sql)
        .bind(note_id)
        .fetch_one(executor)
        .await?;
    Ok(count.count)
}

@ -1,58 +0,0 @@
DROP TABLE IF EXISTS notes;
DROP TABLE IF EXISTS note_relationships;
DROP TABLE IF EXISTS note_page_relationships;
DROP TABLE IF EXISTS favorites;

CREATE TABLE notes (
    id TEXT NOT NULL PRIMARY KEY,
    content TEXT NOT NULL,
    kind TEXT NOT NULL,
    creation_date DATETIME NOT NULL,
    updated_date DATETIME NOT NULL,
    lastview_date DATETIME NOT NULL,
    deleted_date DATETIME NULL
);

CREATE INDEX note_ids ON notes (id);

CREATE TABLE favorites (
    id TEXT NOT NULL UNIQUE,
    location INTEGER NOT NULL,
    FOREIGN KEY (id) REFERENCES notes (id) ON DELETE CASCADE
);

-- This table represents the forest of data relating a page to its
-- collections of notes. The root is itself "a note," but the content
-- of that note will always be just the title of the page.
--
CREATE TABLE note_relationships (
    note_id TEXT NOT NULL,
    parent_id TEXT NOT NULL,
    location INTEGER NOT NULL,
    kind TEXT NOT NULL,
    -- If either note disappears, we want all the edges to disappear as well.
    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE CASCADE,
    FOREIGN KEY (parent_id) REFERENCES notes (id) ON DELETE CASCADE,
    UNIQUE (note_id, parent_id),
    CHECK (note_id <> parent_id)
);

-- This table represents the graph of data relating notes to pages.
--
CREATE TABLE note_page_relationships (
    note_id TEXT NOT NULL,
    page_id TEXT NOT NULL,
    kind TEXT NOT NULL,
    -- If either note disappears, we want all the edges to disappear as well.
    FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE CASCADE,
    FOREIGN KEY (page_id) REFERENCES notes (id) ON DELETE CASCADE,
    UNIQUE (note_id, page_id),
    CHECK (note_id <> page_id)
);

-- A fabulous constraint. This index ensures that if a note points to
-- a page, the page may not also point back at that note. Now, it's
-- absolutely required that a page_id point to a PageType note; the
-- content should be a title only.
CREATE UNIQUE INDEX note_page_unique_idx
    ON note_page_relationships (MIN(note_id, page_id), MAX(note_id, page_id));

@ -1,72 +0,0 @@
SELECT
    id,
    parent_id,
    content,
    location,
    kind,
    creation_date,
    updated_date,
    lastview_date,
    deleted_date

FROM (

    WITH RECURSIVE parents (
        id,
        parent_id,
        content,
        location,
        kind,
        creation_date,
        updated_date,
        lastview_date,
        deleted_date,
        cycle
    )

    AS (

        SELECT
            notes.id,
            note_parents.id,
            notes.content,
            note_relationships.location,
            notes.kind,
            notes.creation_date,
            notes.updated_date,
            notes.lastview_date,
            notes.deleted_date,
            ','||notes.id||','
        FROM notes
        INNER JOIN note_relationships
            ON notes.id = note_relationships.note_id
            AND notes.kind = 'note'
        INNER JOIN notes as note_parents
            ON note_parents.id = note_relationships.parent_id
        WHERE notes.id
            IN (SELECT note_id
                FROM note_page_relationships
                WHERE page_id = ?) -- IMPORTANT: THIS IS THE PARAMETER

        UNION
        SELECT DISTINCT
            notes.id,
            next_parent.id,
            notes.content,
            note_relationships.location,
            notes.kind,
            notes.creation_date,
            notes.updated_date,
            notes.lastview_date,
            notes.deleted_date,
            parents.cycle||notes.id||','
        FROM notes
        INNER JOIN parents
            ON parents.parent_id = notes.id
        LEFT JOIN note_relationships
            ON note_relationships.note_id = notes.id
        LEFT JOIN notes as next_parent
            ON next_parent.id = note_relationships.parent_id
        WHERE parents.cycle NOT LIKE '%,'||notes.id||',%'
    )
    SELECT * from parents);

@ -1,98 +0,0 @@
-- This is undoubtedly one of the more complex bits of code I've
-- written recently, and I do wish there had been macros because
-- there's a lot of hand-written, copy-pasted code here around the
-- basic content of a note; it would have been nice to be able to DRY
-- that out.

-- This expression creates a table, 'notetree', that contains all of
-- the notes nested under a page. Each entry in the table includes
-- the note's parent's internal and external ids so that applications
-- can build an actual tree out of a vec of these things.

-- TODO: Extensive testing to validate that the nodes are delivered
-- *in nesting order* to the client.

-- Search in here for the term QUERYPARAMETER. That string will be
-- substituted with the correct parameter (id or title) depending on
-- the use case, by the level 1 client (the private parts of
-- store.rs).

SELECT
    id,
    parent_id,
    content,
    location,
    kind,
    creation_date,
    updated_date,
    lastview_date,
    deleted_date

FROM (

    WITH RECURSIVE notestree (
        id,
        parent_id,
        content,
        location,
        kind,
        creation_date,
        updated_date,
        lastview_date,
        deleted_date,
        cycle
    )

    AS (

        -- The seed query. Finds the root node of any tree of notes,
        -- which by definition has a location of zero and a type of
        -- 'page'.
        SELECT
            notes.id,
            NULL as parent_id,
            notes.content,
            0, -- All boxes are at position zero. They are the root of the tree.
            notes.kind,
            notes.creation_date,
            notes.updated_date,
            notes.lastview_date,
            notes.deleted_date,
            ','||notes.id||',' -- Cycle monitor
        FROM notes
        WHERE notes.kind = "box"
            AND QUERYPARAMETER = ? -- The Query Parameter

        -- RECURSIVE expression
        --
        -- Here, for each recursion down the tree, we collect the child
        -- nodes for a given node, eliding any cycles.
        --
        -- TODO: Figure out what to do when a cycle DOES occur.

        UNION SELECT
            notes.id,
            notestree.id AS parent_id,
            notes.content,
            note_relationships.location,
            notes.kind,
            notes.creation_date,
            notes.updated_date,
            notes.lastview_date,
            notes.deleted_date,
            notestree.cycle||notes.id||','
        FROM notes
        INNER JOIN note_relationships
            ON notes.id = note_relationships.note_id
        -- For a given ID in the level of notestree in *this* recursion,
        -- we want each note's branches one level down.
        INNER JOIN notestree
            ON note_relationships.parent_id = notestree.id
        -- And we want to make sure there are no cycles. There shouldn't
        -- be; we're supposed to prevent those. But you never know.
        WHERE notestree.cycle NOT LIKE '%,'||notes.id||',%'
        ORDER BY note_relationships.location
    )

    SELECT * from notestree);

@ -1,141 +1,40 @@
- use chrono::{DateTime, Utc};
- use derive_builder::Builder;
- use sqlx::{self, FromRow};
-
- // Page is German for "Box," and is used both because this is
- // supposed to be a Page, and because "Box" is a heavily
- // reserved word in Rust. So, for that matter, are "crate" and
- // "cargo," "cell," and so forth. If I'd wanted to go the Full
- // Noguchi, I guess I could have used "envelope."
-
- // In order to prevent arbitrary enumeration tokens from getting into
- // the database, the private layer takes a very hard line on insisting
- // that everything sent TO the datastore come in the enumerated
- // format, and everything coming OUT of the database be converted back
- // into an enumeration. These macros instantiate those objects
- // and their conversions to/from strings.
-
- macro_rules! build_conversion_enums {
-     ( $ty:ident, $( $s:literal => $x:ident, )*) => {
-         #[derive(Clone, Debug, PartialEq, Eq)]
-         pub enum $ty {
-             $( $x ), *
-         }
-
-         impl From<String> for $ty {
-             fn from(kind: String) -> Self {
-                 match &kind[..] {
-                     $( $s => $ty::$x, )*
-                     _ => panic!("Illegal value in $ty database: {}", kind),
-                 }
-             }
-         }
-
-         impl From<$ty> for String {
-             fn from(kind: $ty) -> Self {
-                 match kind {
-                     $( $ty::$x => $s ),*
-                 }
-                 .to_string()
-             }
-         }
-
-         impl $ty {
-             pub fn to_string(&self) -> String {
-                 String::from(self.clone())
-             }
-         }
-     };
- }
-
- // The different kinds of objects we support.
-
- build_conversion_enums!(
-     NoteKind,
-     "box" => Page,
-     "note" => Note,
-     "resource" => Resource,
- );
-
- // The different kinds of relationships we support. I do not yet
- // know how to ensure that there is a maximum of one (a ->
- // b)::Direct, and that for any (a -> b) there is no (b <- a), that
- // is, nor, for that matter, do I know how to prevent cycles.
-
- build_conversion_enums!(
-     RelationshipKind,
-     "direct" => Direct,
-     "reference" => Reference,
-     "embed" => Embed,
- );
-
- build_conversion_enums!(
-     PageRelationshipKind,
-     "page" => Page,
-     "unacked" => Unacked,
-     "cancelled" => Cancelled,
- );
-
- // A Note is the base construct of our system. It represents a
- // single note and contains information about its parent and location.
- // This is the object *retrieved* from the database.
-
- #[derive(Clone, Debug, FromRow)]
- pub(crate) struct RowNote {
-     pub id: String,
-     pub parent_id: Option<String>,
-     pub content: String,
-     pub kind: String,
-     pub location: i64,
-     pub creation_date: DateTime<Utc>,
-     pub updated_date: DateTime<Utc>,
-     pub lastview_date: DateTime<Utc>,
-     pub deleted_date: Option<DateTime<Utc>>,
- }
-
- /// A Note as it's returned from the private layer. This is
- /// provided to ensure that the NoteKind is an enum, and that we
- /// control the list of possible values stored in the database.
- #[derive(Clone, Debug)]
- pub struct Note {
-     pub id: String,
-     pub parent_id: Option<String>,
-     pub content: String,
-     pub kind: NoteKind,
-     pub location: i64,
-     pub creation_date: DateTime<Utc>,
-     pub updated_date: DateTime<Utc>,
-     pub lastview_date: DateTime<Utc>,
-     pub deleted_date: Option<DateTime<Utc>>,
- }
-
- impl From<RowNote> for Note {
-     fn from(note: RowNote) -> Self {
-         Self {
-             id: note.id,
-             parent_id: note.parent_id,
-             content: note.content,
-             kind: NoteKind::from(note.kind),
-             location: note.location,
-             creation_date: note.creation_date,
-             updated_date: note.updated_date,
-             lastview_date: note.lastview_date,
-             deleted_date: note.deleted_date,
-         }
-     }
- }
-
- /// A new Note object as it's inserted into the system. It has no
- /// parent or location information; those are data relative to the
- /// parent, and must be provided by the client. In the case of a
- /// Page, no location or parent is necessary.
- #[derive(Clone, Debug, Builder)]
- pub struct NewNote {
-     #[builder(default = r#"friendly_id::create()"#)]
-     pub id: String,
-     pub content: String,
-     #[builder(default = r#"NoteKind::Note"#)]
-     pub kind: NoteKind,
-     #[builder(default = r#"chrono::Utc::now()"#)]
-     pub creation_date: DateTime<Utc>,
-     #[builder(default = r#"chrono::Utc::now()"#)]
+ use chrono::{DateTime, Utc};
+ use derive_builder::Builder;
+ use serde::{Deserialize, Serialize};
+ use sqlx::{self, FromRow};
+
+ #[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
+ pub struct RawPage {
+     pub id: i64,
+     pub slug: String,
+     pub title: String,
+     pub note_id: i64,
+     pub creation_date: DateTime<Utc>,
+     pub updated_date: DateTime<Utc>,
+     pub lastview_date: DateTime<Utc>,
+     pub deleted_date: Option<DateTime<Utc>>,
+ }
+
+ #[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
+ pub struct RawNote {
+     pub id: i64,
+     pub uuid: String,
+     pub parent_id: i64,
+     pub parent_uuid: String,
+     pub content: String,
+     pub position: i64,
+     pub notetype: String,
+     pub creation_date: DateTime<Utc>,
+     pub updated_date: DateTime<Utc>,
+     pub lastview_date: DateTime<Utc>,
+     pub deleted_date: Option<DateTime<Utc>>,
+ }
+
+ #[derive(Clone, Serialize, Deserialize, Debug, Builder)]
+ pub struct NewPage {
+     pub slug: String,
+     pub title: String,
+     pub note_id: i64,
+     #[builder(default = r#"chrono::Utc::now()"#)]
+     pub creation_date: DateTime<Utc>,
+     #[builder(default = r#"chrono::Utc::now()"#)]
@@ -146,92 +45,57 @@ pub struct NewNote {

canon side:

    pub deleted_date: Option<DateTime<Utc>>,
}

impl From<NewNote> for Note {
    /// Only used for building new pages, so the decision-making is
    /// limited to page-level things, like pointing to self and
    /// having a location of zero.
    fn from(note: NewNote) -> Self {
        Self {
            id: note.id,
            parent_id: None,
            content: note.content,
            kind: note.kind,
            location: 0,
            creation_date: note.creation_date,
            updated_date: note.updated_date,
            lastview_date: note.lastview_date,
            deleted_date: note.deleted_date,
        }
    }
}

#[derive(Clone, Debug, FromRow)]
pub(crate) struct JustId {
    pub id: String,
}

#[derive(Clone, Debug, FromRow)]
pub(crate) struct PageTitle {
    pub id: String,
    pub content: String,
}

#[derive(Clone, Debug, FromRow)]
pub(crate) struct RowCount {
    pub count: i64,
}

#[derive(Clone, Debug, FromRow)]
pub(crate) struct NoteRelationshipRow {
    pub parent_id: String,
    pub note_id: String,
    pub location: i64,
    pub kind: String,
}

#[derive(Clone, Debug)]
pub struct NoteRelationship {
    pub parent_id: String,
    pub note_id: String,
    pub location: i64,
    pub kind: RelationshipKind,
}

impl From<NoteRelationshipRow> for NoteRelationship {
    fn from(rel: NoteRelationshipRow) -> Self {
        Self {
            parent_id: rel.parent_id,
            note_id: rel.note_id,
            location: rel.location,
            kind: RelationshipKind::from(rel.kind),
        }
    }
}

#[derive(Clone, Debug, FromRow)]
pub(crate) struct PageRelationshipRow {
    pub note_id: String,
    pub page_id: String,
    pub kind: String,
}

#[derive(Clone, Debug)]
pub struct PageRelationship {
    pub note_id: String,
    pub page_id: String,
    pub kind: PageRelationshipKind,
}

impl From<PageRelationshipRow> for PageRelationship {
    fn from(rel: PageRelationshipRow) -> Self {
        Self {
            page_id: rel.page_id,
            note_id: rel.note_id,
            kind: PageRelationshipKind::from(rel.kind),
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

reboot-20201004 side:

    pub deleted_date: Option<DateTime<Utc>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
pub struct NewNote {
    #[builder(default = r#""".to_string()"#)]
    pub uuid: String,
    pub content: String,
    #[builder(default = r#""note".to_string()"#)]
    pub notetype: String,
    #[builder(default = r#"chrono::Utc::now()"#)]
    pub creation_date: DateTime<Utc>,
    #[builder(default = r#"chrono::Utc::now()"#)]
    pub updated_date: DateTime<Utc>,
    #[builder(default = r#"chrono::Utc::now()"#)]
    pub lastview_date: DateTime<Utc>,
    #[builder(default = r#"None"#)]
    pub deleted_date: Option<DateTime<Utc>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct JustSlugs {
    pub slug: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct JustTitles {
    title: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct JustId {
    pub id: i64,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct PageTitles {
    pub id: i64,
    pub title: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct NoteRelationship {
    pub parent_id: i64,
    pub note_id: i64,
    pub position: i64,
    pub nature: String,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub(crate) struct RowCount {
    pub count: i64,
}

#[cfg(test)]
mod tests {
    use super::*;
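The pub(crate) row structs above exist so that query results can be deserialized straight into them. A hedged sketch of that usage follows, assuming the FromRow derive is sqlx's and a reasonably recent sqlx; the nm-store manifest is not part of this diff, so the pool type and the query text are illustrative only.

// Sketch only: consuming a FromRow struct like RowCount via sqlx.
use sqlx::sqlite::SqlitePool;

#[derive(Clone, Debug, sqlx::FromRow)]
struct RowCount {
    count: i64,
}

// COUNT(*) is aliased to `count` so it lands in the struct field of the
// same name; the `notes` table name is taken from the fixture SQL in this
// comparison.
async fn count_notes(pool: &SqlitePool) -> Result<i64, sqlx::Error> {
    let row = sqlx::query_as::<_, RowCount>("SELECT COUNT(*) AS count FROM notes")
        .fetch_one(pool)
        .await?;
    Ok(row.count)
}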
@@ -239,8 +103,11 @@ mod tests {

canon side:

    #[test]
    fn can_build_new_note() {
        let now = chrono::Utc::now();
        let newnote = NewNoteBuilder::default().content("bar".to_string()).build().unwrap();
        assert!(newnote.id.len() > 4);
        assert!((newnote.creation_date - now).num_minutes() < 1);
        assert!((newnote.updated_date - now).num_minutes() < 1);
        assert!((newnote.lastview_date - now).num_minutes() < 1);

reboot-20201004 side:

    #[test]
    fn can_build_new_note() {
        let now = chrono::Utc::now();
        let newnote = NewNoteBuilder::default()
            .uuid("foo".to_string())
            .content("bar".to_string())
            .build()
            .unwrap();
        assert!((newnote.creation_date - now).num_minutes() < 1);
        assert!((newnote.updated_date - now).num_minutes() < 1);
        assert!((newnote.lastview_date - now).num_minutes() < 1);
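Both versions of can_build_new_note rely on the #[builder(default = ...)] attributes declared on NewNote. The sketch below shows the same pattern in isolation; it assumes the Builder derive is derive_builder's and reuses the friendly_id::create() default from the canon struct, while SketchNote and its main function are invented for illustration.

// Sketch only: the builder-default pattern used by NewNote above.
use chrono::{DateTime, Utc};
use derive_builder::Builder;

#[derive(Clone, Debug, Builder)]
pub struct SketchNote {
    // When the caller doesn't supply an id, the builder generates one.
    #[builder(default = r#"friendly_id::create()"#)]
    pub id: String,
    pub content: String,
    // Timestamps default to "now" at build time.
    #[builder(default = r#"chrono::Utc::now()"#)]
    pub creation_date: DateTime<Utc>,
}

fn main() {
    let note = SketchNoteBuilder::default()
        .content("bar".to_string())
        .build()
        .unwrap();
    // Only `content` was set explicitly; `id` and `creation_date` came
    // from the defaults declared in the attributes.
    println!("{} created at {}", note.id, note.creation_date);
}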
Removed file (ignore rules):

@@ -1,2 +0,0 @@
/target
Cargo.lock
Removed file (the nm-trees crate manifest):

@@ -1,15 +0,0 @@
[package]
name = "nm-trees"
version = "0.1.0"
authors = ["Elf M. Sternberg <elf.sternberg@gmail.com>"]
edition = "2018"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
nm-store = { path = "../nm-store" }
thiserror = "1.0.20"
tokio = { version = "0.2.22", features = ["rt-threaded", "blocking"] }
serde = { version = "1.0.116", features = ["derive"] }
serde_json = "1.0.56"
chrono = { version = "0.4.18", features = ["serde"] }
Removed file (the nm-trees Makefile):

@@ -1,15 +0,0 @@
.PHONY: all
all: help

.PHONY: help
help:
	@M=$$(perl -ne 'm/((\w|-)*):.*##/ && print length($$1)."\n"' Makefile | \
		sort -nr | head -1) && \
		perl -ne "m/^((\w|-)*):.*##\s*(.*)/ && print(sprintf(\"%s: %s\t%s\n\", \$$1, \" \"x($$M-length(\$$1)), \$$3))" Makefile

# This is necessary because I'm trying hard not to use
# any `nightly` features. But rustfmt is likely to be
# a `nightly-only` feature for a long time to come, so
# this is my hack.
fmt: ## Format the code, using the most modern version of rustfmt
	rustup run nightly cargo fmt
Removed file (the nm-trees tree-layer module):

@@ -1,146 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

//! # Tree Layer
//!
//! This layer provides an interface between the storage layer and
//! the outside world. It provides all of the basic logic, including
//! the premise that a note without a parent is automatically
//! made a child of the day's notepad.

mod make_tree;
mod structs;

use crate::make_tree::{make_backreferences, make_note_tree};
use crate::structs::{Note, Page};
use chrono::{DateTime, Utc};
use nm_store::{NewNote, NoteStore, NoteStoreError};

#[derive(Debug)]
pub struct Notesmachine(pub(crate) NoteStore);

type Result<T> = core::result::Result<T, NoteStoreError>;

pub fn make_page(foundtree: &Note, backreferences: Vec<Vec<Note>>) -> Page {
    Page {
        slug: foundtree.id,
        title: foundtree.content,
        creation_date: foundtree.creation_date,
        updated_date: foundtree.updated_date,
        lastview_date: foundtree.lastview_date,
        deleted_date: foundtree.deleted_date,
        notes: foundtree.children,
        backreferences: backreferences,
    }
}

impl Notesmachine {
    pub async fn new(url: &str) -> Result<Self> {
        let notestore = NoteStore::new(url).await?;
        Ok(Notesmachine(notestore))
    }

    pub async fn get_page_via_slug(&self, slug: &str) -> Result<Page> {
        let (rawtree, rawbackreferences) = self.0.get_kasten_by_slug(slug).await?;
        Ok(make_page(
            &make_note_tree(&rawtree),
            make_backreferences(&rawbackreferences),
        ))
    }

    pub async fn get_page(&self, title: &str) -> Result<Page> {
        let (rawtree, rawbackreferences) = self.0.get_kasten_by_title(title).await?;
        Ok(make_page(
            &make_note_tree(&rawtree),
            make_backreferences(&rawbackreferences),
        ))
    }

    // TODO:
    // You should be able to:
    //   Add a note that has no parent (gets added to "today")
    //   Add a note that specifies only the page (gets added to page/root)
    //   Add a note that has no location (gets tacked onto the end of the above)
    //   Add a note that specifies the date of creation.
    pub async fn add_note(&self, note: &NewNote) -> Result<String> {
        let mut note = note.clone();
        if note.parent_id.is_none() {
            note.parent_id = self.get_today_page().await?;
        }
        Ok(self.0.add_note(&note))
    }

    // pub async fn reference_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> {
    //     todo!();
    // }
    //
    // pub async fn embed_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> {
    //     todo!();
    // }

    pub async fn move_note(
        &self,
        note_id: &str,
        old_parent_id: &str,
        new_parent_id: &str,
        location: i64,
    ) -> Result<()> {
        self.0.move_note(note_id, old_parent_id, new_parent_id, location).await
    }

    pub async fn update_note(&self, note_id: &str, content: &str) -> Result<()> {
        self.0.update_note_content(note_id, content).await
    }

    pub async fn delete_note(&self, note_id: &str, parent_note_id: &str) -> Result<()> {
        self.0.delete_note(note_id, parent_note_id).await
    }
}

// Private stuff

impl Notesmachine {
    async fn get_today_page(&self) -> Result<String> {
        let title = chrono::Utc::now().format("%F").to_string();
        let (rawtree, _) = self.0.get_kasten_by_title(title).await?;
        Ok(rawtree.id)
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use tokio;

    async fn fresh_inmemory_database() -> Notesmachine {
        let notesmachine = Notesmachine::new("sqlite://:memory:").await;
        assert!(notesmachine.is_ok(), "{:?}", notesmachine);
        let notesmachine = notesmachine.unwrap();
        let reset = notesmachine.0.reset_database().await;
        assert!(reset.is_ok(), "{:?}", reset);
        notesmachine
    }

    #[tokio::test(threaded_scheduler)]
    async fn fetching_unfound_page_by_slug_works() {
        let notesmachine = fresh_inmemory_database().await;
        let unfoundpage = notesmachine.get_page_via_slug("nonexistent-slug").await;
        assert!(unfoundpage.is_err());
    }

    #[tokio::test(threaded_scheduler)]
    async fn fetching_unfound_page_by_title_works() {
        let title = "Nonexistent Page";
        let notesmachine = fresh_inmemory_database().await;
        let newpageresult = notesmachine.get_page(&title).await;
        assert!(newpageresult.is_ok(), "{:?}", newpageresult);

        let newpage = newpageresult.unwrap();
        assert_eq!(newpage.title, title, "{:?}", newpage.title);
        assert_eq!(newpage.slug, "nonexistent-page", "{:?}", newpage.slug);
        assert_eq!(newpage.root_note.content, "", "{:?}", newpage.root_note.content);
        assert_eq!(newpage.root_note.notetype, "root", "{:?}", newpage.root_note.notetype);
        assert_eq!(newpage.root_note.children.len(), 0, "{:?}", newpage.root_note.children);
    }
}
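Taken together with the module doc above, the intended call pattern looks roughly like the sketch below. Every name in it appears earlier in this comparison except the demo wrapper, the note text, and the assumption that nm-store exports NewNoteBuilder alongside NewNote; treat it as an illustration of the "no parent means today's notepad" rule, not a confirmed API.

// Sketch only: driving the tree layer the way its own tests do.
use nm_store::{NewNoteBuilder, NoteStoreError};

async fn demo() -> Result<(), NoteStoreError> {
    let machine = Notesmachine::new("sqlite://:memory:").await?;

    // No parent_id is set, so add_note falls back to get_today_page() and
    // the note is filed under the day's notepad.
    let note = NewNoteBuilder::default()
        .content("Remember to feed the cat".to_string())
        .build()
        .unwrap();
    let note_id = machine.add_note(&note).await?;

    // get_page builds the page (and its empty root note) when the title has
    // never been seen, per fetching_unfound_page_by_title_works above.
    let today = chrono::Utc::now().format("%F").to_string();
    let page = machine.get_page(&today).await?;
    println!("note {} filed under '{}'", note_id, page.title);
    Ok(())
}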
Removed file (the make_tree module):

@@ -1,75 +0,0 @@
use crate::structs::{Note, Page};
use nm_store::NoteKind;

fn make_note_tree_from(rawnotes: &[nm_store::Note], root_id: &str) -> Note {
    let the_note = {
        let foundroots: Vec<&nm_store::Note> = rawnotes.iter().filter(|note| note.id == root_id).collect();
        debug_assert!(foundroots.len() == 1);
        foundroots.iter().next().unwrap().clone()
    };

    // The special case of the root node must be filtered out here to
    // prevent the first pass from smashing the stack in an infinite
    // loop. The root node is identified by the type 'root' and
    // having its `id` and `parent_id` be equal. Numeric comparisons
    // are faster.
    let mut children = rawnotes
        .iter()
        .filter(|note| note.parent_id.is_some() && note.parent_id.unwrap() == root_id && note.id != the_note.id)
        .map(|note| make_note_tree_from(rawnotes, &note.id))
        .collect::<Vec<Note>>();
    children.sort_unstable_by(|a, b| a.location.cmp(&b.location));

    Note {
        id: the_note.id,
        parent_id: the_note.parent_id,
        content: the_note.content,
        kind: the_note.kind.to_string(),
        location: the_note.location,
        creation_date: the_note.creation_date,
        updated_date: the_note.updated_date,
        lastview_date: the_note.updated_date,
        deleted_date: the_note.deleted_date,
        children: children,
    }
}

pub(crate) fn make_note_tree(rawnotes: &[nm_store::Note]) -> Note {
    let the_root = {
        let foundroots: Vec<&nm_store::Note> = rawnotes.iter().filter(|note| note.kind == NoteKind::Kasten).collect();
        debug_assert!(foundroots.len() == 1);
        foundroots.iter().next().unwrap().clone()
    };
    make_note_tree_from(&rawnotes, &the_root.id)
}

fn add_child(rawnotes: &[nm_store::Note], acc: &mut Vec<Note>, note_id: &str) -> Vec<Note> {
    let child = rawnotes
        .iter()
        .find(|note| note.parent_id.is_some() && note.parent_id.unwrap() == note_id);
    if let Some(c) = child {
        acc.push(Note {
            id: c.id,
            parent_id: Some(note_id.to_string()),
            content: c.content,
            kind: c.kind.to_string(),
            location: c.location,
            creation_date: c.creation_date,
            updated_date: c.updated_date,
            lastview_date: c.updated_date,
            deleted_date: c.deleted_date,
            children: vec![],
        });
        add_child(rawnotes, acc, &c.id)
    } else {
        acc.to_vec()
    }
}

pub(crate) fn make_backreferences(rawnotes: &[nm_store::Note]) -> Vec<Vec<Note>> {
    rawnotes
        .iter()
        .filter(|note| note.parent_id.is_none() && note.kind == NoteKind::Kasten)
        .map(|root| add_child(rawnotes, &mut Vec::<Note>::new(), &root.id))
        .collect()
}
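The recursion in make_note_tree_from is easier to follow on a toy input. The sketch below strips the record down to the three fields the algorithm actually uses; the field names mirror nm_store::Note, but FlatNote, TreeNote, and the fixture are invented for illustration.

// Sketch only: the flat Vec to tree conversion, self-contained.
#[derive(Clone, Debug)]
struct FlatNote {
    id: String,
    parent_id: Option<String>,
    location: i64,
}

#[derive(Debug)]
struct TreeNote {
    id: String,
    children: Vec<TreeNote>,
}

fn build(flat: &[FlatNote], root_id: &str) -> TreeNote {
    let mut children: Vec<&FlatNote> = flat
        .iter()
        // Skip the root itself so a self-parented root can't recurse forever.
        .filter(|n| n.parent_id.as_deref() == Some(root_id) && n.id != root_id)
        .collect();
    // Siblings come back in location order, as in make_note_tree_from.
    children.sort_unstable_by_key(|n| n.location);
    TreeNote {
        id: root_id.to_string(),
        children: children.iter().map(|n| build(flat, &n.id)).collect(),
    }
}

fn main() {
    let flat = vec![
        FlatNote { id: "root".into(), parent_id: Some("root".into()), location: 0 },
        FlatNote { id: "b".into(), parent_id: Some("root".into()), location: 2 },
        FlatNote { id: "a".into(), parent_id: Some("root".into()), location: 1 },
    ];
    // "a" sorts ahead of "b" because of its location, mirroring the sort above.
    println!("{:#?}", build(&flat, "root"));
}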
Removed file (the nm-trees structs module):

@@ -1,27 +0,0 @@
use chrono::{DateTime, Utc};

#[derive(Clone, Debug)]
pub struct Note {
    pub id: String,
    pub parent_id: Option<String>,
    pub content: String,
    pub location: i64,
    pub kind: String,
    pub creation_date: DateTime<Utc>,
    pub updated_date: DateTime<Utc>,
    pub lastview_date: DateTime<Utc>,
    pub deleted_date: Option<DateTime<Utc>>,
    pub children: Vec<Note>,
}

#[derive(Clone, Debug)]
pub struct Page {
    pub slug: String,
    pub title: String,
    pub creation_date: DateTime<Utc>,
    pub updated_date: DateTime<Utc>,
    pub lastview_date: DateTime<Utc>,
    pub deleted_date: Option<DateTime<Utc>>,
    pub notes: Vec<Note>,
    pub backreferences: Vec<Vec<Note>>,
}