Compare commits

..

No commits in common. "canon" and "private-store" have entirely different histories.

10 changed files with 644 additions and 659 deletions

View File

@@ -1 +0,0 @@
cognitive-complexity-threshold = 9

View File

@@ -1,3 +1,4 @@
+use sqlx;
 use thiserror::Error;
 /// All the ways looking up objects can fail

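Editor's note: the error enum itself is not shown in this hunk, but the variants used elsewhere in the compare (NoteStoreError::DBError, NoteStoreError::NotFound, NoteStoreError::InvalidNoteStructure) suggest a thiserror-based type roughly like the sketch below. This is a reconstruction from usage, not the file's actual contents; the #[error] message strings are assumptions.

use thiserror::Error;

/// All the ways looking up objects can fail (reconstructed sketch).
#[derive(Error, Debug)]
pub enum NoteStoreError {
    /// Wraps any underlying sqlx failure (hence the added `use sqlx;`).
    #[error("database error: {0}")]
    DBError(#[from] sqlx::Error),

    /// The requested page or note does not exist.
    #[error("not found")]
    NotFound,

    /// The caller asked for something structurally invalid.
    #[error("invalid note structure: {0}")]
    InvalidNoteStructure(String),
}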
View File

@ -1,5 +1,5 @@
mod errors; mod errors;
mod parser; mod reference_parser;
mod store; mod store;
mod structs; mod structs;
@ -7,6 +7,7 @@ pub use crate::errors::NoteStoreError;
pub use crate::store::NoteStore; pub use crate::store::NoteStore;
pub use crate::structs::{Note, NoteKind, NoteRelationship, PageRelationship}; pub use crate::structs::{Note, NoteKind, NoteRelationship, PageRelationship};
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -112,4 +113,6 @@ mod tests {
assert_eq!(newpages[1].parent_id, Some(newroot.id.clone())); assert_eq!(newpages[1].parent_id, Some(newroot.id.clone()));
assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone())); assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone()));
} }
} }

View File

@@ -1,59 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Kasten.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//! - Siblings, creating lists
//! - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//! reflect the change. If a box is renamed to match an existing box, the
//! notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//! embeddings also get deleted, or is the oldest one elevated to be a new
//! "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//! it make sense to move this to a higher layer, and only provide the
//! hooks for it here?
//!
mod references;
use references::{build_page_titles, find_links};
pub(crate) fn build_references(content: &str) -> Vec<String> {
build_page_titles(&find_links(content))
}

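Editor's note: the doc comment in the deleted file above describes the note-to-note tree as rows of (parent_id, child_id, position, relationship_type), with position unique per parent and the relationship kind being original, embedding, or referencing. A minimal sketch of that shape follows; the type and field names here are illustrative assumptions (the project's real definitions live in structs.rs and the SQL schema).

// Hypothetical row type mirroring the relationship model described above.
#[derive(Debug, Clone, PartialEq)]
enum RelationshipType {
    Original,
    Embedding,
    Referencing,
}

// One edge of the note-to-note tree; every (parent_id, position) pair
// under a given parent must be unique, which gives a stable sibling order.
#[derive(Debug, Clone)]
struct NoteRelationshipRow {
    parent_id: String,
    child_id: String,
    position: i64,
    relationship_type: RelationshipType,
}

fn main() {
    let row = NoteRelationshipRow {
        parent_id: "root".into(),
        child_id: "note-1".into(),
        position: 0,
        relationship_type: RelationshipType::Original,
    };
    // There is only ever one *original* note; embeddings and references point at it.
    println!("{:?}", row);
}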
View File

@@ -4,7 +4,7 @@ use lazy_static::lazy_static;
 use regex::bytes::Regex as BytesRegex;
 use regex::Regex;
 
-struct Finder(pub Vec<String>);
+pub struct Finder(pub Vec<String>);
 
 impl Finder {
     pub fn new() -> Self {
@@ -24,7 +24,7 @@ impl Finder {
     }
 }
 
-pub(super) fn find_links(document: &str) -> Vec<String> {
+fn find_links(document: &str) -> Vec<String> {
     let arena = Arena::new();
     let mut finder = Finder::new();
     let root = parse_document(&arena, document, &ComrakOptions::default());
@@ -50,48 +50,25 @@ pub(super) fn find_links(document: &str) -> Vec<String> {
     finder.0
 }
 
-// This function is for the camel and snake case handlers.
 fn recase(title: &str) -> String {
     lazy_static! {
-        // Take every word that has a pattern of a capital letter
-        // followed by a lower case, and put a space between the
-        // capital and anything that precedes it.
-        // TODO: Make Unicode aware.
         static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
-        // Take every instance of a lower case letter or number,
-        // followed by a capital letter, and put a space between them.
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
         static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
-        // Take every instance of a word suffixed by a number and put
-        // a space between them.
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
-        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
+        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[a-z])(?P<n>\d)").unwrap();
-        // Take every instance of the one-or-more-of the symbols listed, and
-        // replace them with a space. This function is Unicode-irrelevant,
-        // although there is a list of symbols in the backreference parser
-        // that may disagree.
-        // TODO: Examine backreference parser and determine if this is
-        // sufficient.
         static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
     }
 
     // This should panic if misused, so... :-)
     let pass = title.to_string();
     let pass = pass.strip_prefix("#").unwrap();
     let pass = RE_PASS1.replace_all(&pass, "$s $n");
     let pass = RE_PASS4.replace_all(&pass, "$s $n");
     let pass = RE_PASS2.replace_all(&pass, "$s $n");
     RE_PASS3.replace_all(&pass, " ").trim().to_string()
 }
 
-pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
+fn build_page_titles(references: &[String]) -> Vec<String> {
     references
         .iter()
         .filter_map(|s| match s.chars().next() {
@@ -104,6 +81,10 @@ pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
         .collect()
 }
 
+pub(crate) fn build_references(content: &str) -> Vec<String> {
+    build_page_titles(&find_links(content))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

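Editor's note: as a quick check on the recasing logic in the hunk above, here is a self-contained sketch built from the same regexes (the [[:lower:]] variant of RE_PASS4 is used; the surrounding scaffolding and the example titles are mine, and the old branch's panic-on-missing-'#' behaviour is kept).

use lazy_static::lazy_static;
use regex::Regex;

// Re-creation of the recase() pipeline shown in the diff: split CamelCase,
// split letter/digit boundaries, then collapse separator runs into spaces.
fn recase(title: &str) -> String {
    lazy_static! {
        static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
        static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
        static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
    }
    // The original panics if the leading '#' is missing ("should panic if misused").
    let pass = title.strip_prefix('#').unwrap();
    let pass = RE_PASS1.replace_all(pass, "$s $n");
    let pass = RE_PASS4.replace_all(&pass, "$s $n");
    let pass = RE_PASS2.replace_all(&pass, "$s $n");
    RE_PASS3.replace_all(&pass, " ").trim().to_string()
}

fn main() {
    assert_eq!(recase("#FooBarBaz"), "Foo Bar Baz");
    assert_eq!(recase("#note_box-title2"), "note box title 2");
}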
View File

@@ -1,254 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Page.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//! - Siblings, creating lists
//! - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//! reflect the change. If a box is renamed to match an existing box, the
//! notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//! embeddings also get deleted, or is the oldest one elevated to be a new
//! "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//! it make sense to move this to a higher layer, and only provide the
//! hooks for it here?
//!
#![allow(clippy::len_zero)]
use crate::errors::NoteStoreError;
use crate::parser::build_references;
use crate::store::private::*;
use crate::structs::*;
use sqlx::sqlite::SqlitePool;
use std::sync::Arc;
/// A handle to our Sqlite database.
#[derive(Clone, Debug)]
pub struct NoteStore(Arc<SqlitePool>);
pub type NoteResult<T> = core::result::Result<T, NoteStoreError>;
// After wrestling for a while with the fact that 'box' is a reserved
// word in Rust, I decided to just go with Note (note) and Page
// (box).
impl NoteStore {
/// Initializes a new instance of the note store. Note that the
/// note store holds an Arc internally; this code is (I think)
/// safe to Send.
pub async fn new(url: &str) -> NoteResult<Self> {
let pool = SqlitePool::connect(url).await?;
Ok(NoteStore(Arc::new(pool)))
}
/// Erase all the data in the database and restore it
/// to its original empty form. Do not use unless you
/// really, really want that to happen.
pub async fn reset_database(&self) -> NoteResult<()> {
reset_database(&*self.0).await.map_err(NoteStoreError::DBError)
}
/// Fetch page by slug
///
/// Supports the use case of the user navigating to a known place
/// via a bookmark or other URL. Since the title isn't clear from
/// the slug, the slug is insufficient to generate a new page, so
/// this use case says that in the event of a failure to find the
/// requested page, return a basic NotFound.
pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
let page = select_page_by_slug(&*self.0, slug).await?;
if page.is_empty() {
return Err(NoteStoreError::NotFound);
}
let note_id = &page[0].id;
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
Ok((page, backreferences))
}
/// Fetch page by title
///
/// The most common use case: the user is navigating by requesting
/// a page. The page either exists or it doesn't. If it
/// doesn't, we go out and make it. Since we know it doesn't exist,
/// we also know no backreferences to it exist, so in that case you
/// get back two empty vecs.
pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
if title.is_empty() {
return Err(NoteStoreError::NotFound);
}
let page = select_page_by_title(&*self.0, title).await?;
if page.len() > 0 {
let note_id = &page[0].id;
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
return Ok((page, backreferences));
}
// Sanity check!
let references = build_references(&title);
if references.len() > 0 {
return Err(NoteStoreError::InvalidNoteStructure(
"Titles may not contain nested references.".to_string(),
));
}
let mut tx = self.0.begin().await?;
let slug = generate_slug(&mut tx, title).await?;
let page = create_page(&title, &slug);
insert_note(&mut tx, &page).await?;
tx.commit().await?;
Ok((vec![Note::from(page)], vec![]))
}
pub async fn add_note(&self, note: &NewNote, parent_id: &str, location: Option<i64>) -> NoteResult<String> {
let kind = RelationshipKind::Direct;
let new_id = self.insert_note(note, parent_id, location, kind).await?;
Ok(new_id)
}
/// Move a note from one location to another.
pub async fn move_note(
&self,
note_id: &str,
old_parent_id: &str,
new_parent_id: &str,
new_location: i64,
) -> NoteResult<()> {
let mut tx = self.0.begin().await?;
let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
let old_note_location = old_note.location;
let old_note_kind = old_note.kind;
delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
close_hole_for_deleted_note_relationship(&mut tx, &old_parent_id, old_note_location).await?;
let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?;
make_room_for_new_note_relationship(&mut tx, &new_parent_id, new_location).await?;
insert_note_to_note_relationship(&mut tx, &new_parent_id, &note_id, new_location, &old_note_kind).await?;
tx.commit().await?;
Ok(())
}
/// Updates a note's content. Completely rebuilds the note's
/// outgoing edge reference list every time.
pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
let references = build_references(&content);
let mut tx = self.0.begin().await?;
update_note_content(&mut tx, &note_id, &content).await?;
delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids).await?;
tx.commit().await?;
Ok(())
}
/// Deletes a note. If the note's relationship drops to zero, all
/// references from that note to pages are also deleted.
pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
let mut tx = self.0.begin().await?;
let note_id = note_id.to_string();
let parent_id = note_parent_id.to_string();
if parent_id != note_id {
delete_note_to_note_relationship(&mut tx, &parent_id, &note_id).await?;
}
// The big one: if zero parents report having an interest in this note, then it,
// *and any sub-relationships*, go away.
if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
delete_note_to_page_relationships(&mut tx, &note_id).await?;
delete_note(&mut tx, &note_id).await?;
}
tx.commit().await?;
Ok(())
}
}
// The Private stuff
impl NoteStore {
// Pretty much the most dangerous function in our system. Has to
// have ALL the error checking.
async fn insert_note(
&self,
note: &NewNote,
parent_id: &str,
location: Option<i64>,
kind: RelationshipKind,
) -> NoteResult<String> {
if let Some(location) = location {
if location < 0 {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A negative location is not valid.".to_string(),
));
}
}
if parent_id.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A parent note ID is required.".to_string(),
));
}
if note.id.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: Your note should have an id already".to_string(),
));
}
if note.content.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: Empty notes are not supported.".to_string(),
));
}
let references = build_references(&note.content);
let mut tx = self.0.begin().await?;
let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?;
insert_note(&mut tx, &note).await?;
make_room_for_new_note_relationship(&mut tx, &parent_id, location).await?;
insert_note_to_note_relationship(&mut tx, &parent_id, &note.id, location, &kind).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
insert_bulk_note_to_page_relationships(&mut tx, &note.id, &known_reference_ids).await?;
tx.commit().await?;
Ok(note.id.to_string())
}
}

View File

@@ -51,8 +51,8 @@
 //! hooks for it here?
 //!
 
-mod api;
 mod private;
+mod store;
 
-pub use crate::store::api::NoteResult;
-pub use crate::store::api::NoteStore;
+pub use crate::store::store::NoteResult;
+pub use crate::store::store::NoteStore;

View File

@@ -3,8 +3,8 @@ use lazy_static::lazy_static;
 use regex::Regex;
 use slug::slugify;
 use sqlx::{sqlite::Sqlite, Acquire, Done, Executor, Transaction};
-use std::cmp;
 use std::collections::HashSet;
+use std::cmp;
 
 type SqlResult<T> = sqlx::Result<T>;
@@ -35,6 +35,7 @@ lazy_static! {
     );
 }
 
 lazy_static! {
     static ref SELECT_NOTES_BACKREFERENCING_PAGE_SQL: &'static str =
         include_str!("sql/select_notes_backreferencing_page.sql");
@@ -51,7 +52,10 @@ where
     E: Executor<'a, Database = Sqlite>,
 {
     let initialize_sql = include_str!("sql/initialize_database.sql");
-    sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
+    sqlx::query(initialize_sql)
+        .execute(executor)
+        .await
+        .map(|_| ())
 }
 
 // ___ _ _ _ __ _
@@ -66,10 +70,14 @@ async fn select_object_by_query<'a, E>(executor: E, query: &str, field: &str) ->
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let r: Vec<RowNote> = sqlx::query_as(query).bind(field).fetch_all(executor).await?;
-    Ok(r.into_iter().map(Note::from).collect())
+    let r: Vec<RowNote> = sqlx::query_as(query)
+        .bind(field)
+        .fetch_all(executor)
+        .await?;
+    Ok(r.into_iter().map(|z| Note::from(z)).collect())
 }
 
 // Select the requested page via its id. This is fairly rare;
 // pages should usually be picked up via their title, but if you're
 // navigating to an instance, this is how you specify the page in a
@@ -100,7 +108,10 @@ where
 // of arrays, and inside each array is a list from a root page to
 // the note that references the given page. Clients may choose how
 // they want to display that collection.
-pub(crate) async fn select_backreferences_for_page<'a, E>(executor: E, page_id: &str) -> SqlResult<Vec<Note>>
+pub(crate) async fn select_backreferences_for_page<'a, E>(
+    executor: E,
+    page_id: &str,
+) -> SqlResult<Vec<Note>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
@@ -124,7 +135,7 @@ where
         "VALUES (?, ?, ?, ?, ?, ?);"
     );
 
-    sqlx::query(insert_one_note_sql)
+    let _ = sqlx::query(insert_one_note_sql)
         .bind(&note.id)
         .bind(&note.content)
         .bind(note.kind.to_string())
@@ -137,7 +148,7 @@ where
 }
 
 // Inserts a single note into the notes table. That is all.
-pub(crate) async fn insert_bulk_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
+pub(crate) async fn bulk_insert_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
@@ -146,10 +157,11 @@ where
     }
 
     let insert_pattern = "VALUES (?, ?, ?, ?, ?, ?)".to_string();
-    let insert_bulk_notes_sql = "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) "
-        .to_string()
-        + &[insert_pattern.as_str()].repeat(notes.len()).join(", ")
-        + &";".to_string();
+    let insert_bulk_notes_sql =
+        "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) ".to_string()
+        + &[insert_pattern.as_str()]
+            .repeat(notes.len())
+            .join(", ") + &";".to_string();
 
     let mut request = sqlx::query(&insert_bulk_notes_sql);
     for note in notes {
@@ -236,14 +248,18 @@ pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
 // \___/| .__/\__,_\__,_|\__\___| \___/|_||_\___| |_|\_\___/\__\___|
 //      |_|
 
-pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: &str, content: &str) -> SqlResult<()>
+pub(crate) async fn update_note_content<'a, E>(
+    executor: E,
+    note_id: &NoteId,
+    content: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
     let count = sqlx::query(update_note_content_sql)
         .bind(content)
-        .bind(note_id)
+        .bind(&**note_id)
         .execute(executor)
         .await?
         .rows_affected();
@@ -303,7 +319,7 @@ where
         "values (?, ?, ?, ?)"
     );
 
-    sqlx::query(insert_note_to_note_relationship_sql)
+    let _ = sqlx::query(insert_note_to_note_relationship_sql)
         .bind(parent_id)
         .bind(note_id)
         .bind(&location)
@@ -313,7 +329,7 @@ where
     Ok(())
 }
 
-pub(crate) async fn make_room_for_new_note_relationship<'a, E>(
+pub(crate) async fn make_room_for_new_note<'a, E>(
     executor: E,
     parent_id: &str,
     location: i64,
@@ -327,7 +343,7 @@ where
         "WHERE location >= ? and parent_id = ?;"
     );
 
-    sqlx::query(make_room_for_new_note_sql)
+    let _ = sqlx::query(make_room_for_new_note_sql)
         .bind(&location)
         .bind(parent_id)
         .execute(executor)
@@ -346,11 +362,14 @@ where
     let row_count = assert_max_child_location_for_note(executor, note_id).await? + 1;
     Ok(match comp_loc {
         Some(location) => cmp::min(row_count, location),
-        None => row_count,
+        None => row_count
     })
 }
 
-pub(crate) async fn assert_max_child_location_for_note<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
+pub(crate) async fn assert_max_child_location_for_note<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<i64>
 where
     E: Executor<'a, Database = Sqlite>,
 {
@@ -384,10 +403,11 @@ where
     }
 
     let insert_pattern = format!("(?, ?, '{}')", PageRelationshipKind::Page.to_string());
-    let insert_note_page_references_sql = "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES "
-        .to_string()
-        + &[insert_pattern.as_str()].repeat(references.len()).join(", ")
-        + &";".to_string();
+    let insert_note_page_references_sql =
+        "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES ".to_string()
+        + &[insert_pattern.as_str()]
+            .repeat(references.len())
+            .join(", ") + &";".to_string();
 
     let mut request = sqlx::query(&insert_note_page_references_sql);
     for reference in references {
@@ -397,12 +417,16 @@ where
     request.execute(executor).await.map(|_| ())
 }
 
-pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
+pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let delete_note_to_page_relationship_sql = "DELETE FROM note_page_relationships WHERE and note_id = ?;";
-    sqlx::query(delete_note_to_page_relationship_sql)
+    let delete_note_to_page_relationship_sql =
+        "DELETE FROM note_page_relationships WHERE and note_id = ?;";
+    let _ = sqlx::query(delete_note_to_page_relationship_sql)
         .bind(note_id)
         .execute(executor)
         .await?;
@@ -411,7 +435,10 @@ where
 // Given the references supplied, and the references found in the datastore,
 // return a list of the references not found in the datastore.
-pub(crate) fn diff_references(references: &[String], found_references: &[PageTitle]) -> Vec<String> {
+pub(crate) fn diff_references(
+    references: &[String],
+    found_references: &[PageTitle],
+) -> Vec<String> {
     let all: HashSet<String> = references.iter().cloned().collect();
     let found: HashSet<String> = found_references.iter().map(|r| r.content.clone()).collect();
     all.difference(&found).cloned().collect()
@@ -444,8 +471,9 @@ where
         );
     }
 
-    let find_all_references_for_sql =
-        SELECT_ALL_REFERENCES_FOR_SQL_BASE.to_string() + &["?"].repeat(references.len()).join(",") + &");".to_string();
+    let find_all_references_for_sql = SELECT_ALL_REFERENCES_FOR_SQL_BASE.to_string()
+        + &["?"].repeat(references.len()).join(",")
+        + &");".to_string();
 
     let mut request = sqlx::query_as(&find_all_references_for_sql);
     for id in references.iter() {
@@ -486,7 +514,10 @@ where
     }
 }
 
-pub(crate) async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
+pub(crate) async fn delete_note_to_page_relationships<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
@@ -498,7 +529,7 @@ where
         );
     }
 
-    sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL)
+    let _ = sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL)
         .bind(note_id)
         .execute(executor)
         .await?;
@@ -526,7 +557,7 @@ where
 // After removing a note, recalculate the position of all notes under
 // the parent note, such that their order is now completely
 // sequential.
-pub(crate) async fn close_hole_for_deleted_note_relationship<'a, E>(
+pub(crate) async fn close_hole_for_deleted_note<'a, E>(
     executor: E,
     parent_id: &str,
     location: i64,
@@ -540,7 +571,7 @@ where
         "WHERE location > ? and parent_id = ?;"
     );
 
-    sqlx::query(close_hole_for_deleted_note_sql)
+    let _ = sqlx::query(close_hole_for_deleted_note_sql)
         .bind(&location)
         .bind(parent_id)
         .execute(executor)
@@ -548,24 +579,26 @@ where
     Ok(())
 }
 
 // Given a list of references found in the content, generate the
 // references that do not previously exist, returning all found
 // references. NOTE: The function signature for this is for a
 // transaction, and uses a nested transaction.
 pub(crate) async fn validate_or_generate_all_found_references(
     txi: &mut Transaction<'_, Sqlite>,
-    references: &[String],
+    references: &[String]
 ) -> SqlResult<Vec<String>> {
     let mut tx = txi.begin().await?;
-    let found_references = find_all_page_from_list_of_references(&mut tx, &references).await?;
+    let found_references =
+        find_all_page_from_list_of_references(&mut tx, &references).await?;
     let new_references = diff_references(&references, &found_references);
     let mut new_page: Vec<NewNote> = vec![];
     for one_reference in new_references.iter() {
         let slug = generate_slug(&mut tx, one_reference).await?;
         new_page.push(create_page(&one_reference, &slug));
     }
 
-    insert_bulk_notes(&mut tx, &new_page).await?;
+    let _ = bulk_insert_notes(&mut tx, &new_page).await?;
 
     let mut all_reference_ids: Vec<String> = found_references.iter().map(|r| r.id.clone()).collect();
     all_reference_ids.append(&mut new_page.iter().map(|r| r.id.clone()).collect());
@@ -585,7 +618,8 @@ pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_i
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
+    let count_existing_note_relationships_sql =
+        "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
     let count: RowCount = sqlx::query_as(&count_existing_note_relationships_sql)
         .bind(note_id)
         .fetch_one(executor)

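Editor's note: several hunks above assemble bulk INSERT statements by repeating a parenthesised placeholder group once per row and then binding values in order. Below is a minimal standalone sketch of that idea; it only builds the string, the column list mirrors the notes table from the diff, and the exact literals and binding loop in the project differ slightly (they are summarised in comments).

// Build "INSERT INTO notes (...) VALUES (?, ...), (?, ...);" with one
// placeholder group per note to insert.
fn build_bulk_insert_sql(note_count: usize) -> String {
    let group = "(?, ?, ?, ?, ?, ?)";
    "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) VALUES "
        .to_string()
        + &[group].repeat(note_count).join(", ")
        + ";"
}

fn main() {
    let sql = build_bulk_insert_sql(2);
    assert!(sql.ends_with("VALUES (?, ?, ?, ?, ?, ?), (?, ?, ?, ?, ?, ?);"));
    // With sqlx, each note's six fields are then bound in the same order:
    //     let mut request = sqlx::query(&sql);
    //     for note in notes {
    //         request = request.bind(&note.id).bind(&note.content) /* ... */;
    //     }
    //     request.execute(executor).await
}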
View File

@@ -0,0 +1,272 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Page.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//! - Siblings, creating lists
//! - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//! reflect the change. If a box is renamed to match an existing box, the
//! notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//! embeddings also get deleted, or is the oldest one elevated to be a new
//! "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//! it make sense to move this to a higher layer, and only provide the
//! hooks for it here?
//!
use crate::errors::NoteStoreError;
use crate::reference_parser::build_references;
use crate::store::private::*;
use crate::structs::*;
use sqlx::sqlite::SqlitePool;
use std::sync::Arc;
/// A handle to our Sqlite database.
#[derive(Clone, Debug)]
pub struct NoteStore(Arc<SqlitePool>);
pub type NoteResult<T> = core::result::Result<T, NoteStoreError>;
// After wrestling for a while with the fact that 'box' is a reserved
// word in Rust, I decided to just go with Note (note) and Page
// (box).
impl NoteStore {
/// Initializes a new instance of the note store. Note that the
/// note store holds an Arc internally; this code is (I think)
/// safe to Send.
pub async fn new(url: &str) -> NoteResult<Self> {
let pool = SqlitePool::connect(url).await?;
Ok(NoteStore(Arc::new(pool)))
}
/// Erase all the data in the database and restore it
/// to its original empty form. Do not use unless you
/// really, really want that to happen.
pub async fn reset_database(&self) -> NoteResult<()> {
reset_database(&*self.0)
.await
.map_err(NoteStoreError::DBError)
}
/// Fetch page by slug
///
/// Supports the use case of the user navigating to a known place
/// via a bookmark or other URL. Since the title isn't clear from
/// the slug, the slug is insufficient to generate a new page, so
/// this use case says that in the event of a failure to find the
/// requested page, return a basic NotFound.
pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
let page = select_page_by_slug(&*self.0, slug).await?;
if page.is_empty() {
return Err(NoteStoreError::NotFound);
}
let note_id = &page[0].id;
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
Ok((page, backreferences))
}
/// Fetch page by title
///
/// The most common use case: the user is navigating by requesting
/// a page. The page either exists or it doesn't. If it
/// doesn't, we go out and make it. Since we know it doesn't exist,
/// we also know no backreferences to it exist, so in that case you
/// get back two empty vecs.
pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
if title.len() == 0 {
return Err(NoteStoreError::NotFound);
}
let page = select_page_by_title(&*self.0, title).await?;
if page.len() > 0 {
let note_id = &page[0].id;
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
return Ok((page, backreferences));
}
// Sanity check!
let references = build_references(&title);
if references.len() > 0 {
return Err(NoteStoreError::InvalidNoteStructure(
"Titles may not contain nested references.".to_string(),
));
}
let mut tx = self.0.begin().await?;
let slug = generate_slug(&mut tx, title).await?;
let page = create_page(&title, &slug);
let _ = insert_note(&mut tx, &page).await?;
tx.commit().await?;
Ok((vec![Note::from(page)], vec![]))
}
pub async fn add_note(
&self,
note: &NewNote,
parent_id: &str,
location: Option<i64>,
) -> NoteResult<String> {
let kind = RelationshipKind::Direct;
let new_id = self.insert_note(note, parent_id, location, kind).await?;
Ok(new_id)
}
/// Move a note from one location to another.
pub async fn move_note(
&self,
note_id: &str,
old_parent_id: &str,
new_parent_id: &str,
new_location: i64,
) -> NoteResult<()> {
let mut tx = self.0.begin().await?;
let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
let old_note_location = old_note.location;
let old_note_kind = old_note.kind;
let _ = delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
let _ = close_hole_for_deleted_note(&mut tx, &old_parent_id, old_note_location).await?;
let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?;
let _ = make_room_for_new_note(&mut tx, &new_parent_id, new_location).await?;
let _ = insert_note_to_note_relationship(
&mut tx,
&new_parent_id,
&note_id,
new_location,
&old_note_kind,
)
.await?;
tx.commit().await?;
Ok(())
}
/// Updates a note's content. Completely rebuilds the note's
/// outgoing edge reference list every time.
pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
let references = build_references(&content);
let note_id = NoteId(note_id.to_string());
let mut tx = self.0.begin().await?;
let _ = update_note_content(&mut tx, &note_id, &content).await?;
let _ = delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
let _ = insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids)
.await?;
tx.commit().await?;
Ok(())
}
/// Deletes a note. If the note's relationship drops to zero, all
/// references from that note to pages are also deleted.
pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
let mut tx = self.0.begin().await?;
let note_id = NoteId(note_id.to_string());
let parent_id = ParentId(note_parent_id.to_string());
if *parent_id != *note_id {
let _ = delete_note_to_note_relationship(&mut tx, &parent_id, &note_id);
}
// The big one: if zero parents report having an interest in this note, then it,
// *and any sub-relationships*, go away.
if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
let _ = delete_note_to_page_relationships(&mut tx, &note_id).await?;
let _ = delete_note(&mut tx, &note_id).await?;
}
tx.commit().await?;
Ok(())
}
}
// The Private stuff
impl NoteStore {
// Pretty much the most dangerous function in our system. Has to
// have ALL the error checking.
async fn insert_note(
&self,
note: &NewNote,
parent_id: &str,
location: Option<i64>,
kind: RelationshipKind,
) -> NoteResult<String> {
if let Some(location) = location {
if location < 0 {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A negative location is not valid.".to_string(),
));
}
}
if parent_id.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A parent note ID is required.".to_string(),
));
}
if note.id.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: Your note should have an id already".to_string(),
));
}
if note.content.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure(
"Add note: Empty notes are not supported.".to_string(),
));
}
let references = build_references(&note.content);
let mut tx = self.0.begin().await?;
let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?;
let note_id = NoteId(note.id.clone());
insert_note(&mut tx, &note).await?;
make_room_for_new_note(&mut tx, &parent_id, location).await?;
insert_note_to_note_relationship(&mut tx, &parent_id, &note_id, location, &kind).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
let _ = insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids)
.await?;
tx.commit().await?;
Ok(note_id.to_string())
}
}

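Editor's note: taken together, the new store file above exposes a small async API. A hypothetical caller might look like the sketch below; the crate name notesmachine_storage and the tokio runtime are assumptions, while the method names, signatures, and return shapes come straight from the diff.

// Cargo.toml (assumed): notesmachine_storage = { path = "..." },
// tokio = { version = "1", features = ["macros", "rt"] }
use notesmachine_storage::{NoteStore, NoteStoreError};

#[tokio::main]
async fn main() -> Result<(), NoteStoreError> {
    // An in-memory SQLite database keeps the example self-contained.
    let store = NoteStore::new("sqlite::memory:").await?;
    store.reset_database().await?;

    // Fetching by title creates the page when it does not exist yet,
    // so the backreference list starts out empty.
    let (page, backrefs) = store.get_page_by_title("Inbox").await?;
    assert_eq!(page.len(), 1);
    assert!(backrefs.is_empty());
    Ok(())
}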
View File

@@ -1,5 +1,7 @@
 use chrono::{DateTime, Utc};
 use derive_builder::Builder;
+use friendly_id;
+use shrinkwraprs::Shrinkwrap;
 use sqlx::{self, FromRow};
 
 // Page is German for "Box," and is used both because this is
@@ -48,6 +50,12 @@ macro_rules! build_conversion_enums {
     };
 }
 
+#[derive(Shrinkwrap, Clone)]
+pub(crate) struct NoteId(pub String);
+
+#[derive(Shrinkwrap, Clone)]
+pub(crate) struct ParentId(pub String);
+
 // The different kinds of objects we support.
 build_conversion_enums!(
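Editor's note: the NoteId and ParentId newtypes added above use shrinkwraprs to keep the two kinds of string IDs from being swapped while still dereferencing to the inner String, which is why the private store binds &**note_id. A small sketch of the pattern, outside the project's own structs (the function name is illustrative):

use shrinkwraprs::Shrinkwrap;

#[derive(Shrinkwrap, Clone)]
struct NoteId(pub String);

#[derive(Shrinkwrap, Clone)]
struct ParentId(pub String);

// Accepts only a NoteId; passing a ParentId is a compile-time error.
fn as_sql_param(note_id: &NoteId) -> &str {
    // &NoteId -> NoteId -> String via Deref, then re-borrowed as &str.
    &**note_id
}

fn main() {
    let id = NoteId("note-1".to_string());
    let parent = ParentId("root".to_string());
    assert_eq!(as_sql_param(&id), "note-1");
    assert_eq!(parent.as_str(), "root");
}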