Compare commits


No commits in common. "canon" and "one-table-to-rule-them-all" have entirely different histories.

18 changed files with 333 additions and 547 deletions

View File

@ -1,11 +0,0 @@
[[source]]
url = "https://pypi.python.org/simple"
verify_ssl = true
name = "pypi"
[packages]
[dev-packages]
[requires]
python_version = "2.7"

View File

@ -1 +0,0 @@
cognitive-complexity-threshold = 9

View File

@ -1,5 +1,2 @@
[ ] Add RelationshipKind to Notes passed out [ ] Add RelationshipKind to Notes passed out
[ ] Add KastenKind to Backreferences passed out [ ] Add KastenKind to Backreferences passed out
[ ] Provide the array of note references (the 'cycle' manager) to make
mapping from Vec->Tree easier.

View File

@ -1 +0,0 @@
Content analysis

View File

@ -1,3 +1,4 @@
use sqlx;
use thiserror::Error; use thiserror::Error;
/// All the ways looking up objects can fail /// All the ways looking up objects can fail
@ -8,9 +9,6 @@ pub enum NoteStoreError {
#[error("Invalid Note Structure")] #[error("Invalid Note Structure")]
InvalidNoteStructure(String), InvalidNoteStructure(String),
/// The requested kasten or note was not found. As much as
/// possible, this should be preferred to a
/// sqlx::Error::RowNotFound.
#[error("Not found")] #[error("Not found")]
NotFound, NotFound,

View File

@ -1,11 +1,13 @@
mod errors; mod errors;
mod parser; mod reference_parser;
mod store; mod store;
mod store_private;
mod structs; mod structs;
pub use crate::errors::NoteStoreError; pub use crate::errors::NoteStoreError;
pub use crate::store::NoteStore; pub use crate::store::NoteStore;
pub use crate::structs::{Note, NoteKind, NoteRelationship, PageRelationship}; pub use crate::structs::{Note, NoteKind, NoteRelationship, KastenRelationship};
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
@ -28,8 +30,8 @@ mod tests {
#[tokio::test(threaded_scheduler)] #[tokio::test(threaded_scheduler)]
async fn fetching_unfound_page_by_slug_works() { async fn fetching_unfound_page_by_slug_works() {
let storagepool = fresh_inmemory_database().await; let storagepool = fresh_inmemory_database().await;
let foundpage = storagepool.get_page_by_slug("nonexistent-page").await; let foundkasten = storagepool.get_kasten_by_slug("nonexistent-kasten").await;
assert!(foundpage.is_err()); assert!(foundkasten.is_err());
} }
// Request for the page by title. If the page exists, return it. // Request for the page by title. If the page exists, return it.
@ -41,7 +43,7 @@ mod tests {
let title = "Nonexistent Page"; let title = "Nonexistent Page";
let now = chrono::Utc::now(); let now = chrono::Utc::now();
let storagepool = fresh_inmemory_database().await; let storagepool = fresh_inmemory_database().await;
let newpageresult = storagepool.get_page_by_title(&title).await; let newpageresult = storagepool.get_kasten_by_title(&title).await;
assert!(newpageresult.is_ok(), "{:?}", newpageresult); assert!(newpageresult.is_ok(), "{:?}", newpageresult);
let (newpages, _) = newpageresult.unwrap(); let (newpages, _) = newpageresult.unwrap();
@ -51,7 +53,7 @@ mod tests {
assert_eq!(newpage.content, title, "{:?}", newpage.content); assert_eq!(newpage.content, title, "{:?}", newpage.content);
assert_eq!(newpage.id, "nonexistent-page"); assert_eq!(newpage.id, "nonexistent-page");
assert_eq!(newpage.kind, NoteKind::Page); assert_eq!(newpage.kind, NoteKind::Kasten);
assert!((newpage.creation_date - now).num_minutes() < 1); assert!((newpage.creation_date - now).num_minutes() < 1);
assert!((newpage.updated_date - now).num_minutes() < 1); assert!((newpage.updated_date - now).num_minutes() < 1);
assert!((newpage.lastview_date - now).num_minutes() < 1); assert!((newpage.lastview_date - now).num_minutes() < 1);
@ -69,7 +71,7 @@ mod tests {
async fn can_nest_notes() { async fn can_nest_notes() {
let title = "Nonexistent Page"; let title = "Nonexistent Page";
let storagepool = fresh_inmemory_database().await; let storagepool = fresh_inmemory_database().await;
let newpageresult = storagepool.get_page_by_title(&title).await; let newpageresult = storagepool.get_kasten_by_title(&title).await;
assert!(newpageresult.is_ok(), "{:?}", newpageresult); assert!(newpageresult.is_ok(), "{:?}", newpageresult);
let (newpages, _) = newpageresult.unwrap(); let (newpages, _) = newpageresult.unwrap();
@ -99,7 +101,7 @@ mod tests {
assert!(note4_id.is_ok(), "{:?}", note4_id); assert!(note4_id.is_ok(), "{:?}", note4_id);
let _note4_id = note4_id.unwrap(); let _note4_id = note4_id.unwrap();
let newpageresult = storagepool.get_page_by_title(&title).await; let newpageresult = storagepool.get_kasten_by_title(&title).await;
assert!(newpageresult.is_ok(), "{:?}", newpageresult); assert!(newpageresult.is_ok(), "{:?}", newpageresult);
let (newpages, _) = newpageresult.unwrap(); let (newpages, _) = newpageresult.unwrap();
@ -112,4 +114,6 @@ mod tests {
assert_eq!(newpages[1].parent_id, Some(newroot.id.clone())); assert_eq!(newpages[1].parent_id, Some(newroot.id.clone()));
assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone())); assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone()));
} }
} }

View File

@ -1,59 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Kasten.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//! - Siblings, creating lists
//! - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//! reflect the change. If a box is renamed to match an existing box, the
//! notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//! embeddings also get deleted, or is the oldest one elevated to be a new
//! "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//! it make sense to move this to a higher layer, and only provide the
//! hooks for it here?
//!
mod references;
use references::{build_page_titles, find_links};
pub(crate) fn build_references(content: &str) -> Vec<String> {
build_page_titles(&find_links(content))
}
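To make the relationship-table description above concrete, here is a small in-memory sketch of the (`parent_id`, `child_id`, `position`, `relationship_type`) rows and the ordering invariant they imply. The type and function names are illustrative only and do not appear in this patch; the real code keeps these rows in SQLite via sqlx.

```rust
// In-memory illustration of the note-to-note relationship rows described above.
#[derive(Debug, Clone, PartialEq)]
enum RelationshipType {
    Original,
    Embedding,
    Referencing,
}

#[derive(Debug, Clone)]
struct RelationshipRow {
    parent_id: String,
    child_id: String,
    // Unique per (parent_id, position): the children of a parent form an ordered list.
    position: i64,
    relationship_type: RelationshipType,
}

/// Children of `parent`, in display order, regardless of relationship type.
fn ordered_children(rows: &[RelationshipRow], parent: &str) -> Vec<String> {
    let mut children: Vec<&RelationshipRow> =
        rows.iter().filter(|r| r.parent_id == parent).collect();
    children.sort_by_key(|r| r.position);
    children.into_iter().map(|r| r.child_id.clone()).collect()
}
```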

View File

@ -4,7 +4,7 @@ use lazy_static::lazy_static;
use regex::bytes::Regex as BytesRegex; use regex::bytes::Regex as BytesRegex;
use regex::Regex; use regex::Regex;
struct Finder(pub Vec<String>); pub struct Finder(pub Vec<String>);
impl Finder { impl Finder {
pub fn new() -> Self { pub fn new() -> Self {
@ -24,7 +24,7 @@ impl Finder {
} }
} }
pub(super) fn find_links(document: &str) -> Vec<String> { fn find_links(document: &str) -> Vec<String> {
let arena = Arena::new(); let arena = Arena::new();
let mut finder = Finder::new(); let mut finder = Finder::new();
let root = parse_document(&arena, document, &ComrakOptions::default()); let root = parse_document(&arena, document, &ComrakOptions::default());
@ -50,48 +50,25 @@ pub(super) fn find_links(document: &str) -> Vec<String> {
finder.0 finder.0
} }
// This function is for the camel and snake case handlers.
fn recase(title: &str) -> String { fn recase(title: &str) -> String {
lazy_static! { lazy_static! {
// Take every word that has a pattern of a capital letter
// followed by a lower case, and put a space between the
// capital and anything that precedes it.
// TODO: Make Unicode aware.
static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap(); static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
// Take every instance of a lower case letter or number,
// followed by a capital letter, and put a space between them.
// TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap(); static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[a-z])(?P<n>\d)").unwrap();
// Take every instance of a word suffixed by a number and put
// a space between them.
// TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
// Take every instance of one or more of the symbols listed, and
// replace them with a space. This function is Unicode-irrelevant,
// although there is a list of symbols in the backreference parser
// that may disagree.
// TODO: Examine the backreference parser and determine if this is
// sufficient.
static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap(); static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
} }
// This should panic if misused, so... :-) // This should panic if misused, so... :-)
let pass = title.to_string(); let pass = title.to_string();
let pass = pass.strip_prefix("#").unwrap(); let pass = pass.strip_prefix("#").unwrap();
let pass = RE_PASS1.replace_all(&pass, "$s $n"); let pass = RE_PASS1.replace_all(&pass, "$s $n");
let pass = RE_PASS4.replace_all(&pass, "$s $n"); let pass = RE_PASS4.replace_all(&pass, "$s $n");
let pass = RE_PASS2.replace_all(&pass, "$s $n"); let pass = RE_PASS2.replace_all(&pass, "$s $n");
RE_PASS3.replace_all(&pass, " ").trim().to_string() RE_PASS3.replace_all(&pass, " ").trim().to_string()
} }
pub(super) fn build_page_titles(references: &[String]) -> Vec<String> { fn build_page_titles(references: &[String]) -> Vec<String> {
references references
.iter() .iter()
.filter_map(|s| match s.chars().next() { .filter_map(|s| match s.chars().next() {
@ -104,6 +81,10 @@ pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
.collect() .collect()
} }
pub(crate) fn build_references(content: &str) -> Vec<String> {
build_page_titles(&find_links(content))
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
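The three regex passes in `recase` are easier to follow with a concrete input. Below is a standalone sketch of the same pipeline, assuming the `regex` and `lazy_static` crates already used in this file; the function name and sample title are illustrative, not part of the patch.

```rust
// Sketch of the three-pass recasing shown above; names are illustrative only.
use lazy_static::lazy_static;
use regex::Regex;

fn recase_example(title: &str) -> String {
    lazy_static! {
        // Split CamelCase boundaries: "OneTable" -> "One Table".
        static ref PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
        // Split a lowercase letter or digit followed by an uppercase letter.
        static ref PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
        // Split a lowercase letter followed by a digit: "note2" -> "note 2".
        static ref PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
        // Collapse runs of separators (colon, underscore, hyphen, space) into one space.
        static ref PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
    }
    // Unlike the real code, this sketch tolerates a missing leading '#'.
    let pass = title.strip_prefix('#').unwrap_or(title).to_string();
    let pass = PASS1.replace_all(&pass, "$s $n");
    let pass = PASS4.replace_all(&pass, "$s $n");
    let pass = PASS2.replace_all(&pass, "$s $n");
    PASS3.replace_all(&pass, " ").trim().to_string()
}

fn main() {
    // Prints "One Table 2 Rule them all".
    println!("{}", recase_example("#OneTable2Rule_them-all"));
}
```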

View File

@ -1,6 +1,6 @@
DROP TABLE IF EXISTS notes; DROP TABLE IF EXISTS notes;
DROP TABLE IF EXISTS note_relationships; DROP TABLE IF EXISTS note_relationships;
DROP TABLE IF EXISTS note_page_relationships; DROP TABLE IF EXISTS note_kasten_relationships;
DROP TABLE IF EXISTS favorites; DROP TABLE IF EXISTS favorites;
CREATE TABLE notes ( CREATE TABLE notes (
@ -21,9 +21,9 @@ CREATE TABLE favorites (
FOREIGN KEY (id) REFERENCES notes (id) ON DELETE CASCADE FOREIGN KEY (id) REFERENCES notes (id) ON DELETE CASCADE
); );
-- This table represents the forest of data relating a page to its -- This table represents the forest of data relating a kasten to its
-- collections of notes. The root is itself "a note," but the content -- collections of notes. The root is itself "a note," but the content
-- of that note will always be just the title of the page. -- of that note will always be just the title of the kasten.
-- --
CREATE TABLE note_relationships ( CREATE TABLE note_relationships (
note_id TEXT NOT NULL, note_id TEXT NOT NULL,
@ -37,22 +37,16 @@ CREATE TABLE note_relationships (
CHECK (note_id <> parent_id) CHECK (note_id <> parent_id)
); );
-- This table represents the graph of data relating notes to pages. -- This table represents the graph of data relating notes to kastens.
-- --
CREATE TABLE note_page_relationships ( CREATE TABLE note_kasten_relationships (
note_id TEXT NOT NULL, note_id TEXT NOT NULL,
page_id TEXT NOT NULL, kasten_id TEXT NOT NULL,
kind TEXT NOT NULL, kind TEXT NOT NULL,
-- If either note disappears, we want all the edges to disappear as well. -- If either note disappears, we want all the edges to disappear as well.
FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE CASCADE, FOREIGN KEY (note_id) REFERENCES notes (id) ON DELETE CASCADE,
FOREIGN KEY (page_id) REFERENCES notes (id) ON DELETE CASCADE, FOREIGN KEY (kasten_id) REFERENCES notes (id) ON DELETE CASCADE,
UNIQUE (note_id, page_id), UNIQUE (note_id, kasten_id),
CHECK (note_id <> page_id) CHECK (note_id <> kasten_id)
); );
-- A fabulous constraint. This index prevents us from saying that
-- if a note points to a page, the page may not point to a
-- note. Now, it's absolutely required that a page_id point to
-- a PageType note; the content should be a title only.
CREATE UNIQUE INDEX note_page_unique_idx
ON note_page_relationships (MIN(note_id, page_id), MAX(note_id, page_id));

View File

@ -45,8 +45,8 @@ FROM (
ON note_parents.id = note_relationships.parent_id ON note_parents.id = note_relationships.parent_id
WHERE notes.id WHERE notes.id
IN (SELECT note_id IN (SELECT note_id
FROM note_page_relationships FROM note_kasten_relationships
WHERE page_id = ?) -- IMPORTANT: THIS IS THE PARAMETER WHERE kasten_id = ?) -- IMPORTANT: THIS IS THE PARAMETER
UNION UNION
SELECT DISTINCT SELECT DISTINCT
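For reference, the bound parameter above is the kasten id, and the rows coming back are the notes that point at it. A minimal sketch of issuing that lookup through sqlx, assuming the SQLite pool used elsewhere in this branch; the helper name is illustrative, and the tuple-row form of `query_as` may vary slightly between sqlx versions.

```rust
// Illustrative helper around the note -> kasten edge table from this patch.
use sqlx::sqlite::SqlitePool;

async fn note_ids_referring_to_kasten(
    pool: &SqlitePool,
    kasten_id: &str,
) -> sqlx::Result<Vec<String>> {
    // Edges run note -> kasten, so the kasten id is the bound parameter.
    let rows: Vec<(String,)> =
        sqlx::query_as("SELECT note_id FROM note_kasten_relationships WHERE kasten_id = ?;")
            .bind(kasten_id)
            .fetch_all(pool)
            .await?;
    Ok(rows.into_iter().map(|(id,)| id).collect())
}
```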

View File

@ -10,7 +10,7 @@
//! sense in the future to separate the decomposition of the note //! sense in the future to separate the decomposition of the note
//! content into a higher layer. //! content into a higher layer.
//! //!
//! Notesmachine storage notes consist of two items: Note and Page. //! Notesmachine storage notes consist of two items: Note and Kasten.
//! This distinction is somewhat arbitrary, as structurally these two //! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table. //! items are stored in the same table.
//! //!
@ -50,22 +50,24 @@
//! it make sense to move this to a higher layer, and only provide the //! it make sense to move this to a higher layer, and only provide the
//! hooks for it here? //! hooks for it here?
//! //!
#![allow(clippy::len_zero)]
use crate::errors::NoteStoreError; use crate::errors::NoteStoreError;
use crate::parser::build_references; use crate::reference_parser::build_references;
use crate::store::private::*; use crate::store_private::*;
use crate::structs::*; use crate::structs::*;
use sqlx::sqlite::SqlitePool; use sqlx::sqlite::SqlitePool;
use std::cmp;
// use std::collections::HashMap;
use std::sync::Arc; use std::sync::Arc;
/// A handle to our Sqlite database. /// A handle to our Sqlite database.
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct NoteStore(Arc<SqlitePool>); pub struct NoteStore(Arc<SqlitePool>);
pub type NoteResult<T> = core::result::Result<T, NoteStoreError>; type NoteResult<T> = core::result::Result<T, NoteStoreError>;
// After wrestling for a while with the fact that 'box' is a reserved // After wrestling for a while with the fact that 'box' is a reserved
// word in Rust, I decided to just go with Note (note) and Page // word in Rust, I decided to just go with Note (note) and Kasten
// (box). // (box).
impl NoteStore { impl NoteStore {
@ -90,34 +92,32 @@ impl NoteStore {
/// the slug, the slug is insufficient to generate a new page, so /// the slug, the slug is insufficient to generate a new page, so
/// this use case says that in the event of a failure to find the /// this use case says that in the event of a failure to find the
/// requested page, return a basic NotFound. /// requested page, return a basic NotFound.
pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> { pub async fn get_kasten_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
let page = select_page_by_slug(&*self.0, slug).await?; let kasten = select_kasten_by_slug(&*self.0, &NoteId(slug.to_string())).await?;
if page.is_empty() { if kasten.is_empty() {
return Err(NoteStoreError::NotFound); return Err(NoteStoreError::NotFound)
} }
let note_id = &page[0].id; let note_id = NoteId(kasten[0].id.clone());
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?; Ok((kasten, select_backreferences_for_kasten(&*self.0, &note_id).await?))
Ok((page, backreferences))
} }
/// Fetch page by title /// Fetch page by title
///
/// The most common use case: the user is navigating by requesting /// The most common use case: the user is navigating by requesting
/// a page. The page either exists or it doesn't. If it /// a page. The page either exists or it doesn't. If it
/// doesn't, we go out and make it. Since we know it doesn't exist, /// doesn't, we go out and make it. Since we know it doesn't exist,
/// we also know no backreferences to it exist, so in that case you /// we also know no backreferences to it exist, so in that case you
/// get back two empty vecs. /// get back two empty vecs.
pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> { pub async fn get_kasten_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
if title.is_empty() { if title.len() == 0 {
return Err(NoteStoreError::NotFound); return Err(NoteStoreError::NotFound);
} }
let page = select_page_by_title(&*self.0, title).await?; let kasten = select_kasten_by_title(&*self.0, title).await?;
if page.len() > 0 { if kasten.len() > 0 {
let note_id = &page[0].id; let note_id = NoteId(kasten[0].id.clone());
let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?; return Ok((kasten, select_backreferences_for_kasten(&*self.0, &note_id).await?));
return Ok((page, backreferences));
} }
// Sanity check! // Sanity check!
@ -130,17 +130,21 @@ impl NoteStore {
let mut tx = self.0.begin().await?; let mut tx = self.0.begin().await?;
let slug = generate_slug(&mut tx, title).await?; let slug = generate_slug(&mut tx, title).await?;
let page = create_page(&title, &slug); let zettlekasten = create_zettlekasten(&title, &slug);
insert_note(&mut tx, &page).await?; let _ = insert_note(&mut tx, &zettlekasten).await?;
tx.commit().await?; tx.commit().await?;
Ok((vec![Note::from(page)], vec![])) Ok((vec![Note::from(zettlekasten)], vec![]))
} }
pub async fn add_note(&self, note: &NewNote, parent_id: &str, location: Option<i64>) -> NoteResult<String> { pub async fn add_note(&self, note: &NewNote) -> NoteResult<String> {
let kind = RelationshipKind::Direct; self.insert_note(
let new_id = self.insert_note(note, parent_id, location, kind).await?; note,
Ok(new_id) &ParentId(parent_id.to_string()),
location,
RelationshipKind::Direct,
)
.await
} }
/// Move a note from one location to another. /// Move a note from one location to another.
@ -153,15 +157,21 @@ impl NoteStore {
) -> NoteResult<()> { ) -> NoteResult<()> {
let mut tx = self.0.begin().await?; let mut tx = self.0.begin().await?;
let old_parent_id = ParentId(old_parent_id.to_string());
let new_parent_id = ParentId(new_parent_id.to_string());
let note_id = NoteId(note_id.to_string());
let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?; let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
let old_note_location = old_note.location; let old_note_location = old_note.location;
let old_note_kind = old_note.kind; let old_note_kind = old_note.kind;
delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?; let _ = delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
close_hole_for_deleted_note_relationship(&mut tx, &old_parent_id, old_note_location).await?; let _ = close_hole_for_deleted_note(&mut tx, &old_parent_id, old_note_location).await?;
let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?; let parent_max_location = assert_max_child_location_for_note(&mut tx, &new_parent_id).await?;
make_room_for_new_note_relationship(&mut tx, &new_parent_id, new_location).await?; let new_location = cmp::min(parent_max_location + 1, new_location);
insert_note_to_note_relationship(&mut tx, &new_parent_id, &note_id, new_location, &old_note_kind).await?; let _ = make_room_for_new_note(&mut tx, &new_parent_id, new_location).await?;
let _ =
insert_note_to_note_relationship(&mut tx, &new_parent_id, &note_id, new_location, &old_note_kind).await?;
tx.commit().await?; tx.commit().await?;
Ok(()) Ok(())
} }
@ -170,11 +180,23 @@ impl NoteStore {
/// outgoing edge reference list every time. /// outgoing edge reference list every time.
pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> { pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
let references = build_references(&content); let references = build_references(&content);
let note_id = NoteId(note_id.to_string());
let mut tx = self.0.begin().await?; let mut tx = self.0.begin().await?;
update_note_content(&mut tx, &note_id, &content).await?; let _ = update_note_content(&mut tx, &note_id, &content).await?;
delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?; let _ = delete_bulk_note_to_kasten_relationships(&mut tx, &note_id).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?; let found_references = find_all_kasten_from_list_of_references(&mut tx, &references).await?;
insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids).await?; let new_references = diff_references(&references, &found_references);
let mut known_reference_ids: Vec<NoteId> = Vec::new();
for one_reference in new_references.iter() {
let slug = generate_slug(&mut tx, one_reference).await?;
let zettlekasten = create_zettlekasten(&one_reference, &slug);
let _ = insert_note(&mut tx, &zettlekasten).await?;
known_reference_ids.push(NoteId(slug));
}
known_reference_ids.append(&mut found_references.iter().map(|r| NoteId(r.id.clone())).collect());
let _ = insert_bulk_note_to_kasten_relationships(&mut tx, &note_id, &known_reference_ids).await?;
tx.commit().await?; tx.commit().await?;
Ok(()) Ok(())
} }
@ -183,22 +205,20 @@ impl NoteStore {
/// references from that note to pages are also deleted. /// references from that note to pages are also deleted.
pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> { pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
let mut tx = self.0.begin().await?; let mut tx = self.0.begin().await?;
let note_id = NoteId(note_id.to_string());
let parent_id = ParentId(note_parent_id.to_string());
let note_id = note_id.to_string(); let _ = delete_note_to_note_relationship(&mut tx, &parent_id, &note_id);
let parent_id = note_parent_id.to_string();
if parent_id != note_id {
delete_note_to_note_relationship(&mut tx, &parent_id, &note_id).await?;
}
// The big one: if zero parents report having an interest in this note, then it, // The big one: if zero parents report having an interest in this note, then it,
// *and any sub-relationships*, go away. // *and any sub-relationships*, go away.
if count_existing_note_relationships(&mut tx, &note_id).await? == 0 { if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
delete_note_to_page_relationships(&mut tx, &note_id).await?; let _ = delete_note_to_kasten_relationships(&mut tx, &note_id).await?;
delete_note(&mut tx, &note_id).await?; let _ = delete_note(&mut tx, &note_id).await?;
} }
tx.commit().await?; tx.commit().await?;
Ok(()) Ok(())
} }
} }
// The Private stuff // The Private stuff
@ -209,19 +229,19 @@ impl NoteStore {
async fn insert_note( async fn insert_note(
&self, &self,
note: &NewNote, note: &NewNote,
parent_id: &str, parent_id: &ParentId,
location: Option<i64>, location: Option<i64>,
kind: RelationshipKind, kind: RelationshipKind,
) -> NoteResult<String> { ) -> NoteResult<String> {
if let Some(location) = location { if let Some(location) = location {
if location < 0 { if location < 0 {
return Err(NoteStoreError::InvalidNoteStructure( return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A negative location is not valid.".to_string(), "Add note: A negative location is not valid.".to_string(),
)); ));
} }
} }
if parent_id.is_empty() { if parent_id.is_empty() {
return Err(NoteStoreError::InvalidNoteStructure( return Err(NoteStoreError::InvalidNoteStructure(
"Add note: A parent note ID is required.".to_string(), "Add note: A parent note ID is required.".to_string(),
)); ));
@ -242,13 +262,33 @@ impl NoteStore {
let references = build_references(&note.content); let references = build_references(&note.content);
let mut tx = self.0.begin().await?; let mut tx = self.0.begin().await?;
let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?; let location = {
let max_child = assert_max_child_location_for_note(&mut tx, parent_id).await? + 1;
if let Some(location) = location {
cmp::min(max_child, location)
} else {
max_child
}
};
let note_id = NoteId(note.id.clone());
insert_note(&mut tx, &note).await?; insert_note(&mut tx, &note).await?;
make_room_for_new_note_relationship(&mut tx, &parent_id, location).await?; make_room_for_new_note(&mut tx, &parent_id, location).await?;
insert_note_to_note_relationship(&mut tx, &parent_id, &note.id, location, &kind).await?; insert_note_to_note_relationship(&mut tx, &parent_id, &note_id, location, &kind).await?;
let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
insert_bulk_note_to_page_relationships(&mut tx, &note.id, &known_reference_ids).await?; let found_references = find_all_kasten_from_list_of_references(&mut tx, &references).await?;
let new_references = diff_references(&references, &found_references);
let mut known_reference_ids: Vec<NoteId> = Vec::new();
for one_reference in new_references.iter() {
let slug = generate_slug(&mut tx, one_reference).await?;
let zettlekasten = create_zettlekasten(&one_reference, &slug);
let _ = insert_note(&mut tx, &zettlekasten).await?;
known_reference_ids.push(NoteId(slug));
}
known_reference_ids.append(&mut found_references.iter().map(|r| NoteId(r.id.clone())).collect());
let _ = insert_bulk_note_to_kasten_relationships(&mut tx, &note_id, &known_reference_ids).await?;
tx.commit().await?; tx.commit().await?;
Ok(note.id.to_string()) Ok(note_id.to_string())
} }
} }
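A hedged usage sketch of the renamed `NoteStore` API as it stands in this branch; the in-memory database URL and the printed summary are placeholders, not part of the patch.

```rust
// Illustrative caller of the public NoteStore surface shown in this diff.
use nm_store::{NoteStore, NoteStoreError};

async fn show_kasten(title: &str) -> Result<(), NoteStoreError> {
    let store = NoteStore::new("sqlite::memory:").await?;
    // get_kasten_by_title creates the kasten when it does not exist yet, so a
    // brand-new title comes back with an empty backreference list.
    let (notes, backreferences) = store.get_kasten_by_title(title).await?;
    println!("{} notes, {} backreferences", notes.len(), backreferences.len());
    Ok(())
}
```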

View File

@ -1,58 +0,0 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.
//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Kasten.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//! - There are two positions:
//! - Siblings, creating lists
//! - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//! reflect the change. If a box is renamed to match an existing box, the
//! notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//! embeddings also get deleted, or is the oldest one elevated to be a new
//! "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//! it make sense to move this to a higher layer, and only provide the
//! hooks for it here?
//!
mod api;
mod private;
pub use crate::store::api::NoteResult;
pub use crate::store::api::NoteStore;

View File

@ -2,8 +2,7 @@ use crate::structs::*;
use lazy_static::lazy_static; use lazy_static::lazy_static;
use regex::Regex; use regex::Regex;
use slug::slugify; use slug::slugify;
use sqlx::{sqlite::Sqlite, Acquire, Done, Executor, Transaction}; use sqlx::{sqlite::Sqlite, Done, Executor};
use std::cmp;
use std::collections::HashSet; use std::collections::HashSet;
type SqlResult<T> = sqlx::Result<T>; type SqlResult<T> = sqlx::Result<T>;
@ -19,8 +18,14 @@ type SqlResult<T> = sqlx::Result<T>;
// coherent and easily readable, and hides away the gnarliness of some // coherent and easily readable, and hides away the gnarliness of some
// of the SQL queries. // of the SQL queries.
// Important!!! Note_relationships are usually (parent_note -> note),
// but Note to Kasten relationships are always (note-as-parent ->
// kasten_note), so when looking for "all the notes referring to this
// kasten", you use the kasten's id as the TARGET note_id, and the
// note referring to the kasten goes in the parent_id.
lazy_static! { lazy_static! {
static ref SELECT_PAGE_BY_TITLE_SQL: String = str::replace( static ref SELECT_KASTEN_BY_TITLE_SQL: String = str::replace(
include_str!("sql/select_notes_by_parameter.sql"), include_str!("sql/select_notes_by_parameter.sql"),
"QUERYPARAMETER", "QUERYPARAMETER",
"notes.content" "notes.content"
@ -28,7 +33,7 @@ lazy_static! {
} }
lazy_static! { lazy_static! {
static ref SELECT_PAGE_BY_ID_SQL: String = str::replace( static ref SELECT_KASTEN_BY_ID_SQL: String = str::replace(
include_str!("sql/select_notes_by_parameter.sql"), include_str!("sql/select_notes_by_parameter.sql"),
"QUERYPARAMETER", "QUERYPARAMETER",
"notes.id" "notes.id"
@ -36,8 +41,8 @@ lazy_static! {
} }
lazy_static! { lazy_static! {
static ref SELECT_NOTES_BACKREFERENCING_PAGE_SQL: &'static str = static ref SELECT_NOTES_BACKREFENCING_KASTEN_SQL: &'static str =
include_str!("sql/select_notes_backreferencing_page.sql"); include_str!("sql/select_notes_backreferencing_kasten.sql");
} }
// ___ _ // ___ _
@ -60,51 +65,37 @@ where
// |_|\___|\__\__|_||_| |_|\_\__,_/__/\__\___|_||_| // |_|\___|\__\__|_||_| |_|\_\__,_/__/\__\___|_||_|
// //
// The next three functions are essentially the same, although the internal pub(crate) async fn select_kasten_by_slug<'a, E>(executor: E, slug: &NoteId) -> SqlResult<Vec<Note>>
// SQL operations are quite different between the first two and the last.
async fn select_object_by_query<'a, E>(executor: E, query: &str, field: &str) -> SqlResult<Vec<Note>>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let r: Vec<RowNote> = sqlx::query_as(query).bind(field).fetch_all(executor).await?; let r: Vec<RowNote> = sqlx::query_as(&SELECT_KASTEN_BY_ID_SQL)
Ok(r.into_iter().map(Note::from).collect()) .bind(&**slug)
.fetch_all(executor)
.await?;
Ok(r.into_iter().map(|z| Note::from(z)).collect())
} }
// Select the requested page via its id. This is fairly rare; pub(crate) async fn select_kasten_by_title<'a, E>(executor: E, title: &str) -> SqlResult<Vec<Note>>
// pages should usually be picked up via their title, but if you're
// navigating to an instance, this is how you specify the page in a
// URL. The return value is an array of Note objects; it is the
// responsibility of client code to restructure these into a tree-like
// object.
//
// Recommended: Clients should update the URL whenever changing
// page.
pub(crate) async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<Vec<Note>>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
select_object_by_query(executor, &SELECT_PAGE_BY_ID_SQL, &slug).await let r: Vec<RowNote> = sqlx::query_as(&SELECT_KASTEN_BY_TITLE_SQL)
.bind(&title)
.fetch_all(executor)
.await?;
Ok(r.into_iter().map(|z| Note::from(z)).collect())
} }
// Fetch the page by title. The return value is an array of Note pub(crate) async fn select_backreferences_for_kasten<'a, E>(executor: E, kasten_id: &NoteId) -> SqlResult<Vec<Note>>
// objects; it is the responsibility of client code to restructure
// these into a tree-like object.
pub(crate) async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<Vec<Note>>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
select_object_by_query(executor, &SELECT_PAGE_BY_TITLE_SQL, &title).await let r: Vec<RowNote> = sqlx::query_as(&SELECT_NOTES_BACKREFENCING_KASTEN_SQL)
} .bind(&**kasten_id)
.fetch_all(executor)
// Fetch all backreferences to a page. The return value is an array .await?;
// of arrays, and inside each array is a list from a root page to Ok(r.into_iter().map(|z| Note::from(z)).collect())
// the note that references the given page. Clients may choose how
// they want to display that collection.
pub(crate) async fn select_backreferences_for_page<'a, E>(executor: E, page_id: &str) -> SqlResult<Vec<Note>>
where
E: Executor<'a, Database = Sqlite>,
{
select_object_by_query(executor, &SELECT_NOTES_BACKREFERENCING_PAGE_SQL, &page_id).await
} }
// ___ _ ___ _ _ _ // ___ _ ___ _ _ _
@ -113,55 +104,26 @@ where
// |___|_||_/__/\___|_| \__| \___/|_||_\___| |_|\_\___/\__\___| // |___|_||_/__/\___|_| \__| \___/|_||_\___| |_|\_\___/\__\___|
// //
// Inserts a single note into the notes table. That is all. pub(crate) async fn insert_note<'a, E>(executor: E, zettle: &NewNote) -> SqlResult<String>
pub(crate) async fn insert_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<String>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let insert_one_note_sql = concat!( let insert_one_page_sql = concat!(
"INSERT INTO notes (id, content, kind, ", "INSERT INTO notes (id, content, kind, ",
" creation_date, updated_date, lastview_date) ", " creation_date, updated_date, lastview_date) ",
"VALUES (?, ?, ?, ?, ?, ?);" "VALUES (?, ?, ?, ?, ?, ?);"
); );
sqlx::query(insert_one_note_sql) let _ = sqlx::query(insert_one_page_sql)
.bind(&note.id) .bind(&zettle.id)
.bind(&note.content) .bind(&zettle.content)
.bind(note.kind.to_string()) .bind(zettle.kind.to_string())
.bind(&note.creation_date) .bind(&zettle.creation_date)
.bind(&note.updated_date) .bind(&zettle.updated_date)
.bind(&note.lastview_date) .bind(&zettle.lastview_date)
.execute(executor) .execute(executor)
.await?; .await?;
Ok(note.id.clone()) Ok(zettle.id.clone())
}
// Inserts a single note into the notes table. That is all.
pub(crate) async fn insert_bulk_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
where
E: Executor<'a, Database = Sqlite>,
{
if notes.is_empty() {
return Ok(());
}
let insert_pattern = "VALUES (?, ?, ?, ?, ?, ?)".to_string();
let insert_bulk_notes_sql = "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) "
.to_string()
+ &[insert_pattern.as_str()].repeat(notes.len()).join(", ")
+ &";".to_string();
let mut request = sqlx::query(&insert_bulk_notes_sql);
for note in notes {
request = request
.bind(&note.id)
.bind(&note.content)
.bind(note.kind.to_string())
.bind(&note.creation_date)
.bind(&note.updated_date)
.bind(&note.lastview_date);
}
request.execute(executor).await.map(|_| ())
} }
// ___ _ _ _ _ __ _ // ___ _ _ _ _ __ _
@ -192,8 +154,7 @@ pub(crate) fn find_maximal_slug_number(slugs: &[JustId]) -> Option<u32> {
// Given an initial string and an existing collection of slugs, // Given an initial string and an existing collection of slugs,
// generate a new slug that does not conflict with the current // generate a new slug that does not conflict with the current
// collection. Right now we're using the slugify operation, which... // collection.
// isn't all that.
pub(crate) async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String> pub(crate) async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
@ -202,7 +163,7 @@ where
static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap(); static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
static ref SLUG_FINDER_SQL: String = format!( static ref SLUG_FINDER_SQL: String = format!(
"SELECT id FROM notes WHERE kind = '{}' AND id LIKE '?%';", "SELECT id FROM notes WHERE kind = '{}' AND id LIKE '?%';",
NoteKind::Page.to_string() NoteKind::Kasten.to_string()
); );
} }
@ -219,13 +180,11 @@ where
}) })
} }
// A helper function: given a title and a slug, create a PageType pub(crate) fn create_zettlekasten(title: &str, slug: &str) -> NewNote {
// note.
pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
NewNoteBuilder::default() NewNoteBuilder::default()
.id(slug.to_string()) .id(slug.to_string())
.content(title.to_string()) .content(title.to_string())
.kind(NoteKind::Page) .kind(NoteKind::Kasten)
.build() .build()
.unwrap() .unwrap()
} }
@ -236,14 +195,14 @@ pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
// \___/| .__/\__,_\__,_|\__\___| \___/|_||_\___| |_|\_\___/\__\___| // \___/| .__/\__,_\__,_|\__\___| \___/|_||_\___| |_|\_\___/\__\___|
// |_| // |_|
pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: &str, content: &str) -> SqlResult<()> pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: &NoteId, content: &str) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?"; let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
let count = sqlx::query(update_note_content_sql) let count = sqlx::query(update_note_content_sql)
.bind(content) .bind(content)
.bind(note_id) .bind(&**note_id)
.execute(executor) .execute(executor)
.await? .await?
.rows_affected(); .rows_affected();
@ -262,8 +221,8 @@ where
pub(crate) async fn select_note_to_note_relationship<'a, E>( pub(crate) async fn select_note_to_note_relationship<'a, E>(
executor: E, executor: E,
parent_id: &str, parent_id: &ParentId,
note_id: &str, note_id: &NoteId,
) -> SqlResult<NoteRelationship> ) -> SqlResult<NoteRelationship>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
@ -275,8 +234,8 @@ where
"LIMIT 1" "LIMIT 1"
); );
let s: NoteRelationshipRow = sqlx::query_as(get_note_to_note_relationship_sql) let s: NoteRelationshipRow = sqlx::query_as(get_note_to_note_relationship_sql)
.bind(parent_id) .bind(&**parent_id)
.bind(note_id) .bind(&**note_id)
.fetch_one(executor) .fetch_one(executor)
.await?; .await?;
Ok(NoteRelationship::from(s)) Ok(NoteRelationship::from(s))
@ -290,8 +249,8 @@ where
pub(crate) async fn insert_note_to_note_relationship<'a, E>( pub(crate) async fn insert_note_to_note_relationship<'a, E>(
executor: E, executor: E,
parent_id: &str, parent_id: &ParentId,
note_id: &str, note_id: &NoteId,
location: i64, location: i64,
kind: &RelationshipKind, kind: &RelationshipKind,
) -> SqlResult<()> ) -> SqlResult<()>
@ -303,21 +262,17 @@ where
"values (?, ?, ?, ?)" "values (?, ?, ?, ?)"
); );
sqlx::query(insert_note_to_note_relationship_sql) let _ = sqlx::query(insert_note_to_note_relationship_sql)
.bind(parent_id) .bind(&**parent_id)
.bind(note_id) .bind(&**note_id)
.bind(&location) .bind(&location)
.bind(kind.to_string()) .bind(&kind.to_string())
.execute(executor) .execute(executor)
.await?; .await?;
Ok(()) Ok(())
} }
pub(crate) async fn make_room_for_new_note_relationship<'a, E>( pub(crate) async fn make_room_for_new_note<'a, E>(executor: E, parent_id: &ParentId, location: i64) -> SqlResult<()>
executor: E,
parent_id: &str,
location: i64,
) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
@ -327,30 +282,15 @@ where
"WHERE location >= ? and parent_id = ?;" "WHERE location >= ? and parent_id = ?;"
); );
sqlx::query(make_room_for_new_note_sql) let _ = sqlx::query(make_room_for_new_note_sql)
.bind(&location) .bind(&location)
.bind(parent_id) .bind(&**parent_id)
.execute(executor) .execute(executor)
.await?; .await?;
Ok(()) Ok(())
} }
pub(crate) async fn determine_max_child_location_for_note<'a, E>( pub(crate) async fn assert_max_child_location_for_note<'a, E>(executor: E, note_id: &ParentId) -> SqlResult<i64>
executor: E,
note_id: &str,
comp_loc: Option<i64>,
) -> SqlResult<i64>
where
E: Executor<'a, Database = Sqlite>,
{
let row_count = assert_max_child_location_for_note(executor, note_id).await? + 1;
Ok(match comp_loc {
Some(location) => cmp::min(row_count, location),
None => row_count,
})
}
pub(crate) async fn assert_max_child_location_for_note<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
@ -358,7 +298,7 @@ where
"SELECT MAX(location) AS count FROM note_relationships WHERE parent_id = ?;"; "SELECT MAX(location) AS count FROM note_relationships WHERE parent_id = ?;";
let count: RowCount = sqlx::query_as(assert_max_child_location_for_note_sql) let count: RowCount = sqlx::query_as(assert_max_child_location_for_note_sql)
.bind(note_id) .bind(&**note_id)
.fetch_one(executor) .fetch_one(executor)
.await?; .await?;
@ -371,10 +311,10 @@ where
// |_|\_\___/\__\___| \__\___/ |_|\_\__,_/__/\__\___|_||_| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/__/ // |_|\_\___/\__\___| \__\___/ |_|\_\__,_/__/\__\___|_||_| |_|_\___|_\__,_|\__|_\___/_||_/__/_||_|_| .__/__/
// |_| // |_|
pub(crate) async fn insert_bulk_note_to_page_relationships<'a, E>( pub(crate) async fn insert_bulk_note_to_kasten_relationships<'a, E>(
executor: E, executor: E,
note_id: &str, note_id: &NoteId,
references: &[String], references: &[NoteId],
) -> SqlResult<()> ) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
@ -383,27 +323,27 @@ where
return Ok(()); return Ok(());
} }
let insert_pattern = format!("(?, ?, '{}')", PageRelationshipKind::Page.to_string()); let insert_pattern = format!("(?, ?, '{}')", KastenRelationshipKind::Kasten.to_string());
let insert_note_page_references_sql = "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES " let insert_note_page_references_sql = "INSERT INTO note_kasten_relationships (note_id, kasten_id, kind) VALUES "
.to_string() .to_string()
+ &[insert_pattern.as_str()].repeat(references.len()).join(", ") + &[insert_pattern.as_str()].repeat(references.len()).join(", ")
+ &";".to_string(); + &";".to_string();
let mut request = sqlx::query(&insert_note_page_references_sql); let mut request = sqlx::query(&insert_note_page_references_sql);
for reference in references { for reference in references {
request = request.bind(note_id).bind(reference); request = request.bind(&**note_id).bind(&**reference);
} }
request.execute(executor).await.map(|_| ()) request.execute(executor).await.map(|_| ())
} }
pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()> pub(crate) async fn delete_bulk_note_to_kasten_relationships<'a, E>(executor: E, note_id: &NoteId) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let delete_note_to_page_relationship_sql = "DELETE FROM note_page_relationships WHERE and note_id = ?;"; let delete_note_to_kasten_relationship_sql = "DELETE FROM note_kasten_relationships WHERE and note_id = ?;";
sqlx::query(delete_note_to_page_relationship_sql) let _ = sqlx::query(delete_note_to_kasten_relationship_sql)
.bind(note_id) .bind(&**note_id)
.execute(executor) .execute(executor)
.await?; .await?;
Ok(()) Ok(())
@ -426,7 +366,7 @@ pub(crate) fn diff_references(references: &[String], found_references: &[PageTit
// Returns all the (Id, title) pairs found in the database out of a // Returns all the (Id, title) pairs found in the database out of a
// list of titles. Used by insert_note and update_note_content to // list of titles. Used by insert_note and update_note_content to
// find the ids of all the references in a given document. // find the ids of all the references in a given document.
pub(crate) async fn find_all_page_from_list_of_references<'a, E>( pub(crate) async fn find_all_kasten_from_list_of_references<'a, E>(
executor: E, executor: E,
references: &[String], references: &[String],
) -> SqlResult<Vec<PageTitle>> ) -> SqlResult<Vec<PageTitle>>
@ -440,7 +380,7 @@ where
lazy_static! { lazy_static! {
static ref SELECT_ALL_REFERENCES_FOR_SQL_BASE: String = format!( static ref SELECT_ALL_REFERENCES_FOR_SQL_BASE: String = format!(
"SELECT id, content FROM notes WHERE kind = '{}' AND content IN (", "SELECT id, content FROM notes WHERE kind = '{}' AND content IN (",
NoteKind::Page.to_string() NoteKind::Kasten.to_string()
); );
} }
@ -462,8 +402,8 @@ where
pub(crate) async fn delete_note_to_note_relationship<'a, E>( pub(crate) async fn delete_note_to_note_relationship<'a, E>(
executor: E, executor: E,
parent_id: &str, parent_id: &ParentId,
note_id: &str, note_id: &NoteId,
) -> SqlResult<()> ) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
@ -474,8 +414,8 @@ where
); );
let count = sqlx::query(delete_note_to_note_relationship_sql) let count = sqlx::query(delete_note_to_note_relationship_sql)
.bind(parent_id) .bind(&**parent_id)
.bind(note_id) .bind(&**note_id)
.execute(executor) .execute(executor)
.await? .await?
.rows_affected(); .rows_affected();
@ -486,33 +426,33 @@ where
} }
} }
pub(crate) async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()> pub(crate) async fn delete_note_to_kasten_relationships<'a, E>(executor: E, note_id: &NoteId) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
lazy_static! { lazy_static! {
static ref DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL: String = format!( static ref DELETE_NOTE_TO_KASTEN_RELATIONSHIPS_SQL: String = format!(
"DELETE FROM note_relationships WHERE kind in ('{}', '{}') AND parent_id = ?;", "DELETE FROM note_relationships WHERE kind in ('{}', '{}') AND parent_id = ?;",
PageRelationshipKind::Page.to_string(), KastenRelationshipKind::Kasten.to_string(),
PageRelationshipKind::Unacked.to_string() KastenRelationshipKind::Unacked.to_string()
); );
} }
sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL) let _ = sqlx::query(&DELETE_NOTE_TO_KASTEN_RELATIONSHIPS_SQL)
.bind(note_id) .bind(&**note_id)
.execute(executor) .execute(executor)
.await?; .await?;
Ok(()) Ok(())
} }
pub(crate) async fn delete_note<'a, E>(executor: E, note_id: &str) -> SqlResult<()> pub(crate) async fn delete_note<'a, E>(executor: E, note_id: &NoteId) -> SqlResult<()>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let delete_note_sql = "DELETE FROM notes WHERE note_id = ?"; let delete_note_sql = "DELETE FROM notes WHERE note_id = ?";
let count = sqlx::query(delete_note_sql) let count = sqlx::query(delete_note_sql)
.bind(note_id) .bind(&**note_id)
.execute(executor) .execute(executor)
.await? .await?
.rows_affected(); .rows_affected();
@ -526,9 +466,9 @@ where
// After removing a note, recalculate the position of all notes under // After removing a note, recalculate the position of all notes under
// the parent note, such that there order is now completely // the parent note, such that there order is now completely
// sequential. // sequential.
pub(crate) async fn close_hole_for_deleted_note_relationship<'a, E>( pub(crate) async fn close_hole_for_deleted_note<'a, E>(
executor: E, executor: E,
parent_id: &str, parent_id: &ParentId,
location: i64, location: i64,
) -> SqlResult<()> ) -> SqlResult<()>
where where
@ -540,39 +480,14 @@ where
"WHERE location > ? and parent_id = ?;" "WHERE location > ? and parent_id = ?;"
); );
sqlx::query(close_hole_for_deleted_note_sql) let _ = sqlx::query(close_hole_for_deleted_note_sql)
.bind(&location) .bind(&location)
.bind(parent_id) .bind(&**parent_id)
.execute(executor) .execute(executor)
.await?; .await?;
Ok(()) Ok(())
} }
// Given a list of references found in the content, generate the
// references that do not previously exist, returning all found
// references. NOTE: The function signature for this is for a
// transaction, and uses a nested transaction.
pub(crate) async fn validate_or_generate_all_found_references(
txi: &mut Transaction<'_, Sqlite>,
references: &[String],
) -> SqlResult<Vec<String>> {
let mut tx = txi.begin().await?;
let found_references = find_all_page_from_list_of_references(&mut tx, &references).await?;
let new_references = diff_references(&references, &found_references);
let mut new_page: Vec<NewNote> = vec![];
for one_reference in new_references.iter() {
let slug = generate_slug(&mut tx, one_reference).await?;
new_page.push(create_page(&one_reference, &slug));
}
insert_bulk_notes(&mut tx, &new_page).await?;
let mut all_reference_ids: Vec<String> = found_references.iter().map(|r| r.id.clone()).collect();
all_reference_ids.append(&mut new_page.iter().map(|r| r.id.clone()).collect());
tx.commit().await?;
Ok(all_reference_ids)
}
// __ __ _ // __ __ _
// | \/ (_)___ __ // | \/ (_)___ __
// | |\/| | (_-</ _| // | |\/| | (_-</ _|
@ -581,14 +496,17 @@ pub(crate) async fn validate_or_generate_all_found_references(
// The dreaded miscellaneous! // The dreaded miscellaneous!
pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<i64> pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_id: &NoteId) -> SqlResult<i64>
where where
E: Executor<'a, Database = Sqlite>, E: Executor<'a, Database = Sqlite>,
{ {
let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;"; let count_existing_note_relationships_sql =
"SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
let count: RowCount = sqlx::query_as(&count_existing_note_relationships_sql) let count: RowCount = sqlx::query_as(&count_existing_note_relationships_sql)
.bind(note_id) .bind(&**note_id)
.fetch_one(executor) .fetch_one(executor)
.await?; .await?;
Ok(count.count) Ok(count.count)
} }
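The body of `diff_references` is not shown in this diff. Below is a plausible sketch of what the call sites in `store.rs` rely on: keep only the referenced titles that do not already have a kasten row. `KastenRow` is a stand-in for the `PageTitle` row type (which also carries the kasten's id); none of these names are the author's code.

```rust
// Hedged reconstruction of the reference-diffing step used by insert_note
// and update_note_content.
use std::collections::HashSet;

struct KastenRow {
    content: String, // the kasten's title, as selected from the notes table
}

fn diff_references_sketch(references: &[String], found: &[KastenRow]) -> Vec<String> {
    let existing: HashSet<&str> = found.iter().map(|row| row.content.as_str()).collect();
    references
        .iter()
        .filter(|title| !existing.contains(title.as_str()))
        .cloned()
        .collect()
}
```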

View File

@ -1,9 +1,11 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use derive_builder::Builder; use derive_builder::Builder;
use friendly_id;
use shrinkwraprs::Shrinkwrap;
use sqlx::{self, FromRow}; use sqlx::{self, FromRow};
// Page is German for "Box," and is used both because this is // Kasten is German for "Box," and is used both because this is
// supposed to be a Page, and because "Box" is a heavily // supposed to be a Zettlekasten, and because "Box" is a heavily
// reserved word in Rust. So, for that matter, are "crate" and // reserved word in Rust. So, for that matter, are "crate" and
// "cargo," "cell," and so forth. If I'd wanted to go the Full // "cargo," "cell," and so forth. If I'd wanted to go the Full
// Noguchi, I guess I could have used "envelope." // Noguchi, I guess I could have used "envelope."
@ -48,11 +50,17 @@ macro_rules! build_conversion_enums {
}; };
} }
#[derive(Shrinkwrap, Clone)]
pub(crate) struct NoteId(pub String);
#[derive(Shrinkwrap, Clone)]
pub(crate) struct ParentId(pub String);
// The different kinds of objects we support. // The different kinds of objects we support.
build_conversion_enums!( build_conversion_enums!(
NoteKind, NoteKind,
"box" => Page, "box" => Kasten,
"note" => Note, "note" => Note,
"resource" => Resource, "resource" => Resource,
); );
@ -70,8 +78,8 @@ build_conversion_enums!(
); );
build_conversion_enums!( build_conversion_enums!(
PageRelationshipKind, KastenRelationshipKind,
"page" => Page, "kasten" => Kasten,
"unacked" => Unacked, "unacked" => Unacked,
"cancelled" => Cancelled, "cancelled" => Cancelled,
); );
@ -128,7 +136,7 @@ impl From<RowNote> for Note {
/// A new Note object as it's inserted into the system. It has no /// A new Note object as it's inserted into the system. It has no
/// parent or location information; those are data relative to the /// parent or location information; those are data relative to the
/// parent, and must be provided by the client. In the case of a /// parent, and must be provided by the client. In the case of a
/// Page, no location or parent is necessary. /// Kasten, no location or parent is necessary.
#[derive(Clone, Debug, Builder)] #[derive(Clone, Debug, Builder)]
pub struct NewNote { pub struct NewNote {
#[builder(default = r#"friendly_id::create()"#)] #[builder(default = r#"friendly_id::create()"#)]
@ -147,8 +155,8 @@ pub struct NewNote {
} }
impl From<NewNote> for Note { impl From<NewNote> for Note {
/// Only used for building new pages, so the decision-making is /// Only used for building new kastens, so the decision-making is
/// limited to page-level things, like pointing to self and /// limited to kasten-level things, like pointing to self and
/// having a location of zero. /// having a location of zero.
fn from(note: NewNote) -> Self { fn from(note: NewNote) -> Self {
Self { Self {
@ -209,25 +217,25 @@ impl From<NoteRelationshipRow> for NoteRelationship {
} }
#[derive(Clone, Debug, FromRow)] #[derive(Clone, Debug, FromRow)]
pub(crate) struct PageRelationshipRow { pub(crate) struct KastenRelationshipRow {
pub note_id: String, pub note_id: String,
pub page_id: String, pub kasten_id: String,
pub kind: String, pub kind: String,
} }
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct PageRelationship { pub struct KastenRelationship {
pub note_id: String, pub note_id: String,
pub page_id: String, pub kasten_id: String,
pub kind: PageRelationshipKind, pub kind: KastenRelationshipKind,
} }
impl From<PageRelationshipRow> for PageRelationship { impl From<KastenRelationshipRow> for KastenRelationship {
fn from(rel: PageRelationshipRow) -> Self { fn from(rel: KastenRelationshipRow) -> Self {
Self { Self {
page_id: rel.page_id, kasten_id: rel.kasten_id,
note_id: rel.note_id, note_id: rel.note_id,
kind: PageRelationshipKind::from(rel.kind), kind: KastenRelationshipKind::from(rel.kind),
} }
} }
} }
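The `&**note_id` pattern used throughout `store_private.rs` falls out of these `Shrinkwrap` newtypes: the derive provides `Deref` to the inner `String`, so one more dereference yields a `&str` for sqlx's `bind()`. A minimal sketch, assuming the `shrinkwraprs` crate added in this patch; the example values are illustrative.

```rust
// Demonstrates the deref chain behind &**note_id.
use shrinkwraprs::Shrinkwrap;

#[derive(Shrinkwrap, Clone)]
struct NoteId(pub String);

fn takes_str(s: &str) -> usize {
    s.len()
}

fn main() {
    let id = NoteId("a-note-id".to_string());
    // *id is the inner String, &**id is a &str view of it.
    assert_eq!(takes_str(&**id), 9);
    // The inner field is still reachable directly as id.0 when needed.
    assert_eq!(id.0, "a-note-id");
}
```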

View File

@ -1,15 +0,0 @@
.PHONY: all
all: help
.PHONY: help
help:
@M=$$(perl -ne 'm/((\w|-)*):.*##/ && print length($$1)."\n"' Makefile | \
sort -nr | head -1) && \
perl -ne "m/^((\w|-)*):.*##\s*(.*)/ && print(sprintf(\"%s: %s\t%s\n\", \$$1, \" \"x($$M-length(\$$1)), \$$3))" Makefile
# This is necessary because I'm trying hard not to use
# any `nightly` features. But rustfmt is likely to be
# a `nightly-only` feature for a long time to come, so
# this is my hack.
fmt: ## Format the code, using the most modern version of rustfmt
rustup run nightly cargo fmt

View File

@ -12,10 +12,10 @@
mod make_tree; mod make_tree;
mod structs; mod structs;
use crate::make_tree::{make_backreferences, make_note_tree};
use crate::structs::{Note, Page};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use nm_store::{NewNote, NoteStore, NoteStoreError}; use nm_store::{NoteStore, NoteStoreError, NewNote};
use crate::structs::{Page, Note};
use crate::make_tree::{make_note_tree, make_backreferences};
#[derive(Debug)] #[derive(Debug)]
pub struct Notesmachine(pub(crate) NoteStore); pub struct Notesmachine(pub(crate) NoteStore);
@ -23,97 +23,87 @@ pub struct Notesmachine(pub(crate) NoteStore);
type Result<T> = core::result::Result<T, NoteStoreError>; type Result<T> = core::result::Result<T, NoteStoreError>;
pub fn make_page(foundtree: &Note, backreferences: Vec<Vec<Note>>) -> Page { pub fn make_page(foundtree: &Note, backreferences: Vec<Vec<Note>>) -> Page {
Page { Page {
slug: foundtree.id, slug: foundtree.id,
title: foundtree.content, title: foundtree.content,
creation_date: foundtree.creation_date, creation_date: foundtree.creation_date,
updated_date: foundtree.updated_date, updated_date: foundtree.updated_date,
lastview_date: foundtree.lastview_date, lastview_date: foundtree.lastview_date,
deleted_date: foundtree.deleted_date, deleted_date: foundtree.deleted_date,
notes: foundtree.children, notes: foundtree.children,
backreferences: backreferences, backreferences: backreferences
} }
} }
impl Notesmachine { impl Notesmachine {
pub async fn new(url: &str) -> Result<Self> { pub async fn new(url: &str) -> Result<Self> {
let notestore = NoteStore::new(url).await?; let notestore = NoteStore::new(url).await?;
Ok(Notesmachine(notestore)) Ok(Notesmachine(notestore))
} }
pub async fn get_page_via_slug(&self, slug: &str) -> Result<Page> { pub async fn get_page_via_slug(&self, slug: &str) -> Result<Page> {
let (rawtree, rawbackreferences) = self.0.get_kasten_by_slug(slug).await?; let (rawtree, rawbackreferences) = self.0.get_kasten_by_slug(slug).await?;
Ok(make_page( Ok(make_page(&make_note_tree(&rawtree), make_backreferences(&rawbackreferences)))
&make_note_tree(&rawtree), }
make_backreferences(&rawbackreferences),
))
}
pub async fn get_page(&self, title: &str) -> Result<Page> { pub async fn get_page(&self, title: &str) -> Result<Page> {
let (rawtree, rawbackreferences) = self.0.get_kasten_by_title(title).await?; let (rawtree, rawbackreferences) = self.0.get_kasten_by_title(title).await?;
Ok(make_page( Ok(make_page(&make_note_tree(&rawtree), make_backreferences(&rawbackreferences)))
&make_note_tree(&rawtree), }
make_backreferences(&rawbackreferences),
))
}
// TODO: // TODO:
// You should be able to: // You should be able to:
// Add a note that has no parent (gets added to "today") // Add a note that has no parent (gets added to "today")
// Add a note that specifies only the page (gets added to page/root) // Add a note that specifies only the page (gets added to page/root)
// Add a note that has no location (gets tacked onto the end of the above) // Add a note that has no location (gets tacked onto the end of the above)
// Add a note that specifies the date of creation. // Add a note that specifies the date of creation.
pub async fn add_note(&self, note: &NewNote) -> Result<String> { pub async fn add_note(&self, note: &NewNote) -> Result<String> {
let mut note = note.clone(); let mut note = note.clone();
if note.parent_id.is_none() { if note.parent_id.is_none() {
note.parent_id = self.get_today_page().await?; let (parent, _) = self.get_today_page().await?;
} note.parent_id = parent.id;
Ok(self.0.add_note(&note)) }
} Ok(self.0.add_note(&note))
}
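A hedged sketch of the behaviour the TODO list above describes: a note handed in with no parent_id gets routed to the daily page before it reaches the store. NewNoteBuilder and its content setter are assumptions carried over from the nm-store structs, not something this diff defines:

    // Inside an async context (e.g. a #[tokio::test]); names marked
    // "assumed" are not visible in this diff.
    let machine = Notesmachine::new("sqlite://:memory:").await.unwrap();
    let note = NewNoteBuilder::default()           // assumed builder name
        .content("No parent given".to_string())    // assumed setter
        .build()
        .unwrap();
    let note_id = machine.add_note(&note).await.unwrap();
    assert!(!note_id.is_empty()); // stored under today's page by default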
// pub async fn reference_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> { // pub async fn reference_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> {
// todo!(); // todo!();
// } // }
// //
// pub async fn embed_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> { // pub async fn embed_note(&self, note_id: &str, new_parent_id: &str, new_location: i64) -> Result<()> {
// todo!(); // todo!();
// } // }
pub async fn move_note( pub async fn move_note(&self, note_id: &str, old_parent_id: &str, new_parent_id: &str, location: i64) -> Result<()> {
&self, self.0.move_note(note_id, old_parent_id, new_parent_id, location)
note_id: &str, }
old_parent_id: &str,
new_parent_id: &str,
location: i64,
) -> Result<()> {
self.0.move_note(note_id, old_parent_id, new_parent_id, location).await
}
pub async fn update_note(&self, note_id: &str, content: &str) -> Result<()> { pub async fn update_note(&self, note_id: &str, content: &str) -> Result<()> {
self.0.update_note_content(note_id, content).await self.0.update_note(note_id, content)
} }
pub async fn delete_note(&self, note_id: &str, parent_note_id: &str) -> Result<()> { pub async fn delete_note(&self, note_id: &str) -> Result<()> {
self.0.delete_note(note_id, parent_note_id).await self.0.delete_note(note_id)
} }
} }
// Private stuff // Private stuff
impl Notesmachine { impl Notesmachine {
async fn get_today_page(&self) -> Result<String> { async fn get_today_page(&self) -> Result<String> {
let title = chrono::Utc::now().format("%F").to_string(); let title = chrono::Utc::now().format("%F").to_string();
let (rawtree, _) = self.0.get_kasten_by_title(title).await?; let (rawtree, _) = self.0.get_kasten_by_title(title).await?;
Ok(rawtree.id) Ok(rawtree.id)
} }
} }
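One detail in get_today_page worth calling out: chrono's "%F" specifier is shorthand for "%Y-%m-%d", so the daily page is titled with an ISO-8601 date:

    // "%F" == "%Y-%m-%d", for example "2021-03-15". That string is what
    // get_kasten_by_title looks up (or creates) for the "today" page.
    let title = chrono::Utc::now().format("%F").to_string();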
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use tokio; use tokio;
async fn fresh_inmemory_database() -> Notesmachine { async fn fresh_inmemory_database() -> Notesmachine {
let notesmachine = Notesmachine::new("sqlite://:memory:").await; let notesmachine = Notesmachine::new("sqlite://:memory:").await;
assert!(notesmachine.is_ok(), "{:?}", notesmachine); assert!(notesmachine.is_ok(), "{:?}", notesmachine);
let notesmachine = notesmachine.unwrap(); let notesmachine = notesmachine.unwrap();
@ -125,7 +115,7 @@ mod tests {
#[tokio::test(threaded_scheduler)] #[tokio::test(threaded_scheduler)]
async fn fetching_unfound_page_by_slug_works() { async fn fetching_unfound_page_by_slug_works() {
let notesmachine = fresh_inmemory_database().await; let notesmachine = fresh_inmemory_database().await;
let unfoundpage = notesmachine.get_page_via_slug("nonexistent-slug").await; let unfoundpage = notesmachine.navigate_via_slug("nonexistent-slug").await;
assert!(unfoundpage.is_err()); assert!(unfoundpage.is_err());
} }
@ -133,14 +123,15 @@ mod tests {
async fn fetching_unfound_page_by_title_works() { async fn fetching_unfound_page_by_title_works() {
let title = "Nonexistent Page"; let title = "Nonexistent Page";
let notesmachine = fresh_inmemory_database().await; let notesmachine = fresh_inmemory_database().await;
let newpageresult = notesmachine.get_page(&title).await; let newpageresult = notesmachine.get_box(&title).await;
assert!(newpageresult.is_ok(), "{:?}", newpageresult); assert!(newpageresult.is_ok(), "{:?}", newpageresult);
let newpage = newpageresult.unwrap(); let newpage = newpageresult.unwrap();
assert_eq!(newpage.title, title, "{:?}", newpage.title); assert_eq!(newpage.title, title, "{:?}", newpage.title);
assert_eq!(newpage.slug, "nonexistent-page", "{:?}", newpage.slug); assert_eq!(newpage.slug, "nonexistent-page", "{:?}", newpage.slug);
assert_eq!(newpage.root_note.content, "", "{:?}", newpage.root_note.content); assert_eq!(newpage.root_note.content, "", "{:?}", newpage.root_note.content);
assert_eq!(newpage.root_note.notetype, "root", "{:?}", newpage.root_note.notetype); assert_eq!(newpage.root_note.notetype, "root", "{:?}", newpage.root_note.notetype);
assert_eq!(newpage.root_note.children.len(), 0, "{:?}", newpage.root_note.children); assert_eq!(newpage.root_note.children.len(), 0, "{:?}", newpage.root_note.children);
} }
} }

View File

@ -1,6 +1,7 @@
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug)] #[derive(Clone, Serialize, Deserialize, Debug)]
pub struct Note { pub struct Note {
pub id: String, pub id: String,
pub parent_id: Option<String>, pub parent_id: Option<String>,
@ -14,7 +15,6 @@ pub struct Note {
pub children: Vec<Note>, pub children: Vec<Note>,
} }
#[derive(Clone, Debug)]
pub struct Page { pub struct Page {
pub slug: String, pub slug: String,
pub title: String, pub title: String,