Compare commits
No commits in common. "canon" and "private-store" have entirely different histories.
canon...private-store
@@ -1 +0,0 @@
-cognitive-complexity-threshold = 9
@@ -1,3 +1,4 @@
+use sqlx;
 use thiserror::Error;
 
 /// All the ways looking up objects can fail
@@ -1,5 +1,5 @@
 mod errors;
-mod parser;
+mod reference_parser;
 mod store;
 mod structs;
 
@@ -7,6 +7,7 @@ pub use crate::errors::NoteStoreError;
 pub use crate::store::NoteStore;
 pub use crate::structs::{Note, NoteKind, NoteRelationship, PageRelationship};
 
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -112,4 +113,6 @@ mod tests {
         assert_eq!(newpages[1].parent_id, Some(newroot.id.clone()));
         assert_eq!(newpages[2].parent_id, Some(newpages[1].id.clone()));
     }
+
+
 }
@@ -1,59 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-//! # Storage layer for Notesmachine
-//!
-//! This library implements the core functionality of Notesmachine and
-//! describes that functionality to a storage layer. There's a bit of
-//! intermingling in here which can't be helped, although it may make
-//! sense in the future to separate the decomposition of the note
-//! content into a higher layer.
-//!
-//! Notesmachine storage notes consist of two items: Note and Kasten.
-//! This distinction is somewhat arbitrary, as structurally these two
-//! items are stored in the same table.
-//!
-//! - Boxes have titles (and date metadata)
-//! - Notes have content and a type (and date metadata)
-//! - Notes are stored in boxes
-//! - Notes are positioned with respect to other notes.
-//! - There are two positions:
-//!   - Siblings, creating lists
-//!   - Children, creating trees like this one
-//! - Notes may have references (pointers) to other boxes
-//! - Notes may be moved around
-//! - Notes may be deleted
-//! - Boxes may be deleted
-//! - When a box is renamed, every reference to that box is auto-edited to
-//!   reflect the change. If a box is renamed to match an existing box, the
-//!   notes in both boxes are merged.
-//!
-//! Note-to-note relationships form trees, and are kept in a SQL database of
-//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
-//! `position` is a monotonic index on the parent (that is, every pair
-//! (`parent_id`, `position`) must be unique). The `relationship_type` is
-//! an enum and can specify that the relationship is *original*,
-//! *embedding*, or *referencing*. An embedded or referenced note may be
-//! read/write or read-only with respect to the original, but there is only
-//! one original note at any time.
-//!
-//! Note-to-box relationships form a graph, and are kept in the SQL database
-//! as a collection of *edges* from the note to the box (and naturally
-//! vice-versa).
-//!
-//! - Decision: When an original note is deleted, do all references and
-//!   embeddings also get deleted, or is the oldest one elevated to be a new
-//!   "original"? Or is that something the user may choose?
-//!
-//! - Decision: Should the merging issue be handled at this layer, or would
-//!   it make sense to move this to a higher layer, and only provide the
-//!   hooks for it here?
-//!
-
-mod references;
-use references::{build_page_titles, find_links};
-
-pub(crate) fn build_references(content: &str) -> Vec<String> {
-    build_page_titles(&find_links(content))
-}
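For orientation between the hunks: the tables implied by the module documentation above, and by the INSERT/SELECT strings later in this compare, could look roughly like the sketch below. The authoritative definitions live in sql/initialize_database.sql, which is not part of this view, so the column types and constraints here are assumptions.

    // Illustrative only: schema guessed from the query strings in this diff;
    // the real definition is the repository's sql/initialize_database.sql.
    pub const ASSUMED_SCHEMA: &str = r#"
    CREATE TABLE notes (
        id            TEXT PRIMARY KEY,   -- slug for pages, generated id for notes
        content       TEXT NOT NULL,      -- title for pages, body text for notes
        kind          TEXT NOT NULL,      -- NoteKind rendered as text
        creation_date TEXT NOT NULL,
        updated_date  TEXT NOT NULL,
        lastview_date TEXT NOT NULL
    );
    CREATE TABLE note_relationships (
        parent_id TEXT NOT NULL REFERENCES notes (id),
        note_id   TEXT NOT NULL REFERENCES notes (id),
        location  INTEGER NOT NULL,       -- the doc's "position", unique per parent
        kind      TEXT NOT NULL           -- the doc's relationship_type
    );
    CREATE TABLE note_page_relationships (
        note_id TEXT NOT NULL REFERENCES notes (id),
        page_id TEXT NOT NULL REFERENCES notes (id),
        kind    TEXT NOT NULL
    );
    "#;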
@@ -4,7 +4,7 @@ use lazy_static::lazy_static;
 use regex::bytes::Regex as BytesRegex;
 use regex::Regex;
 
-struct Finder(pub Vec<String>);
+pub struct Finder(pub Vec<String>);
 
 impl Finder {
     pub fn new() -> Self {
@@ -24,7 +24,7 @@ impl Finder {
     }
 }
 
-pub(super) fn find_links(document: &str) -> Vec<String> {
+fn find_links(document: &str) -> Vec<String> {
     let arena = Arena::new();
     let mut finder = Finder::new();
     let root = parse_document(&arena, document, &ComrakOptions::default());
@@ -50,48 +50,25 @@ pub(super) fn find_links(document: &str) -> Vec<String> {
     finder.0
 }
 
-// This function is for the camel and snake case handers.
 fn recase(title: &str) -> String {
     lazy_static! {
-        // Take every word that has a pattern of a capital letter
-        // followed by a lower case, and put a space between the
-        // capital and anything that preceeds it.
-
-        // TODO: Make Unicode aware.
         static ref RE_PASS1: Regex = Regex::new(r"(?P<s>.)(?P<n>[A-Z][a-z]+)").unwrap();
-
-        // Take every instance of a lower case letter or number,
-        // followed by a capital letter, and put a space between them.
-
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
         static ref RE_PASS2: Regex = Regex::new(r"(?P<s>[[:lower:]]|\d)(?P<n>[[:upper:]])").unwrap();
-
-        // Take every instance of a word suffixed by a number and put
-        // a space between them.
-
-        // TODO: Make Unicode aware. [[:lower:]] is an ASCII-ism.
-        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[[:lower:]])(?P<n>\d)").unwrap();
-
-        // Take every instance of the one-or-more-of the symbols listed, and
-        // replace them with a space. This function is Unicode-irrelevant,
-        // although there is a list of symbols in the backreference parser
-        // that may disagree.
-
-        // TODO: Examime backreference parser and determine if this is
-        // sufficient.
+        static ref RE_PASS4: Regex = Regex::new(r"(?P<s>[a-z])(?P<n>\d)").unwrap();
         static ref RE_PASS3: Regex = Regex::new(r"(:|_|-| )+").unwrap();
     }
 
     // This should panic if misused, so... :-)
     let pass = title.to_string();
     let pass = pass.strip_prefix("#").unwrap();
 
     let pass = RE_PASS1.replace_all(&pass, "$s $n");
     let pass = RE_PASS4.replace_all(&pass, "$s $n");
     let pass = RE_PASS2.replace_all(&pass, "$s $n");
     RE_PASS3.replace_all(&pass, " ").trim().to_string()
 }
 
-pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
+fn build_page_titles(references: &[String]) -> Vec<String> {
     references
         .iter()
         .filter_map(|s| match s.chars().next() {
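The recase passes in the hunk above run in a fixed order: strip the leading "#", apply the three splitting passes (RE_PASS1, RE_PASS4, RE_PASS2), then collapse ':', '_', '-', and space runs with RE_PASS3. A minimal sketch of the resulting behaviour, assuming the new right-hand RE_PASS4 pattern; it is illustrative and not part of the changeset:

    #[cfg(test)]
    mod recase_sketch {
        use super::*;

        // "#FooBarBaz2000": PASS1 yields "Foo BarBaz2000", PASS4 splits "z2000",
        // PASS2 splits the remaining "rB" boundary, PASS3 collapses separators.
        #[test]
        fn recase_splits_camel_case_numbers_and_separators() {
            assert_eq!(recase("#FooBarBaz2000"), "Foo Bar Baz 2000");
            assert_eq!(recase("#some_snake-case:title"), "some snake case title");
        }
    }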
@@ -104,6 +81,10 @@ pub(super) fn build_page_titles(references: &[String]) -> Vec<String> {
         .collect()
 }
 
+pub(crate) fn build_references(content: &str) -> Vec<String> {
+    build_page_titles(&find_links(content))
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
@@ -1,254 +0,0 @@
-// This Source Code Form is subject to the terms of the Mozilla Public
-// License, v. 2.0. If a copy of the MPL was not distributed with this
-// file, You can obtain one at http://mozilla.org/MPL/2.0/.
-
-//! # Storage layer for Notesmachine
-//!
-//! This library implements the core functionality of Notesmachine and
-//! describes that functionality to a storage layer. There's a bit of
-//! intermingling in here which can't be helped, although it may make
-//! sense in the future to separate the decomposition of the note
-//! content into a higher layer.
-//!
-//! Notesmachine storage notes consist of two items: Note and Page.
-//! This distinction is somewhat arbitrary, as structurally these two
-//! items are stored in the same table.
-//!
-//! - Boxes have titles (and date metadata)
-//! - Notes have content and a type (and date metadata)
-//! - Notes are stored in boxes
-//! - Notes are positioned with respect to other notes.
-//! - There are two positions:
-//!   - Siblings, creating lists
-//!   - Children, creating trees like this one
-//! - Notes may have references (pointers) to other boxes
-//! - Notes may be moved around
-//! - Notes may be deleted
-//! - Boxes may be deleted
-//! - When a box is renamed, every reference to that box is auto-edited to
-//!   reflect the change. If a box is renamed to match an existing box, the
-//!   notes in both boxes are merged.
-//!
-//! Note-to-note relationships form trees, and are kept in a SQL database of
-//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
-//! `position` is a monotonic index on the parent (that is, every pair
-//! (`parent_id`, `position`) must be unique). The `relationship_type` is
-//! an enum and can specify that the relationship is *original*,
-//! *embedding*, or *referencing*. An embedded or referenced note may be
-//! read/write or read-only with respect to the original, but there is only
-//! one original note at any time.
-//!
-//! Note-to-box relationships form a graph, and are kept in the SQL database
-//! as a collection of *edges* from the note to the box (and naturally
-//! vice-versa).
-//!
-//! - Decision: When an original note is deleted, do all references and
-//!   embeddings also get deleted, or is the oldest one elevated to be a new
-//!   "original"? Or is that something the user may choose?
-//!
-//! - Decision: Should the merging issue be handled at this layer, or would
-//!   it make sense to move this to a higher layer, and only provide the
-//!   hooks for it here?
-//!
-#![allow(clippy::len_zero)]
-use crate::errors::NoteStoreError;
-use crate::parser::build_references;
-use crate::store::private::*;
-use crate::structs::*;
-use sqlx::sqlite::SqlitePool;
-use std::sync::Arc;
-
-/// A handle to our Sqlite database.
-#[derive(Clone, Debug)]
-pub struct NoteStore(Arc<SqlitePool>);
-
-pub type NoteResult<T> = core::result::Result<T, NoteStoreError>;
-
-// After wrestling for a while with the fact that 'box' is a reserved
-// word in Rust, I decided to just go with Note (note) and Page
-// (box).
-
-impl NoteStore {
-    /// Initializes a new instance of the note store. Note that the
-    /// note store holds an Arc internally; this code is (I think)
-    /// safe to Send.
-    pub async fn new(url: &str) -> NoteResult<Self> {
-        let pool = SqlitePool::connect(url).await?;
-        Ok(NoteStore(Arc::new(pool)))
-    }
-    /// Erase all the data in the database and restore it
-    /// to its original empty form. Do not use unless you
-    /// really, really want that to happen.
-    pub async fn reset_database(&self) -> NoteResult<()> {
-        reset_database(&*self.0).await.map_err(NoteStoreError::DBError)
-    }
-
-    /// Fetch page by slug
-    ///
-    /// Supports the use case of the user navigating to a known place
-    /// via a bookmark or other URL. Since the title isn't clear from
-    /// the slug, the slug is insufficient to generate a new page, so
-    /// this use case says that in the event of a failure to find the
-    /// requested page, return a basic NotFound.
-    pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
-        let page = select_page_by_slug(&*self.0, slug).await?;
-        if page.is_empty() {
-            return Err(NoteStoreError::NotFound);
-        }
-
-        let note_id = &page[0].id;
-        let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
-        Ok((page, backreferences))
-    }
-
-    /// Fetch page by title
-    ///
-    /// The most common use case: the user is navigating by requesting
-    /// a page. The page either exists or it doesn't. If it
-    /// doesn't, we go out and make it. Since we know it doesn't exist,
-    /// we also know no backreferences to it exist, so in that case you
-    /// get back two empty vecs.
-    pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
-        if title.is_empty() {
-            return Err(NoteStoreError::NotFound);
-        }
-
-        let page = select_page_by_title(&*self.0, title).await?;
-        if page.len() > 0 {
-            let note_id = &page[0].id;
-            let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
-            return Ok((page, backreferences));
-        }
-
-        // Sanity check!
-        let references = build_references(&title);
-        if references.len() > 0 {
-            return Err(NoteStoreError::InvalidNoteStructure(
-                "Titles may not contain nested references.".to_string(),
-            ));
-        }
-
-        let mut tx = self.0.begin().await?;
-        let slug = generate_slug(&mut tx, title).await?;
-        let page = create_page(&title, &slug);
-        insert_note(&mut tx, &page).await?;
-        tx.commit().await?;
-
-        Ok((vec![Note::from(page)], vec![]))
-    }
-
-    pub async fn add_note(&self, note: &NewNote, parent_id: &str, location: Option<i64>) -> NoteResult<String> {
-        let kind = RelationshipKind::Direct;
-        let new_id = self.insert_note(note, parent_id, location, kind).await?;
-        Ok(new_id)
-    }
-
-    /// Move a note from one location to another.
-    pub async fn move_note(
-        &self,
-        note_id: &str,
-        old_parent_id: &str,
-        new_parent_id: &str,
-        new_location: i64,
-    ) -> NoteResult<()> {
-        let mut tx = self.0.begin().await?;
-
-        let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
-        let old_note_location = old_note.location;
-        let old_note_kind = old_note.kind;
-
-        delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
-        close_hole_for_deleted_note_relationship(&mut tx, &old_parent_id, old_note_location).await?;
-        let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?;
-        make_room_for_new_note_relationship(&mut tx, &new_parent_id, new_location).await?;
-        insert_note_to_note_relationship(&mut tx, &new_parent_id, &note_id, new_location, &old_note_kind).await?;
-        tx.commit().await?;
-        Ok(())
-    }
-
-    /// Updates a note's content. Completely rebuilds the note's
-    /// outgoing edge reference list every time.
-    pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
-        let references = build_references(&content);
-        let mut tx = self.0.begin().await?;
-        update_note_content(&mut tx, &note_id, &content).await?;
-        delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?;
-        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
-        insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids).await?;
-        tx.commit().await?;
-        Ok(())
-    }
-
-    /// Deletes a note. If the note's relationship drops to zero, all
-    /// references from that note to pages are also deleted.
-    pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
-        let mut tx = self.0.begin().await?;
-
-        let note_id = note_id.to_string();
-        let parent_id = note_parent_id.to_string();
-
-        if parent_id != note_id {
-            delete_note_to_note_relationship(&mut tx, &parent_id, &note_id).await?;
-        }
-        // The big one: if zero parents report having an interest in this note, then it,
-        // *and any sub-relationships*, go away.
-        if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
-            delete_note_to_page_relationships(&mut tx, &note_id).await?;
-            delete_note(&mut tx, &note_id).await?;
-        }
-        tx.commit().await?;
-        Ok(())
-    }
-}
-
-// The Private stuff
-
-impl NoteStore {
-    // Pretty much the most dangerous function in our system. Has to
-    // have ALL the error checking.
-    async fn insert_note(
-        &self,
-        note: &NewNote,
-        parent_id: &str,
-        location: Option<i64>,
-        kind: RelationshipKind,
-    ) -> NoteResult<String> {
-        if let Some(location) = location {
-            if location < 0 {
-                return Err(NoteStoreError::InvalidNoteStructure(
-                    "Add note: A negative location is not valid.".to_string(),
-                ));
-            }
-        }
-
-        if parent_id.is_empty() {
-            return Err(NoteStoreError::InvalidNoteStructure(
-                "Add note: A parent note ID is required.".to_string(),
-            ));
-        }
-
-        if note.id.is_empty() {
-            return Err(NoteStoreError::InvalidNoteStructure(
-                "Add note: Your note should have an id already".to_string(),
-            ));
-        }
-
-        if note.content.is_empty() {
-            return Err(NoteStoreError::InvalidNoteStructure(
-                "Add note: Empty notes are not supported.".to_string(),
-            ));
-        }
-
-        let references = build_references(&note.content);
-
-        let mut tx = self.0.begin().await?;
-        let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?;
-        insert_note(&mut tx, &note).await?;
-        make_room_for_new_note_relationship(&mut tx, &parent_id, location).await?;
-        insert_note_to_note_relationship(&mut tx, &parent_id, &note.id, location, &kind).await?;
-        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
-        insert_bulk_note_to_page_relationships(&mut tx, &note.id, &known_reference_ids).await?;
-        tx.commit().await?;
-        Ok(note.id.to_string())
-    }
-}
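As a usage sketch of the NoteStore surface removed above (and reintroduced under store/ later in this compare): hedged, written only against the signatures visible in this hunk rather than the repository's own tests.

    // Illustrative only; assumes an async context and the signatures shown above.
    async fn demo() -> Result<(), NoteStoreError> {
        let store = NoteStore::new("sqlite::memory:").await?;
        store.reset_database().await?;

        // Fetch-or-create by title: a missing page is created on first request,
        // so the backreference list comes back empty.
        let (page, backrefs) = store.get_page_by_title("Grocery List").await?;
        assert!(backrefs.is_empty());

        // add_note would attach a NewNote under the page's root note:
        // let note_id = store.add_note(&new_note, &page[0].id, None).await?;
        let _ = page;
        Ok(())
    }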
@@ -51,8 +51,8 @@
 //! hooks for it here?
 //!
 
-mod api;
 mod private;
+mod store;
 
-pub use crate::store::api::NoteResult;
-pub use crate::store::api::NoteStore;
+pub use crate::store::store::NoteResult;
+pub use crate::store::store::NoteStore;
@@ -3,8 +3,8 @@ use lazy_static::lazy_static;
 use regex::Regex;
 use slug::slugify;
 use sqlx::{sqlite::Sqlite, Acquire, Done, Executor, Transaction};
-use std::cmp;
 use std::collections::HashSet;
+use std::cmp;
 
 type SqlResult<T> = sqlx::Result<T>;
 
@@ -20,24 +20,25 @@ type SqlResult<T> = sqlx::Result<T>;
 // of the SQL queries.
 
 lazy_static! {
     static ref SELECT_PAGE_BY_TITLE_SQL: String = str::replace(
         include_str!("sql/select_notes_by_parameter.sql"),
         "QUERYPARAMETER",
         "notes.content"
     );
 }
 
 lazy_static! {
     static ref SELECT_PAGE_BY_ID_SQL: String = str::replace(
         include_str!("sql/select_notes_by_parameter.sql"),
         "QUERYPARAMETER",
         "notes.id"
     );
 }
 
+
 lazy_static! {
     static ref SELECT_NOTES_BACKREFERENCING_PAGE_SQL: &'static str =
         include_str!("sql/select_notes_backreferencing_page.sql");
 }
 
 // ___ _
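The QUERYPARAMETER substitution above stamps one bundled SQL file into several concrete queries. A small sketch of the mechanism, with an inline string standing in for sql/select_notes_by_parameter.sql (whose real contents are not shown in this compare):

    fn main() {
        // Stand-in template; illustrative only.
        let template = "SELECT * FROM notes WHERE QUERYPARAMETER = ?;";
        let by_title = str::replace(template, "QUERYPARAMETER", "notes.content");
        let by_id = str::replace(template, "QUERYPARAMETER", "notes.id");
        assert_eq!(by_title, "SELECT * FROM notes WHERE notes.content = ?;");
        assert_eq!(by_id, "SELECT * FROM notes WHERE notes.id = ?;");
    }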
@@ -48,10 +49,13 @@ lazy_static! {
 
 pub(crate) async fn reset_database<'a, E>(executor: E) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let initialize_sql = include_str!("sql/initialize_database.sql");
-    sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
+    sqlx::query(initialize_sql)
+        .execute(executor)
+        .await
+        .map(|_| ())
 }
 
 // ___ _ _ _ __ _
@@ -64,12 +68,16 @@ where
 // SQL operations are quite different between the first two and the last.
 async fn select_object_by_query<'a, E>(executor: E, query: &str, field: &str) -> SqlResult<Vec<Note>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let r: Vec<RowNote> = sqlx::query_as(query).bind(field).fetch_all(executor).await?;
-    Ok(r.into_iter().map(Note::from).collect())
+    let r: Vec<RowNote> = sqlx::query_as(query)
+        .bind(field)
+        .fetch_all(executor)
+        .await?;
+    Ok(r.into_iter().map(|z| Note::from(z)).collect())
 }
 
 // Select the requested page via its id. This is fairly rare;
 // pages should usually be picked up via their title, but if you're
 // navigating to an instance, this is how you specify the page in a
@@ -81,9 +89,9 @@ where
 // page.
 pub(crate) async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<Vec<Note>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     select_object_by_query(executor, &SELECT_PAGE_BY_ID_SQL, &slug).await
 }
 
 // Fetch the page by title. The return value is an array of Note
@@ -91,20 +99,23 @@ where
 // these into a tree-like object.
 pub(crate) async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<Vec<Note>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     select_object_by_query(executor, &SELECT_PAGE_BY_TITLE_SQL, &title).await
 }
 
 // Fetch all backreferences to a page. The return value is an array
 // of arrays, and inside each array is a list from a root page to
 // the note that references the give page. Clients may choose how
 // they want to display that collection.
-pub(crate) async fn select_backreferences_for_page<'a, E>(executor: E, page_id: &str) -> SqlResult<Vec<Note>>
+pub(crate) async fn select_backreferences_for_page<'a, E>(
+    executor: E,
+    page_id: &str,
+) -> SqlResult<Vec<Note>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     select_object_by_query(executor, &SELECT_NOTES_BACKREFERENCING_PAGE_SQL, &page_id).await
 }
 
 // ___ _ ___ _ _ _
@@ -116,52 +127,53 @@ where
 // Inserts a single note into the notes table. That is all.
 pub(crate) async fn insert_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<String>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let insert_one_note_sql = concat!(
         "INSERT INTO notes (id, content, kind, ",
         " creation_date, updated_date, lastview_date) ",
         "VALUES (?, ?, ?, ?, ?, ?);"
     );
 
-    sqlx::query(insert_one_note_sql)
+    let _ = sqlx::query(insert_one_note_sql)
         .bind(&note.id)
         .bind(&note.content)
         .bind(note.kind.to_string())
         .bind(&note.creation_date)
         .bind(&note.updated_date)
         .bind(&note.lastview_date)
         .execute(executor)
         .await?;
     Ok(note.id.clone())
 }
 
 // Inserts a single note into the notes table. That is all.
-pub(crate) async fn insert_bulk_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
+pub(crate) async fn bulk_insert_notes<'a, E>(executor: E, notes: &[NewNote]) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     if notes.is_empty() {
         return Ok(());
     }
 
-    let insert_pattern = "VALUES (?, ?, ?, ?, ?, ?)".to_string();
-    let insert_bulk_notes_sql = "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) "
-        .to_string()
-        + &[insert_pattern.as_str()].repeat(notes.len()).join(", ")
-        + &";".to_string();
+    let insert_pattern = "VALUES (?, ?, ?, ?, ?, ?)".to_string();
+    let insert_bulk_notes_sql =
+        "INSERT INTO notes (id, content, kind, creation_date, updated_date, lastview_date) ".to_string()
+        + &[insert_pattern.as_str()]
+            .repeat(notes.len())
+            .join(", ") + &";".to_string();
 
     let mut request = sqlx::query(&insert_bulk_notes_sql);
     for note in notes {
         request = request
             .bind(&note.id)
             .bind(&note.content)
             .bind(note.kind.to_string())
             .bind(&note.creation_date)
             .bind(&note.updated_date)
             .bind(&note.lastview_date);
     }
     request.execute(executor).await.map(|_| ())
 }
 
 // ___ _ _ _ _ __ _
@@ -173,21 +185,21 @@ where
 // Given a possible slug, find the slug with the highest
 // uniquification number, and return that number, if any.
 pub(crate) fn find_maximal_slug_number(slugs: &[JustId]) -> Option<u32> {
     lazy_static! {
         static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
     }
 
     if slugs.is_empty() {
         return None;
     }
 
     let mut slug_counters: Vec<u32> = slugs
         .iter()
         .filter_map(|slug| RE_CAP_NUM.captures(&slug.id))
         .map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
         .collect();
     slug_counters.sort_unstable();
     slug_counters.pop()
 }
 
 // Given an initial string and an existing collection of slugs,
@@ -196,38 +208,38 @@ pub(crate) fn find_maximal_slug_number(slugs: &[JustId]) -> Option<u32> {
 // isn't all that.
 pub(crate) async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     lazy_static! {
         static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
         static ref SLUG_FINDER_SQL: String = format!(
             "SELECT id FROM notes WHERE kind = '{}' AND id LIKE '?%';",
             NoteKind::Page.to_string()
         );
     }
 
     let initial_slug = slugify(title);
     let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
     let similar_slugs: Vec<JustId> = sqlx::query_as(&SLUG_FINDER_SQL)
         .bind(&*sample_slug)
         .fetch_all(executor)
         .await?;
     let maximal_slug_number = find_maximal_slug_number(&similar_slugs);
     Ok(match maximal_slug_number {
         None => initial_slug,
         Some(slug_number) => format!("{}-{}", initial_slug, slug_number + 1),
     })
 }
 
 // A helper function: given a title and a slug, create a PageType
 // note.
 pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
     NewNoteBuilder::default()
         .id(slug.to_string())
         .content(title.to_string())
         .kind(NoteKind::Page)
         .build()
         .unwrap()
 }
 
 // _ _ _ _ ___ _ _ _
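Slug uniquification, seen in isolation: find_maximal_slug_number keeps only ids that end in "-<number>" and returns the largest such number, which generate_slug then increments. A standalone sketch using the same capture pattern; not part of the changeset:

    use regex::Regex;

    fn maximal_suffix(ids: &[&str]) -> Option<u32> {
        let re = Regex::new(r"-(\d+)$").unwrap();
        ids.iter()
            .filter_map(|id| re.captures(id))
            .map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
            .max()
    }

    fn main() {
        // "my-page" has no numeric suffix, so only 2 and 7 are considered;
        // generate_slug would then emit "my-page-8" for another "My Page".
        assert_eq!(maximal_suffix(&["my-page", "my-page-2", "my-page-7"]), Some(7));
        assert_eq!(maximal_suffix(&["my-page"]), None);
    }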
@@ -236,22 +248,26 @@ pub(crate) fn create_page(title: &str, slug: &str) -> NewNote {
 // \___/| .__/\__,_\__,_|\__\___| \___/|_||_\___| |_|\_\___/\__\___|
 // |_|
 
-pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: &str, content: &str) -> SqlResult<()>
+pub(crate) async fn update_note_content<'a, E>(
+    executor: E,
+    note_id: &NoteId,
+    content: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
     let count = sqlx::query(update_note_content_sql)
         .bind(content)
-        .bind(note_id)
+        .bind(&**note_id)
         .execute(executor)
         .await?
         .rows_affected();
 
     match count {
         1 => Ok(()),
         _ => Err(sqlx::Error::RowNotFound),
     }
 }
 
 // ___ _ _ ___ _ _ _ ___ _ _ _ _ _
@@ -261,25 +277,25 @@ where
 // |_|
 
 pub(crate) async fn select_note_to_note_relationship<'a, E>(
     executor: E,
     parent_id: &str,
     note_id: &str,
 ) -> SqlResult<NoteRelationship>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let get_note_to_note_relationship_sql = concat!(
         "SELECT parent_id, note_id, location, kind ",
         "FROM note_relationships ",
         "WHERE parent_id = ? and note_id = ? ",
         "LIMIT 1"
     );
     let s: NoteRelationshipRow = sqlx::query_as(get_note_to_note_relationship_sql)
         .bind(parent_id)
         .bind(note_id)
         .fetch_one(executor)
         .await?;
     Ok(NoteRelationship::from(s))
 }
 
 // _ _ _ _ _ _ _ ___ _ _ _ _ _
@@ -289,80 +305,83 @@ where
 // |_|
 
 pub(crate) async fn insert_note_to_note_relationship<'a, E>(
     executor: E,
     parent_id: &str,
     note_id: &str,
     location: i64,
     kind: &RelationshipKind,
 ) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let insert_note_to_note_relationship_sql = concat!(
         "INSERT INTO note_relationships (parent_id, note_id, location, kind) ",
         "values (?, ?, ?, ?)"
     );
 
-    sqlx::query(insert_note_to_note_relationship_sql)
+    let _ = sqlx::query(insert_note_to_note_relationship_sql)
         .bind(parent_id)
         .bind(note_id)
         .bind(&location)
         .bind(kind.to_string())
         .execute(executor)
         .await?;
     Ok(())
 }
 
-pub(crate) async fn make_room_for_new_note_relationship<'a, E>(
+pub(crate) async fn make_room_for_new_note<'a, E>(
     executor: E,
     parent_id: &str,
     location: i64,
 ) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let make_room_for_new_note_sql = concat!(
         "UPDATE note_relationships ",
         "SET location = location + 1 ",
         "WHERE location >= ? and parent_id = ?;"
     );
 
-    sqlx::query(make_room_for_new_note_sql)
+    let _ = sqlx::query(make_room_for_new_note_sql)
         .bind(&location)
         .bind(parent_id)
         .execute(executor)
         .await?;
     Ok(())
 }
 
 pub(crate) async fn determine_max_child_location_for_note<'a, E>(
     executor: E,
     note_id: &str,
     comp_loc: Option<i64>,
 ) -> SqlResult<i64>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let row_count = assert_max_child_location_for_note(executor, note_id).await? + 1;
     Ok(match comp_loc {
         Some(location) => cmp::min(row_count, location),
-        None => row_count,
+        None => row_count
     })
 }
 
-pub(crate) async fn assert_max_child_location_for_note<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
+pub(crate) async fn assert_max_child_location_for_note<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<i64>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let assert_max_child_location_for_note_sql =
         "SELECT MAX(location) AS count FROM note_relationships WHERE parent_id = ?;";
 
     let count: RowCount = sqlx::query_as(assert_max_child_location_for_note_sql)
         .bind(note_id)
         .fetch_one(executor)
         .await?;
 
     Ok(count.count)
 }
 
 // _ _ _ _ _ __ _ ___ _ _ _ _ _
@@ -372,49 +391,57 @@
 // |_|
 
 pub(crate) async fn insert_bulk_note_to_page_relationships<'a, E>(
     executor: E,
     note_id: &str,
     references: &[String],
 ) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     if references.is_empty() {
         return Ok(());
     }
 
     let insert_pattern = format!("(?, ?, '{}')", PageRelationshipKind::Page.to_string());
-    let insert_note_page_references_sql = "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES "
-        .to_string()
-        + &[insert_pattern.as_str()].repeat(references.len()).join(", ")
-        + &";".to_string();
+    let insert_note_page_references_sql =
+        "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES ".to_string()
+        + &[insert_pattern.as_str()]
+            .repeat(references.len())
+            .join(", ") + &";".to_string();
 
     let mut request = sqlx::query(&insert_note_page_references_sql);
     for reference in references {
         request = request.bind(note_id).bind(reference);
     }
 
     request.execute(executor).await.map(|_| ())
 }
 
-pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
+pub(crate) async fn delete_bulk_note_to_page_relationships<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let delete_note_to_page_relationship_sql = "DELETE FROM note_page_relationships WHERE and note_id = ?;";
-    sqlx::query(delete_note_to_page_relationship_sql)
+    let delete_note_to_page_relationship_sql =
+        "DELETE FROM note_page_relationships WHERE and note_id = ?;";
+    let _ = sqlx::query(delete_note_to_page_relationship_sql)
        .bind(note_id)
        .execute(executor)
        .await?;
     Ok(())
 }
 
 // Given the references supplied, and the references found in the datastore,
 // return a list of the references not found in the datastore.
-pub(crate) fn diff_references(references: &[String], found_references: &[PageTitle]) -> Vec<String> {
+pub(crate) fn diff_references(
+    references: &[String],
+    found_references: &[PageTitle],
+) -> Vec<String> {
     let all: HashSet<String> = references.iter().cloned().collect();
     let found: HashSet<String> = found_references.iter().map(|r| r.content.clone()).collect();
     all.difference(&found).cloned().collect()
 }
 
 // ___ _ _ _ _ __ _ ___ _ _ _ _ _
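The statement builder in insert_bulk_note_to_page_relationships expands to one placeholder group per reference. A sketch of the resulting SQL for two references; the 'page' literal is an assumed rendering of PageRelationshipKind::Page:

    fn main() {
        let references = vec!["page-one", "page-two"];
        let insert_pattern = "(?, ?, 'page')".to_string();
        let sql = "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES ".to_string()
            + &[insert_pattern.as_str()].repeat(references.len()).join(", ")
            + ";";
        assert_eq!(
            sql,
            "INSERT INTO note_page_relationships (note_id, page_id, kind) VALUES \
             (?, ?, 'page'), (?, ?, 'page');"
        );
    }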
@@ -427,31 +454,32 @@ pub(crate) fn diff_references(references: &[String], found_references: &[PageTitle]) -> Vec<String> {
 // list of titles. Used by insert_note and update_note_content to
 // find the ids of all the references in a given document.
 pub(crate) async fn find_all_page_from_list_of_references<'a, E>(
     executor: E,
     references: &[String],
 ) -> SqlResult<Vec<PageTitle>>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     if references.is_empty() {
         return Ok(vec![]);
     }
 
     lazy_static! {
         static ref SELECT_ALL_REFERENCES_FOR_SQL_BASE: String = format!(
             "SELECT id, content FROM notes WHERE kind = '{}' AND content IN (",
             NoteKind::Page.to_string()
         );
     }
 
-    let find_all_references_for_sql =
-        SELECT_ALL_REFERENCES_FOR_SQL_BASE.to_string() + &["?"].repeat(references.len()).join(",") + &");".to_string();
+    let find_all_references_for_sql = SELECT_ALL_REFERENCES_FOR_SQL_BASE.to_string()
+        + &["?"].repeat(references.len()).join(",")
+        + &");".to_string();
 
     let mut request = sqlx::query_as(&find_all_references_for_sql);
     for id in references.iter() {
         request = request.bind(id);
     }
     request.fetch_all(executor).await
 }
 
 // ___ _ _
@@ -461,116 +489,121 @@ where
 //
 
 pub(crate) async fn delete_note_to_note_relationship<'a, E>(
     executor: E,
     parent_id: &str,
     note_id: &str,
 ) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let delete_note_to_note_relationship_sql = concat!(
         "DELETE FROM note_relationships ",
         "WHERE parent_id = ? and note_id = ? "
     );
 
     let count = sqlx::query(delete_note_to_note_relationship_sql)
         .bind(parent_id)
         .bind(note_id)
         .execute(executor)
         .await?
         .rows_affected();
 
     match count {
         1 => Ok(()),
         _ => Err(sqlx::Error::RowNotFound),
     }
 }
 
-pub(crate) async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
+pub(crate) async fn delete_note_to_page_relationships<'a, E>(
+    executor: E,
+    note_id: &str,
+) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     lazy_static! {
         static ref DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL: String = format!(
             "DELETE FROM note_relationships WHERE kind in ('{}', '{}') AND parent_id = ?;",
             PageRelationshipKind::Page.to_string(),
             PageRelationshipKind::Unacked.to_string()
         );
     }
 
-    sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL)
+    let _ = sqlx::query(&DELETE_NOTE_TO_PAGE_RELATIONSHIPS_SQL)
         .bind(note_id)
         .execute(executor)
         .await?;
     Ok(())
 }
 
 pub(crate) async fn delete_note<'a, E>(executor: E, note_id: &str) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let delete_note_sql = "DELETE FROM notes WHERE note_id = ?";
 
     let count = sqlx::query(delete_note_sql)
         .bind(note_id)
         .execute(executor)
         .await?
         .rows_affected();
 
     match count {
         1 => Ok(()),
         _ => Err(sqlx::Error::RowNotFound),
     }
 }
 
 // After removing a note, recalculate the position of all notes under
 // the parent note, such that there order is now completely
 // sequential.
-pub(crate) async fn close_hole_for_deleted_note_relationship<'a, E>(
+pub(crate) async fn close_hole_for_deleted_note<'a, E>(
     executor: E,
     parent_id: &str,
     location: i64,
 ) -> SqlResult<()>
 where
     E: Executor<'a, Database = Sqlite>,
 {
     let close_hole_for_deleted_note_sql = concat!(
         "UPDATE note_relationships ",
         "SET location = location - 1 ",
         "WHERE location > ? and parent_id = ?;"
     );
 
-    sqlx::query(close_hole_for_deleted_note_sql)
+    let _ = sqlx::query(close_hole_for_deleted_note_sql)
         .bind(&location)
         .bind(parent_id)
         .execute(executor)
         .await?;
     Ok(())
 }
 
 
 // Given a list of references found in the content, generate the
 // references that do not previously exist, returning all found
 // references. NOTE: The function signature for this is for a
 // transaction, and uses a nested transaction.
 pub(crate) async fn validate_or_generate_all_found_references(
     txi: &mut Transaction<'_, Sqlite>,
-    references: &[String],
+    references: &[String]
 ) -> SqlResult<Vec<String>> {
     let mut tx = txi.begin().await?;
 
-    let found_references = find_all_page_from_list_of_references(&mut tx, &references).await?;
-    let new_references = diff_references(&references, &found_references);
-    let mut new_page: Vec<NewNote> = vec![];
-    for one_reference in new_references.iter() {
-        let slug = generate_slug(&mut tx, one_reference).await?;
-        new_page.push(create_page(&one_reference, &slug));
-    }
-    insert_bulk_notes(&mut tx, &new_page).await?;
+    let found_references =
+        find_all_page_from_list_of_references(&mut tx, &references).await?;
+    let new_references = diff_references(&references, &found_references);
+    let mut new_page: Vec<NewNote> = vec![];
+    for one_reference in new_references.iter() {
+        let slug = generate_slug(&mut tx, one_reference).await?;
+        new_page.push(create_page(&one_reference, &slug));
+    }
+    let _ = bulk_insert_notes(&mut tx, &new_page).await?;
 
     let mut all_reference_ids: Vec<String> = found_references.iter().map(|r| r.id.clone()).collect();
     all_reference_ids.append(&mut new_page.iter().map(|r| r.id.clone()).collect());
     tx.commit().await?;
     Ok(all_reference_ids)
 }
 
 // __ __ _
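Stripped of the database, the reconciliation in validate_or_generate_all_found_references looks up the titles that already exist, mints pages for the rest, and returns the ids of both sets. A database-free sketch with plain vectors standing in for the queries; all names here are illustrative:

    use std::collections::HashSet;

    // (id, content) pairs stand in for the PageTitle rows returned by
    // find_all_page_from_list_of_references.
    fn reconcile(references: &[String], existing: &[(String, String)]) -> Vec<String> {
        let found: HashSet<&str> = existing.iter().map(|(_, content)| content.as_str()).collect();
        let mut ids: Vec<String> = existing.iter().map(|(id, _)| id.clone()).collect();
        for title in references {
            if !found.contains(title.as_str()) {
                // The real code calls generate_slug, create_page, and bulk_insert_notes here.
                ids.push(title.to_lowercase().replace(' ', "-"));
            }
        }
        ids
    }

    fn main() {
        let refs = vec!["Existing Page".to_string(), "Brand New Page".to_string()];
        let existing = vec![("existing-page".to_string(), "Existing Page".to_string())];
        assert_eq!(reconcile(&refs, &existing), vec!["existing-page", "brand-new-page"]);
    }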
@@ -583,12 +616,13 @@ pub(crate) async fn validate_or_generate_all_found_references(
 
 pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_id: &str) -> SqlResult<i64>
 where
     E: Executor<'a, Database = Sqlite>,
 {
-    let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
+    let count_existing_note_relationships_sql =
+        "SELECT COUNT(*) as count FROM note_relationships WHERE note_id = ?;";
     let count: RowCount = sqlx::query_as(&count_existing_note_relationships_sql)
         .bind(note_id)
         .fetch_one(executor)
         .await?;
     Ok(count.count)
 }

@@ -0,0 +1,272 @@
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at http://mozilla.org/MPL/2.0/.

//! # Storage layer for Notesmachine
//!
//! This library implements the core functionality of Notesmachine and
//! describes that functionality to a storage layer. There's a bit of
//! intermingling in here which can't be helped, although it may make
//! sense in the future to separate the decomposition of the note
//! content into a higher layer.
//!
//! Notesmachine storage notes consist of two items: Note and Page.
//! This distinction is somewhat arbitrary, as structurally these two
//! items are stored in the same table.
//!
//! - Boxes have titles (and date metadata)
//! - Notes have content and a type (and date metadata)
//! - Notes are stored in boxes
//! - Notes are positioned with respect to other notes.
//!   - There are two positions:
//!     - Siblings, creating lists
//!     - Children, creating trees like this one
//! - Notes may have references (pointers) to other boxes
//! - Notes may be moved around
//! - Notes may be deleted
//! - Boxes may be deleted
//! - When a box is renamed, every reference to that box is auto-edited to
//!   reflect the change. If a box is renamed to match an existing box, the
//!   notes in both boxes are merged.
//!
//! Note-to-note relationships form trees, and are kept in a SQL database of
//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
//! `position` is a monotonic index on the parent (that is, every pair
//! (`parent_id`, `position`) must be unique). The `relationship_type` is
//! an enum and can specify that the relationship is *original*,
//! *embedding*, or *referencing*. An embedded or referenced note may be
//! read/write or read-only with respect to the original, but there is only
//! one original note at any time.
//!
//! Note-to-box relationships form a graph, and are kept in the SQL database
//! as a collection of *edges* from the note to the box (and naturally
//! vice-versa).
//!
//! - Decision: When an original note is deleted, do all references and
//!   embeddings also get deleted, or is the oldest one elevated to be a new
//!   "original"? Or is that something the user may choose?
//!
//! - Decision: Should the merging issue be handled at this layer, or would
//!   it make sense to move this to a higher layer, and only provide the
//!   hooks for it here?
//!
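The relationship table sketched in this comment is created by the project's migrations, which are not part of this change. An illustrative version of its shape, using the column names that appear in the queries elsewhere in the diff (`location` standing in for the comment's `position`, `kind` for `relationship_type`), might look like this:

// Sketch only: an assumed DDL for the note-to-note relationship table; the
// real schema lives in the migrations and may differ.
const ILLUSTRATIVE_NOTE_RELATIONSHIPS_DDL: &str = r#"
CREATE TABLE note_relationships (
    parent_id  TEXT    NOT NULL,
    note_id    TEXT    NOT NULL,
    location   INTEGER NOT NULL,
    kind       TEXT    NOT NULL,   -- original / embedding / referencing
    UNIQUE (parent_id, location)   -- the position is unique per parent
);
"#;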
use crate::errors::NoteStoreError;
use crate::reference_parser::build_references;
use crate::store::private::*;
use crate::structs::*;
use sqlx::sqlite::SqlitePool;
use std::sync::Arc;

/// A handle to our Sqlite database.
#[derive(Clone, Debug)]
pub struct NoteStore(Arc<SqlitePool>);

pub type NoteResult<T> = core::result::Result<T, NoteStoreError>;

// After wrestling for a while with the fact that 'box' is a reserved
// word in Rust, I decided to just go with Note (note) and Page
// (box).

impl NoteStore {
    /// Initializes a new instance of the note store. Note that the
    /// note store holds an Arc internally; this code is (I think)
    /// safe to Send.
    pub async fn new(url: &str) -> NoteResult<Self> {
        let pool = SqlitePool::connect(url).await?;
        Ok(NoteStore(Arc::new(pool)))
    }
    /// Erase all the data in the database and restore it
    /// to its original empty form. Do not use unless you
    /// really, really want that to happen.
    pub async fn reset_database(&self) -> NoteResult<()> {
        reset_database(&*self.0)
            .await
            .map_err(NoteStoreError::DBError)
    }

    /// Fetch page by slug
    ///
    /// Supports the use case of the user navigating to a known place
    /// via a bookmark or other URL. Since the title isn't clear from
    /// the slug, the slug is insufficient to generate a new page, so
    /// this use case says that in the event of a failure to find the
    /// requested page, return a basic NotFound.
    pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
        let page = select_page_by_slug(&*self.0, slug).await?;
        if page.is_empty() {
            return Err(NoteStoreError::NotFound);
        }

        let note_id = &page[0].id;
        let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
        Ok((page, backreferences))
    }

    /// Fetch page by title
    ///
    /// The most common use case: the user is navigating by requesting
    /// a page. The page either exists or it doesn't. If it
    /// doesn't, we go out and make it. Since we know it doesn't exist,
    /// we also know no backreferences to it exist, so in that case you
    /// get back two empty vecs.
    pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(Vec<Note>, Vec<Note>)> {
        if title.len() == 0 {
            return Err(NoteStoreError::NotFound);
        }

        let page = select_page_by_title(&*self.0, title).await?;
        if page.len() > 0 {
            let note_id = &page[0].id;
            let backreferences = select_backreferences_for_page(&*self.0, &note_id).await?;
            return Ok((page, backreferences));
        }

        // Sanity check!
        let references = build_references(&title);
        if references.len() > 0 {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Titles may not contain nested references.".to_string(),
            ));
        }

        let mut tx = self.0.begin().await?;
        let slug = generate_slug(&mut tx, title).await?;
        let page = create_page(&title, &slug);
        let _ = insert_note(&mut tx, &page).await?;
        tx.commit().await?;

        Ok((vec![Note::from(page)], vec![]))
    }

    pub async fn add_note(
        &self,
        note: &NewNote,
        parent_id: &str,
        location: Option<i64>,
    ) -> NoteResult<String> {
        let kind = RelationshipKind::Direct;
        let new_id = self.insert_note(note, parent_id, location, kind).await?;
        Ok(new_id)
    }

    /// Move a note from one location to another.
    pub async fn move_note(
        &self,
        note_id: &str,
        old_parent_id: &str,
        new_parent_id: &str,
        new_location: i64,
    ) -> NoteResult<()> {
        let mut tx = self.0.begin().await?;

        let old_note = select_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
        let old_note_location = old_note.location;
        let old_note_kind = old_note.kind;

        let _ = delete_note_to_note_relationship(&mut tx, &old_parent_id, &note_id).await?;
        let _ = close_hole_for_deleted_note(&mut tx, &old_parent_id, old_note_location).await?;
        let new_location = determine_max_child_location_for_note(&mut tx, &new_parent_id, Some(new_location)).await?;
        let _ = make_room_for_new_note(&mut tx, &new_parent_id, new_location).await?;
        let _ = insert_note_to_note_relationship(
            &mut tx,
            &new_parent_id,
            &note_id,
            new_location,
            &old_note_kind,
        )
        .await?;
        tx.commit().await?;
        Ok(())
    }

    /// Updates a note's content. Completely rebuilds the note's
    /// outgoing edge reference list every time.
    pub async fn update_note_content(&self, note_id: &str, content: &str) -> NoteResult<()> {
        let references = build_references(&content);
        let note_id = NoteId(note_id.to_string());

        let mut tx = self.0.begin().await?;
        let _ = update_note_content(&mut tx, &note_id, &content).await?;
        let _ = delete_bulk_note_to_page_relationships(&mut tx, &note_id).await?;
        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
        let _ = insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids)
            .await?;
        tx.commit().await?;
        Ok(())
    }

    /// Deletes a note. If the note's relationship drops to zero, all
    /// references from that note to pages are also deleted.
    pub async fn delete_note(&self, note_id: &str, note_parent_id: &str) -> NoteResult<()> {
        let mut tx = self.0.begin().await?;
        let note_id = NoteId(note_id.to_string());
        let parent_id = ParentId(note_parent_id.to_string());

        if *parent_id != *note_id {
            let _ = delete_note_to_note_relationship(&mut tx, &parent_id, &note_id).await?;
        }
        // The big one: if zero parents report having an interest in this note, then it,
        // *and any sub-relationships*, go away.
        if count_existing_note_relationships(&mut tx, &note_id).await? == 0 {
            let _ = delete_note_to_page_relationships(&mut tx, &note_id).await?;
            let _ = delete_note(&mut tx, &note_id).await?;
        }
        tx.commit().await?;
        Ok(())
    }
}

// The Private stuff

impl NoteStore {
    // Pretty much the most dangerous function in our system. Has to
    // have ALL the error checking.
    async fn insert_note(
        &self,
        note: &NewNote,
        parent_id: &str,
        location: Option<i64>,
        kind: RelationshipKind,
    ) -> NoteResult<String> {
        if let Some(location) = location {
            if location < 0 {
                return Err(NoteStoreError::InvalidNoteStructure(
                    "Add note: A negative location is not valid.".to_string(),
                ));
            }
        }

        if parent_id.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: A parent note ID is required.".to_string(),
            ));
        }

        if note.id.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: Your note should have an id already".to_string(),
            ));
        }

        if note.content.is_empty() {
            return Err(NoteStoreError::InvalidNoteStructure(
                "Add note: Empty notes are not supported.".to_string(),
            ));
        }

        let references = build_references(&note.content);

        let mut tx = self.0.begin().await?;
        let location = determine_max_child_location_for_note(&mut tx, parent_id, location).await?;
        let note_id = NoteId(note.id.clone());
        insert_note(&mut tx, &note).await?;
        make_room_for_new_note(&mut tx, &parent_id, location).await?;
        insert_note_to_note_relationship(&mut tx, &parent_id, &note_id, location, &kind).await?;
        let known_reference_ids = validate_or_generate_all_found_references(&mut tx, &references).await?;
        let _ = insert_bulk_note_to_page_relationships(&mut tx, &note_id, &known_reference_ids)
            .await?;
        tx.commit().await?;
        Ok(note_id.to_string())
    }
}
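Taken together, the public methods above are the whole surface the front end needs: open a store, fetch or create a page by title, and hang notes off it. A hypothetical end-to-end sketch (crate name and runtime wiring are assumptions; only the NoteStore calls come from this change):

// Sketch only: exercises the public NoteStore API shown above. Assumes a
// tokio runtime and that `reset_database` leaves behind an empty but usable
// schema, as its doc comment implies.
use notesmachine_storage::NoteStore; // crate name is an assumption

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = NoteStore::new("sqlite::memory:").await?;
    store.reset_database().await?;

    // Unknown titles are created on the fly, so this returns a page either way.
    let (page, backreferences) = store.get_page_by_title("Reading List").await?;
    println!("{} note(s), {} backreference(s)", page.len(), backreferences.len());
    Ok(())
}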

@@ -1,5 +1,7 @@
use chrono::{DateTime, Utc};
use derive_builder::Builder;
+use friendly_id;
+use shrinkwraprs::Shrinkwrap;
use sqlx::{self, FromRow};

// Page is German for "Box," and is used both because this is

@@ -48,6 +50,12 @@ macro_rules! build_conversion_enums {
    };
}

+#[derive(Shrinkwrap, Clone)]
+pub(crate) struct NoteId(pub String);
+
+#[derive(Shrinkwrap, Clone)]
+pub(crate) struct ParentId(pub String);
+
// The different kinds of objects we support.

build_conversion_enums!(