Hey, all the (current) tests are passing. That's... kinda nifty.
parent 72fb3b11ee
commit 8c8352259a
@@ -1,6 +1,7 @@
 mod errors;
 mod reference_parser;
 mod store;
+mod store_private;
 mod structs;
 
 pub use crate::errors::NoteStoreError;
@@ -58,8 +59,8 @@ mod tests {
         assert!(newpage.deleted_date.is_none());
     }
 
-    fn make_new_note(content: &str) -> row_structs::NewNote {
-        row_structs::NewNoteBuilder::default()
+    fn make_new_note(content: &str) -> structs::NewNote {
+        structs::NewNoteBuilder::default()
             .content(content.to_string())
             .build()
             .unwrap()
@@ -87,12 +88,12 @@ mod tests {
         let note3 = make_new_note("3");
         let note3_uuid = storagepool.insert_nested_note(&note3, &note1_uuid, 0).await;
         assert!(note3_uuid.is_ok(), "{:?}", note3_uuid);
-        let note3_uuid = note3_uuid.unwrap();
+        let _note3_uuid = note3_uuid.unwrap();
 
         let note4 = make_new_note("4");
         let note4_uuid = storagepool.insert_nested_note(&note4, &note2_uuid, 0).await;
         assert!(note4_uuid.is_ok(), "{:?}", note4_uuid);
-        let note4_uuid = note4_uuid.unwrap();
+        let _note4_uuid = note4_uuid.unwrap();
 
         let newpageresult = storagepool.get_page_by_title(&title).await;
         let (newpage, newnotes) = newpageresult.unwrap();
@@ -24,9 +24,7 @@ impl Finder {
     }
 }
 
-/// Given a content block, return a list of all the page references found
-/// within the block. The references may need further massaging.
-pub(crate) fn find_links(document: &str) -> Vec<String> {
+fn find_links(document: &str) -> Vec<String> {
     let arena = Arena::new();
     let mut finder = Finder::new();
     let root = parse_document(&arena, document, &ComrakOptions::default());
@@ -40,7 +38,9 @@ pub(crate) fn find_links(document: &str) -> Vec<String> {
             NodeValue::Text(ref text) => Some(
                 RE_REFERENCES
                     .captures_iter(text)
-                    .map(|t| String::from_utf8_lossy(&t.get(1).unwrap().as_bytes()).to_string())
+                    .filter_map(|t| t.get(1))
+                    .map(|t| String::from_utf8_lossy(t.as_bytes()).to_string())
+                    .filter(|s| !s.is_empty())
                     .collect(),
             ),
             _ => None,
@@ -71,13 +71,13 @@ fn recase(title: &str) -> String {
 fn build_page_titles(references: &[String]) -> Vec<String> {
     references
         .iter()
-        .map(|s| match s.chars().next() {
-            Some('#') => recase(s),
-            Some('[') => s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string(),
-            Some(_) => s.clone(),
-            _ => "".to_string(),
+        .filter_map(|s| match s.chars().next() {
+            Some('#') => Some(recase(s)),
+            Some('[') => Some(s.strip_prefix("[[").unwrap().strip_suffix("]]").unwrap().to_string()),
+            Some(_) => Some(s.clone()),
+            _ => None,
         })
-        .filter(|s| s.is_empty())
+        .filter(|s| !s.is_empty())
         .collect()
 }
 
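The build_page_titles change above is the real bug fix in this file: the old pipeline mapped missing matches to "" and then kept only empty strings (`.filter(|s| s.is_empty())`). A hypothetical test sketch, not part of this commit, of what the corrected pipeline should do (it assumes it lives inside reference_parser's test module and that recasing a tag yields a non-empty title):

    #[test]
    fn build_page_titles_keeps_real_references_and_drops_empties() {
        let refs = vec![
            "#some-tag".to_string(),      // recased into a title
            "[[Page Title]]".to_string(), // unwrapped to "Page Title"
            "Plain".to_string(),          // passed through unchanged
            "".to_string(),               // now filtered out entirely
        ];
        let titles = build_page_titles(&refs);
        assert_eq!(titles.len(), 3);
        assert!(titles.contains(&"Page Title".to_string()));
    }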
@@ -1,46 +1,89 @@
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+//! # Storage layer for Notesmachine
+//!
+//! This library implements the core functionality of Notesmachine and
+//! describes that functionality to a storage layer. There's a bit of
+//! intermingling in here which can't be helped, although it may make sense
+//! in the future to separate the decomposition of the note content into a
+//! higher layer.
+//!
+//! Notesmachine storage notes consist of two items: Zettel and Kasten,
+//! which are German for "Note" and "Box". Here are the basic rules:
+//!
+//! - Boxes have titles (and date metadata)
+//! - Notes have content and a type (and date metadata)
+//! - Notes are stored in boxes
+//! - Notes are positioned with respect to other notes.
+//! - There are two positions:
+//!   - Siblings, creating lists
+//!   - Children, creating trees like this one
+//! - Notes may have references (pointers) to other boxes
+//! - Notes may be moved around
+//! - Notes may be deleted
+//! - Boxes may be deleted
+//! - When a box is renamed, every reference to that box is auto-edited to
+//!   reflect the change. If a box is renamed to match an existing box, the
+//!   notes in both boxes are merged.
+//!
+//! Note-to-note relationships form trees, and are kept in a SQL database of
+//! (`parent_id`, `child_id`, `position`, `relationship_type`). The
+//! `position` is a monotonic index on the parent (that is, every pair
+//! (`parent_id`, `position`) must be unique). The `relationship_type` is
+//! an enum and can specify that the relationship is *original*,
+//! *embedding*, or *referencing*. An embedded or referenced note may be
+//! read/write or read-only with respect to the original, but there is only
+//! one original note at any time.
+//!
+//! Note-to-box relationships form a graph, and are kept in the SQL database
+//! as a collection of *edges* from the note to the box (and naturally
+//! vice-versa).
+//!
+//! - Decision: When an original note is deleted, do all references and
+//!   embeddings also get deleted, or is the oldest one elevated to be a new
+//!   "original"? Or is that something the user may choose?
+//!
+//! - Decision: Should the merging issue be handled at this layer, or would
+//!   it make sense to move this to a higher layer, and only provide the
+//!   hooks for it here?
+//!
+
 use crate::errors::NoteStoreError;
 use crate::reference_parser::build_references;
-use crate::structs::{
-    JustId, JustSlugs, NewNote, NewNoteBuilder, NewPage, NewPageBuilder, NoteRelationship, PageTitles, RawNote,
-    RawPage, RowCount,
-};
-use lazy_static::lazy_static;
-use regex::Regex;
-use shrinkwraprs::Shrinkwrap;
-use slug::slugify;
-use sqlx::{
-    sqlite::{Sqlite, SqlitePool, SqliteRow},
-    Done, Executor, Row,
-};
+use crate::store_private::*;
+use crate::structs::*;
+use sqlx::sqlite::SqlitePool;
+use std::cmp;
 use std::collections::HashMap;
-use std::collections::HashSet;
 use std::sync::Arc;
 
-#[derive(Shrinkwrap, Copy, Clone)]
-struct PageId(i64);
-
-#[derive(Shrinkwrap, Copy, Clone)]
-struct NoteId(i64);
-
-#[derive(Shrinkwrap, Copy, Clone)]
-struct ParentId(i64);
-
 /// A handle to our Sqlite database.
 #[derive(Clone, Debug)]
 pub struct NoteStore(Arc<SqlitePool>);
 
 type NoteResult<T> = core::result::Result<T, NoteStoreError>;
-type SqlResult<T> = sqlx::Result<T>;
+
+// One thing that's pretty terrible about this code is that the
+// Executor type in Sqlx is move-only, so it can only be used once per
+// outgoing function call. That means that a lot of this code is
+// internally duplicated, which sucks. I tried using the Acquire()
+// trait, but its interaction with Executor was not very
+// deterministic.
 
 impl NoteStore {
+    /// Initializes a new instance of the note store. Note that the
+    /// note store holds an Arc internally; this code is (I think)
+    /// safe to Send.
     pub async fn new(url: &str) -> NoteResult<Self> {
         let pool = SqlitePool::connect(url).await?;
         Ok(NoteStore(Arc::new(pool)))
     }
 
-    // Erase all the data in the database and restore it
-    // to its original empty form. Do not use unless you
-    // really, really want that to happen.
+    /// Erase all the data in the database and restore it
+    /// to its original empty form. Do not use unless you
+    /// really, really want that to happen.
    pub async fn reset_database(&self) -> NoteResult<()> {
         reset_database(&*self.0).await.map_err(NoteStoreError::DBError)
     }
 
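The module documentation added above describes the note-to-note table only abstractly. As a minimal illustrative sketch (the names are assumptions for explanation, not the schema this commit actually creates in sql/initialize_database.sql), the tuple it describes could be modelled like this:

    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    enum RelationshipType {
        Original,
        Embedding,
        Referencing,
    }

    #[derive(Debug, Clone, Copy)]
    struct NoteEdge {
        parent_id: i64,
        child_id: i64,
        // Monotonic index on the parent: (parent_id, position) must be unique.
        position: i64,
        relationship_type: RelationshipType,
    }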
@@ -93,7 +136,6 @@ impl NoteStore {
         Ok((page, notes))
     }
 
-    // TODO: Make sure the position is sane.
     /// Insert a note as the child of an existing note, at a set position.
     pub async fn insert_nested_note(
         &self,
@@ -107,19 +149,24 @@
         let mut tx = self.0.begin().await?;
 
         // Start by building the note and putting it into its relationship.
+        println!("Select_note_id_for_uuid");
         let parent_id: ParentId = select_note_id_for_uuid(&mut tx, parent_note_uuid).await?;
 
+        // Ensure new position is sane
+        println!("Assert Max Child Position");
         let parent_max_position = assert_max_child_position_for_note(&mut tx, parent_id).await?;
-        let position = if position > parent_max_position {
-            parent_max_position + 1
-        } else {
-            position
-        };
+        let position = cmp::min(parent_max_position + 1, position);
+
+        println!("Insert_one_new_note");
         let new_note_id = insert_one_new_note(&mut tx, &new_note).await?;
+        println!("make_room_for_new_note");
         let _ = make_room_for_new_note(&mut tx, parent_id, position).await?;
+        println!("Insert_note_to_note_relationship");
         let _ = insert_note_to_note_relationship(&mut tx, parent_id, new_note_id, position, "note").await?;
 
         // From the references, make lists of pages that exist, and pages
         // that do not.
+        println!("Find_all_page_references");
         let found_references = find_all_page_references_for(&mut tx, &references).await?;
         let new_references = diff_references(&references, &found_references);
         let mut known_reference_ids: Vec<PageId> = Vec::new();
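The `cmp::min` line above replaces the old if/else and clamps a requested position into the valid range. A self-contained sketch of just that behavior (illustrative only; it mirrors the logic rather than calling the store):

    use std::cmp;

    fn clamp_position(requested: i64, parent_max_position: i64) -> i64 {
        // A position past the current end collapses to "append after the last child".
        cmp::min(parent_max_position + 1, requested)
    }

    fn main() {
        assert_eq!(clamp_position(99, 3), 4); // too large: append
        assert_eq!(clamp_position(2, 3), 2);  // in range: unchanged
    }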
@@ -127,14 +174,18 @@
         // Create the pages that don't exist
         for one_reference in new_references.iter() {
             let new_root_note = create_unique_root_note();
+            println!("Insert_one_new_root_note");
             let new_root_note_id = insert_one_new_note(&mut tx, &new_root_note).await?;
+            println!("Generate_slug");
             let new_page_slug = generate_slug(&mut tx, &one_reference).await?;
             let new_page = create_new_page_for(&one_reference, &new_page_slug, new_root_note_id);
+            println!("insert_one_new_page");
             known_reference_ids.push(insert_one_new_page(&mut tx, &new_page).await?)
         }
 
         // And associate the note with all the pages.
         known_reference_ids.append(&mut found_references.iter().map(|r| PageId(r.id)).collect());
+        println!("insert_note_to_page_relationships");
         let _ = insert_note_to_page_relationships(&mut tx, new_note_id, &known_reference_ids).await?;
 
         tx.commit().await?;
@@ -145,7 +196,6 @@
     // dependent entirely on the *content*, and not the *position*, of
     // the note and the referenced page.
     //
-    // TODO: Ensure the position is sane.
     /// Move a note from one location to another.
     pub async fn move_note(
         &self,
@@ -178,11 +228,7 @@
         let _ = delete_note_to_note_relationship(&mut tx, old_parent_id, note_id).await?;
         let _ = close_hole_for_deleted_note(&mut tx, old_parent_id, old_note_position).await?;
         let parent_max_position = assert_max_child_position_for_note(&mut tx, new_parent_id).await?;
-        let new_position = if new_position > parent_max_position {
-            parent_max_position + 1
-        } else {
-            new_position
-        };
+        let new_position = cmp::min(parent_max_position + 1, new_position);
         let _ = make_room_for_new_note(&mut tx, new_parent_id, new_position).await?;
         let _ =
             insert_note_to_note_relationship(&mut tx, new_parent_id, note_id, new_position, old_note_nature).await?;
@@ -208,7 +254,8 @@
         Ok(())
     }
 
-    /// Delete a note
+    /// Deletes a note. If the note's relationship count drops to zero, all
+    /// references from that note to pages are also deleted.
     pub async fn delete_note(&self, note_uuid: &str, note_parent_uuid: &str) -> NoteResult<()> {
         let mut tx = self.0.begin().await?;
         let condemned_note_id: NoteId = NoteId(select_note_id_for_uuid(&mut tx, note_uuid).await?.0);
@@ -222,7 +269,8 @@
         Ok(())
     }
 
-    /// Update a note's content
+    /// Updates a note's content. Completely rebuilds the note's
+    /// outgoing edge reference list every time.
     pub async fn update_note_content(&self, note_uuid: &str, content: &str) -> NoteResult<()> {
         let references = build_references(&content);
 
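Taken together, the public surface touched in these hunks is small. A hypothetical caller sketch (not part of the diff; the note and UUID values are placeholders, and the commented-out calls assume the return types suggested by the tests above):

    async fn demo() -> Result<(), NoteStoreError> {
        let store = NoteStore::new("sqlite::memory:").await?;
        store.reset_database().await?;
        // let child_uuid = store.insert_nested_note(&new_note, &parent_uuid, 0).await?;
        // store.update_note_content(&child_uuid, "New body with a [[Reference]]").await?;
        Ok(())
    }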
@@ -252,451 +300,3 @@ impl NoteStore {
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// ___ _ _
|
|
||||||
// | _ \_ _(_)_ ____ _| |_ ___
|
|
||||||
// | _/ '_| \ V / _` | _/ -_)
|
|
||||||
// |_| |_| |_|\_/\__,_|\__\___|
|
|
||||||
//
|
|
||||||
|
|
||||||
// I'm putting a lot of faith in Rust's ability to inline stuff. I'm
|
|
||||||
// sure this is okay. But really, this lets the API be clean and
|
|
||||||
// coherent and easily readable, and hides away the gnarliness of some
|
|
||||||
// of the SQL queries.
|
|
||||||
|
|
||||||
async fn reset_database<'a, E>(executor: E) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let initialize_sql = include_str!("sql/initialize_database.sql");
|
|
||||||
sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<RawPage>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let select_one_page_by_slug_sql = concat!(
|
|
||||||
"SELECT id, title, slug, note_id, creation_date, updated_date, ",
|
|
||||||
"lastview_date, deleted_date FROM pages WHERE slug=?;"
|
|
||||||
);
|
|
||||||
Ok(sqlx::query_as(&select_one_page_by_slug_sql)
|
|
||||||
.bind(&slug)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<RawPage>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let select_one_page_by_title_sql = concat!(
|
|
||||||
"SELECT id, title, slug, note_id, creation_date, updated_date, ",
|
|
||||||
"lastview_date, deleted_date FROM pages WHERE title=?;"
|
|
||||||
);
|
|
||||||
Ok(sqlx::query_as(&select_one_page_by_title_sql)
|
|
||||||
.bind(&title)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn select_note_id_for_uuid<'a, E>(executor: E, uuid: &str) -> SqlResult<ParentId>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let select_note_id_for_uuid_sql = "SELECT id FROM notes WHERE uuid = ?";
|
|
||||||
let id: JustId = sqlx::query_as(&select_note_id_for_uuid_sql)
|
|
||||||
.bind(&uuid)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await?;
|
|
||||||
Ok(ParentId(id.id))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn make_room_for_new_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let make_room_for_new_note_sql = concat!(
|
|
||||||
"UPDATE note_relationships ",
|
|
||||||
"SET position = position + 1 ",
|
|
||||||
"WHERE position >= ? and parent_id = ?;"
|
|
||||||
);
|
|
||||||
|
|
||||||
sqlx::query(make_room_for_new_note_sql)
|
|
||||||
.bind(&position)
|
|
||||||
.bind(&*parent_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await
|
|
||||||
.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn insert_note_to_note_relationship<'a, E>(
|
|
||||||
executor: E,
|
|
||||||
parent_id: ParentId,
|
|
||||||
note_id: NoteId,
|
|
||||||
position: i64,
|
|
||||||
nature: &str,
|
|
||||||
) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let insert_note_to_note_relationship_sql = concat!(
|
|
||||||
"INSERT INTO note_relationships (parent_id, note_id, position, nature) ",
|
|
||||||
"values (?, ?, ?, ?)"
|
|
||||||
);
|
|
||||||
|
|
||||||
sqlx::query(insert_note_to_note_relationship_sql)
|
|
||||||
.bind(&*parent_id)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.bind(&position)
|
|
||||||
.bind(&nature)
|
|
||||||
.execute(executor)
|
|
||||||
.await
|
|
||||||
.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn select_note_collection_from_root<'a, E>(executor: E, root: i64) -> SqlResult<Vec<RawNote>>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let select_note_collection_from_root_sql = include_str!("sql/select_note_collection_from_root.sql");
|
|
||||||
Ok(sqlx::query_as(&select_note_collection_from_root_sql)
|
|
||||||
.bind(&root)
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await?)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn insert_one_new_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<NoteId>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let insert_one_note_sql = concat!(
|
|
||||||
"INSERT INTO notes ( ",
|
|
||||||
" uuid, ",
|
|
||||||
" content, ",
|
|
||||||
" notetype, ",
|
|
||||||
" creation_date, ",
|
|
||||||
" updated_date, ",
|
|
||||||
" lastview_date) ",
|
|
||||||
"VALUES (?, ?, ?, ?, ?, ?);"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(NoteId(
|
|
||||||
sqlx::query(insert_one_note_sql)
|
|
||||||
.bind(¬e.uuid)
|
|
||||||
.bind(¬e.content)
|
|
||||||
.bind(¬e.notetype)
|
|
||||||
.bind(¬e.creation_date)
|
|
||||||
.bind(¬e.updated_date)
|
|
||||||
.bind(¬e.lastview_date)
|
|
||||||
.execute(executor)
|
|
||||||
.await?
|
|
||||||
.last_insert_rowid(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given a possible slug, find the slug with the highest
|
|
||||||
// uniquification number, and return that number, if any.
|
|
||||||
fn find_maximal_slug(slugs: &[JustSlugs]) -> Option<u32> {
|
|
||||||
lazy_static! {
|
|
||||||
static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
if slugs.is_empty() {
|
|
||||||
return None;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut slug_counters: Vec<u32> = slugs
|
|
||||||
.iter()
|
|
||||||
.filter_map(|slug| RE_CAP_NUM.captures(&slug.slug))
|
|
||||||
.map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
|
|
||||||
.collect();
|
|
||||||
slug_counters.sort_unstable();
|
|
||||||
slug_counters.pop()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given an initial string and an existing collection of slugs,
|
|
||||||
// generate a new slug that does not conflict with the current
|
|
||||||
// collection.
|
|
||||||
async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
lazy_static! {
|
|
||||||
static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
|
|
||||||
}
|
|
||||||
|
|
||||||
let initial_slug = slugify(title);
|
|
||||||
let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
|
|
||||||
let slug_finder_sql = "SELECT slug FROM pages WHERE slug LIKE '?%';";
|
|
||||||
let similar_slugs: Vec<JustSlugs> = sqlx::query_as(&slug_finder_sql)
|
|
||||||
.bind(&*sample_slug)
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await?;
|
|
||||||
let maximal_slug = find_maximal_slug(&similar_slugs);
|
|
||||||
match maximal_slug {
|
|
||||||
None => Ok(initial_slug),
|
|
||||||
Some(max_slug) => Ok(format!("{}-{}", initial_slug, max_slug + 1)),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn insert_one_new_page<'a, E>(executor: E, page: &NewPage) -> SqlResult<PageId>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let insert_one_page_sql = concat!(
|
|
||||||
"INSERT INTO pages ( ",
|
|
||||||
" slug, ",
|
|
||||||
" title, ",
|
|
||||||
" note_id, ",
|
|
||||||
" creation_date, ",
|
|
||||||
" updated_date, ",
|
|
||||||
" lastview_date) ",
|
|
||||||
"VALUES (?, ?, ?, ?, ?, ?);"
|
|
||||||
);
|
|
||||||
|
|
||||||
Ok(PageId(
|
|
||||||
sqlx::query(insert_one_page_sql)
|
|
||||||
.bind(&page.slug)
|
|
||||||
.bind(&page.title)
|
|
||||||
.bind(&page.note_id)
|
|
||||||
.bind(&page.creation_date)
|
|
||||||
.bind(&page.updated_date)
|
|
||||||
.bind(&page.lastview_date)
|
|
||||||
.execute(executor)
|
|
||||||
.await?
|
|
||||||
.last_insert_rowid(),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn insert_note_to_page_relationships<'a, E>(
|
|
||||||
executor: E,
|
|
||||||
note_id: NoteId,
|
|
||||||
references: &[PageId],
|
|
||||||
) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let insert_note_page_references_sql = "INSERT INTO page_relationships (note_id, page_id) VALUES ".to_string()
|
|
||||||
+ &["(?, ?)"].repeat(references.len()).join(", ")
|
|
||||||
+ &";".to_string();
|
|
||||||
|
|
||||||
let mut request = sqlx::query(&insert_note_page_references_sql);
|
|
||||||
for reference in references {
|
|
||||||
request = request.bind(*note_id).bind(**reference);
|
|
||||||
}
|
|
||||||
|
|
||||||
request.execute(executor).await.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
// For a given collection of uuids, retrieve the internal ID used by
|
|
||||||
// the database.
|
|
||||||
async fn bulk_select_ids_for_note_uuids<'a, E>(executor: E, ids: &[&str]) -> SqlResult<Vec<(String, i64)>>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let bulk_select_ids_for_note_uuids_sql = "SELECT uuid, id FROM notes WHERE uuid IN (".to_string()
|
|
||||||
+ &["?"].repeat(ids.len()).join(",")
|
|
||||||
+ &");".to_string();
|
|
||||||
|
|
||||||
let mut request = sqlx::query(&bulk_select_ids_for_note_uuids_sql);
|
|
||||||
for id in ids.iter() {
|
|
||||||
request = request.bind(id);
|
|
||||||
}
|
|
||||||
Ok(request
|
|
||||||
.try_map(|row: SqliteRow| {
|
|
||||||
let l = row.try_get::<String, _>(0)?;
|
|
||||||
let r = row.try_get::<i64, _>(1)?;
|
|
||||||
Ok((l, r))
|
|
||||||
})
|
|
||||||
.fetch_all(executor)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
// Used by move_note to identify the single note to note relationship
|
|
||||||
// by the original parent and child pair. Used mostly to find the
|
|
||||||
// position for recalculation, to create a new gap or close an old
|
|
||||||
// one.
|
|
||||||
async fn get_note_to_note_relationship<'a, E>(
|
|
||||||
executor: E,
|
|
||||||
parent_id: ParentId,
|
|
||||||
note_id: NoteId,
|
|
||||||
) -> SqlResult<NoteRelationship>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let get_note_to_note_relationship_sql = concat!(
|
|
||||||
"SELECT parent_id, note_id, position, nature ",
|
|
||||||
"FROM note_relationships ",
|
|
||||||
"WHERE parent_id = ? and note_id = ? ",
|
|
||||||
"LIMIT 1"
|
|
||||||
);
|
|
||||||
sqlx::query_as(get_note_to_note_relationship_sql)
|
|
||||||
.bind(&*parent_id)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn delete_note_to_note_relationship<'a, E>(executor: E, parent_id: ParentId, note_id: NoteId) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let delete_note_to_note_relationship_sql = concat!(
|
|
||||||
"DELETE FROM note_relationships ",
|
|
||||||
"WHERE parent_id = ? and note_id = ? "
|
|
||||||
);
|
|
||||||
|
|
||||||
let count = sqlx::query(delete_note_to_note_relationship_sql)
|
|
||||||
.bind(&*parent_id)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await?
|
|
||||||
.rows_affected();
|
|
||||||
|
|
||||||
match count {
|
|
||||||
1 => Ok(()),
|
|
||||||
_ => Err(sqlx::Error::RowNotFound),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let delete_note_to_page_relationships_sql = concat!("DELETE FROM page_relationships ", "WHERE note_id = ? ");
|
|
||||||
|
|
||||||
let _ = sqlx::query(delete_note_to_page_relationships_sql)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await?;
|
|
||||||
Ok(())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn delete_note<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let delete_note_sql = concat!("DELETE FROM notes WHERE note_id = ?");
|
|
||||||
|
|
||||||
let count = sqlx::query(delete_note_sql)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await?
|
|
||||||
.rows_affected();
|
|
||||||
|
|
||||||
match count {
|
|
||||||
1 => Ok(()),
|
|
||||||
_ => Err(sqlx::Error::RowNotFound),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn count_existing_note_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<i64>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM page_relationships WHERE note_id = ?";
|
|
||||||
|
|
||||||
let count: RowCount = sqlx::query_as(count_existing_note_relationships_sql)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(count.count)
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn assert_max_child_position_for_note<'a, E>(executor: E, note_id: ParentId) -> SqlResult<i64>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let assert_max_child_position_for_note_sql =
|
|
||||||
"SELECT MAX(position) as count FROM note_relationships WHERE parent_id = ?";
|
|
||||||
|
|
||||||
let count: RowCount = sqlx::query_as(assert_max_child_position_for_note_sql)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.fetch_one(executor)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
Ok(count.count)
|
|
||||||
}
|
|
||||||
|
|
||||||
// After removing a note, recalculate the position of all notes under
|
|
||||||
// the parent note, such that there order is now completely
|
|
||||||
// sequential.
|
|
||||||
async fn close_hole_for_deleted_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let close_hole_for_deleted_note_sql = concat!(
|
|
||||||
"UPDATE note_relationships ",
|
|
||||||
"SET position = position - 1 ",
|
|
||||||
"WHERE position > ? and parent_id = ?;"
|
|
||||||
);
|
|
||||||
|
|
||||||
sqlx::query(close_hole_for_deleted_note_sql)
|
|
||||||
.bind(&position)
|
|
||||||
.bind(&*parent_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await
|
|
||||||
.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn find_all_page_references_for<'a, E>(executor: E, references: &[String]) -> SqlResult<Vec<PageTitles>>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let find_all_references_for_sql = "SELECT id, title FROM pages WHERE title IN (".to_string()
|
|
||||||
+ &["?"].repeat(references.len()).join(",")
|
|
||||||
+ &");".to_string();
|
|
||||||
|
|
||||||
let mut request = sqlx::query_as(&find_all_references_for_sql);
|
|
||||||
for id in references.iter() {
|
|
||||||
request = request.bind(id);
|
|
||||||
}
|
|
||||||
request.fetch_all(executor).await
|
|
||||||
}
|
|
||||||
|
|
||||||
async fn update_note_content<'a, E>(executor: E, note_id: NoteId, content: &str) -> SqlResult<()>
|
|
||||||
where
|
|
||||||
E: Executor<'a, Database = Sqlite>,
|
|
||||||
{
|
|
||||||
let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
|
|
||||||
let count = sqlx::query(update_note_content_sql)
|
|
||||||
.bind(content)
|
|
||||||
.bind(&*note_id)
|
|
||||||
.execute(executor)
|
|
||||||
.await?
|
|
||||||
.rows_affected();
|
|
||||||
|
|
||||||
match count {
|
|
||||||
1 => Ok(()),
|
|
||||||
_ => Err(sqlx::Error::RowNotFound),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_unique_root_note() -> NewNote {
|
|
||||||
NewNoteBuilder::default()
|
|
||||||
.uuid(friendly_id::create())
|
|
||||||
.content("".to_string())
|
|
||||||
.notetype("root".to_string())
|
|
||||||
.build()
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
fn create_new_page_for(title: &str, slug: &str, note_id: NoteId) -> NewPage {
|
|
||||||
NewPageBuilder::default()
|
|
||||||
.slug(slug.to_string())
|
|
||||||
.title(title.to_string())
|
|
||||||
.note_id(*note_id)
|
|
||||||
.build()
|
|
||||||
.unwrap()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given the references supplied, and the references found in the datastore,
|
|
||||||
// return a list of the references not found in the datastore.
|
|
||||||
fn diff_references(references: &[String], found_references: &[PageTitles]) -> Vec<String> {
|
|
||||||
let all: HashSet<String> = references.iter().cloned().collect();
|
|
||||||
let found: HashSet<String> = found_references.iter().map(|r| r.title.clone()).collect();
|
|
||||||
all.difference(&found).cloned().collect()
|
|
||||||
}
|
|
||||||
|
|
|
@@ -0,0 +1,480 @@
|
||||||
|
use crate::structs::*;
|
||||||
|
use lazy_static::lazy_static;
|
||||||
|
use regex::Regex;
|
||||||
|
use slug::slugify;
|
||||||
|
use sqlx::{
|
||||||
|
sqlite::{Sqlite, SqliteRow},
|
||||||
|
Done, Executor, Row,
|
||||||
|
};
|
||||||
|
use std::collections::HashSet;
|
||||||
|
|
||||||
|
type SqlResult<T> = sqlx::Result<T>;
|
||||||
|
|
||||||
|
// ___ _ _
|
||||||
|
// | _ \_ _(_)_ ____ _| |_ ___
|
||||||
|
// | _/ '_| \ V / _` | _/ -_)
|
||||||
|
// |_| |_| |_|\_/\__,_|\__\___|
|
||||||
|
//
|
||||||
|
|
||||||
|
// I'm putting a lot of faith in Rust's ability to inline stuff. I'm
|
||||||
|
// sure this is okay. But really, this lets the API be clean and
|
||||||
|
// coherent and easily readable, and hides away the gnarliness of some
|
||||||
|
// of the SQL queries.
|
||||||
|
|
||||||
|
pub(crate) async fn reset_database<'a, E>(executor: E) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let initialize_sql = include_str!("sql/initialize_database.sql");
|
||||||
|
sqlx::query(initialize_sql).execute(executor).await.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn select_page_by_slug<'a, E>(executor: E, slug: &str) -> SqlResult<RawPage>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let select_one_page_by_slug_sql = concat!(
|
||||||
|
"SELECT id, title, slug, note_id, creation_date, updated_date, ",
|
||||||
|
"lastview_date, deleted_date FROM pages WHERE slug=?;"
|
||||||
|
);
|
||||||
|
Ok(sqlx::query_as(&select_one_page_by_slug_sql)
|
||||||
|
.bind(&slug)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn select_page_by_title<'a, E>(executor: E, title: &str) -> SqlResult<RawPage>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let select_one_page_by_title_sql = concat!(
|
||||||
|
"SELECT id, title, slug, note_id, creation_date, updated_date, ",
|
||||||
|
"lastview_date, deleted_date FROM pages WHERE title=?;"
|
||||||
|
);
|
||||||
|
Ok(sqlx::query_as(&select_one_page_by_title_sql)
|
||||||
|
.bind(&title)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn select_note_id_for_uuid<'a, E>(executor: E, uuid: &str) -> SqlResult<ParentId>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let select_note_id_for_uuid_sql = "SELECT id FROM notes WHERE uuid = ?;";
|
||||||
|
let id: JustId = sqlx::query_as(&select_note_id_for_uuid_sql)
|
||||||
|
.bind(&uuid)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(ParentId(id.id))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn make_room_for_new_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let make_room_for_new_note_sql = concat!(
|
||||||
|
"UPDATE note_relationships ",
|
||||||
|
"SET position = position + 1 ",
|
||||||
|
"WHERE position >= ? and parent_id = ?;"
|
||||||
|
);
|
||||||
|
|
||||||
|
sqlx::query(make_room_for_new_note_sql)
|
||||||
|
.bind(&position)
|
||||||
|
.bind(&*parent_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn insert_note_to_note_relationship<'a, E>(
|
||||||
|
executor: E,
|
||||||
|
parent_id: ParentId,
|
||||||
|
note_id: NoteId,
|
||||||
|
position: i64,
|
||||||
|
nature: &str,
|
||||||
|
) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let insert_note_to_note_relationship_sql = concat!(
|
||||||
|
"INSERT INTO note_relationships (parent_id, note_id, position, nature) ",
|
||||||
|
"values (?, ?, ?, ?)"
|
||||||
|
);
|
||||||
|
|
||||||
|
sqlx::query(insert_note_to_note_relationship_sql)
|
||||||
|
.bind(&*parent_id)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.bind(&position)
|
||||||
|
.bind(&nature)
|
||||||
|
.execute(executor)
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn select_note_collection_from_root<'a, E>(executor: E, root: i64) -> SqlResult<Vec<RawNote>>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let select_note_collection_from_root_sql = include_str!("sql/select_note_collection_from_root.sql");
|
||||||
|
Ok(sqlx::query_as(&select_note_collection_from_root_sql)
|
||||||
|
.bind(&root)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await?)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn insert_one_new_note<'a, E>(executor: E, note: &NewNote) -> SqlResult<NoteId>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let insert_one_note_sql = concat!(
|
||||||
|
"INSERT INTO notes ( ",
|
||||||
|
" uuid, ",
|
||||||
|
" content, ",
|
||||||
|
" notetype, ",
|
||||||
|
" creation_date, ",
|
||||||
|
" updated_date, ",
|
||||||
|
" lastview_date) ",
|
||||||
|
"VALUES (?, ?, ?, ?, ?, ?);"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(NoteId(
|
||||||
|
sqlx::query(insert_one_note_sql)
|
||||||
|
.bind(¬e.uuid)
|
||||||
|
.bind(¬e.content)
|
||||||
|
.bind(¬e.notetype)
|
||||||
|
.bind(¬e.creation_date)
|
||||||
|
.bind(¬e.updated_date)
|
||||||
|
.bind(¬e.lastview_date)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
.last_insert_rowid(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
// Given a possible slug, find the slug with the highest
|
||||||
|
// uniquification number, and return that number, if any.
|
||||||
|
|
||||||
|
pub(crate) fn find_maximal_slug(slugs: &[JustSlugs]) -> Option<u32> {
|
||||||
|
lazy_static! {
|
||||||
|
static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
if slugs.is_empty() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut slug_counters: Vec<u32> = slugs
|
||||||
|
.iter()
|
||||||
|
.filter_map(|slug| RE_CAP_NUM.captures(&slug.slug))
|
||||||
|
.map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().unwrap())
|
||||||
|
.collect();
|
||||||
|
slug_counters.sort_unstable();
|
||||||
|
slug_counters.pop()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Given an initial string and an existing collection of slugs,
|
||||||
|
// generate a new slug that does not conflict with the current
|
||||||
|
// collection.
|
||||||
|
pub(crate) async fn generate_slug<'a, E>(executor: E, title: &str) -> SqlResult<String>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
lazy_static! {
|
||||||
|
static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
|
||||||
|
}
|
||||||
|
|
||||||
|
let initial_slug = slugify(title);
|
||||||
|
let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
|
||||||
|
let slug_finder_sql = "SELECT slug FROM pages WHERE slug LIKE '?%';";
|
||||||
|
let similar_slugs: Vec<JustSlugs> = sqlx::query_as(&slug_finder_sql)
|
||||||
|
.bind(&*sample_slug)
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await?;
|
||||||
|
let maximal_slug = find_maximal_slug(&similar_slugs);
|
||||||
|
match maximal_slug {
|
||||||
|
None => Ok(initial_slug),
|
||||||
|
Some(max_slug) => Ok(format!("{}-{}", initial_slug, max_slug + 1)),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn insert_one_new_page<'a, E>(executor: E, page: &NewPage) -> SqlResult<PageId>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let insert_one_page_sql = concat!(
|
||||||
|
"INSERT INTO pages ( ",
|
||||||
|
" slug, ",
|
||||||
|
" title, ",
|
||||||
|
" note_id, ",
|
||||||
|
" creation_date, ",
|
||||||
|
" updated_date, ",
|
||||||
|
" lastview_date) ",
|
||||||
|
"VALUES (?, ?, ?, ?, ?, ?);"
|
||||||
|
);
|
||||||
|
|
||||||
|
Ok(PageId(
|
||||||
|
sqlx::query(insert_one_page_sql)
|
||||||
|
.bind(&page.slug)
|
||||||
|
.bind(&page.title)
|
||||||
|
.bind(&page.note_id)
|
||||||
|
.bind(&page.creation_date)
|
||||||
|
.bind(&page.updated_date)
|
||||||
|
.bind(&page.lastview_date)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
.last_insert_rowid(),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn insert_note_to_page_relationships<'a, E>(
|
||||||
|
executor: E,
|
||||||
|
note_id: NoteId,
|
||||||
|
references: &[PageId],
|
||||||
|
) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
if references.is_empty() {
|
||||||
|
return Ok(());
|
||||||
|
}
|
||||||
|
|
||||||
|
let insert_note_page_references_sql = "INSERT INTO page_relationships (note_id, page_id) VALUES ".to_string()
|
||||||
|
+ &["(?, ?)"].repeat(references.len()).join(", ")
|
||||||
|
+ &";".to_string();
|
||||||
|
|
||||||
|
let mut request = sqlx::query(&insert_note_page_references_sql);
|
||||||
|
for reference in references {
|
||||||
|
request = request.bind(*note_id).bind(**reference);
|
||||||
|
}
|
||||||
|
|
||||||
|
request.execute(executor).await.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
|
// For a given collection of uuids, retrieve the internal ID used by
|
||||||
|
// the database.
|
||||||
|
pub(crate) async fn bulk_select_ids_for_note_uuids<'a, E>(executor: E, ids: &[&str]) -> SqlResult<Vec<(String, i64)>>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
if ids.is_empty() {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let bulk_select_ids_for_note_uuids_sql = "SELECT uuid, id FROM notes WHERE uuid IN (".to_string()
|
||||||
|
+ &["?"].repeat(ids.len()).join(",")
|
||||||
|
+ &");".to_string();
|
||||||
|
|
||||||
|
let mut request = sqlx::query(&bulk_select_ids_for_note_uuids_sql);
|
||||||
|
for id in ids.iter() {
|
||||||
|
request = request.bind(id);
|
||||||
|
}
|
||||||
|
Ok(request
|
||||||
|
.try_map(|row: SqliteRow| {
|
||||||
|
let l = row.try_get::<String, _>(0)?;
|
||||||
|
let r = row.try_get::<i64, _>(1)?;
|
||||||
|
Ok((l, r))
|
||||||
|
})
|
||||||
|
.fetch_all(executor)
|
||||||
|
.await?
|
||||||
|
.into_iter()
|
||||||
|
.collect())
|
||||||
|
}
|
||||||
|
|
||||||
|
// Used by move_note to identify the single note to note relationship
|
||||||
|
// by the original parent and child pair. Used mostly to find the
|
||||||
|
// position for recalculation, to create a new gap or close an old
|
||||||
|
// one.
|
||||||
|
pub(crate) async fn get_note_to_note_relationship<'a, E>(
|
||||||
|
executor: E,
|
||||||
|
parent_id: ParentId,
|
||||||
|
note_id: NoteId,
|
||||||
|
) -> SqlResult<NoteRelationship>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let get_note_to_note_relationship_sql = concat!(
|
||||||
|
"SELECT parent_id, note_id, position, nature ",
|
||||||
|
"FROM note_relationships ",
|
||||||
|
"WHERE parent_id = ? and note_id = ? ",
|
||||||
|
"LIMIT 1"
|
||||||
|
);
|
||||||
|
sqlx::query_as(get_note_to_note_relationship_sql)
|
||||||
|
.bind(&*parent_id)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn delete_note_to_note_relationship<'a, E>(
|
||||||
|
executor: E,
|
||||||
|
parent_id: ParentId,
|
||||||
|
note_id: NoteId,
|
||||||
|
) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let delete_note_to_note_relationship_sql = concat!(
|
||||||
|
"DELETE FROM note_relationships ",
|
||||||
|
"WHERE parent_id = ? and note_id = ? "
|
||||||
|
);
|
||||||
|
|
||||||
|
let count = sqlx::query(delete_note_to_note_relationship_sql)
|
||||||
|
.bind(&*parent_id)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
.rows_affected();
|
||||||
|
|
||||||
|
match count {
|
||||||
|
1 => Ok(()),
|
||||||
|
_ => Err(sqlx::Error::RowNotFound),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn delete_note_to_page_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let delete_note_to_page_relationships_sql = "DELETE FROM page_relationships WHERE note_id = ?;";
|
||||||
|
|
||||||
|
let _ = sqlx::query(delete_note_to_page_relationships_sql)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn delete_note<'a, E>(executor: E, note_id: NoteId) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let delete_note_sql = "DELETE FROM notes WHERE note_id = ?";
|
||||||
|
|
||||||
|
let count = sqlx::query(delete_note_sql)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
.rows_affected();
|
||||||
|
|
||||||
|
match count {
|
||||||
|
1 => Ok(()),
|
||||||
|
_ => Err(sqlx::Error::RowNotFound),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn count_existing_note_relationships<'a, E>(executor: E, note_id: NoteId) -> SqlResult<i64>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let count_existing_note_relationships_sql = "SELECT COUNT(*) as count FROM page_relationships WHERE note_id = ?;";
|
||||||
|
|
||||||
|
let count: RowCount = sqlx::query_as(count_existing_note_relationships_sql)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(count.count)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn assert_max_child_position_for_note<'a, E>(executor: E, note_id: ParentId) -> SqlResult<i64>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let assert_max_child_position_for_note_sql =
|
||||||
|
"SELECT MAX(position) AS count FROM note_relationships WHERE parent_id = ?;";
|
||||||
|
|
||||||
|
let count: RowCount = sqlx::query_as(assert_max_child_position_for_note_sql)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.fetch_one(executor)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
Ok(count.count)
|
||||||
|
}
|
||||||
|
|
||||||
|
// After removing a note, recalculate the position of all notes under
|
||||||
|
// the parent note, such that there order is now completely
|
||||||
|
// sequential.
|
||||||
|
pub(crate) async fn close_hole_for_deleted_note<'a, E>(executor: E, parent_id: ParentId, position: i64) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let close_hole_for_deleted_note_sql = concat!(
|
||||||
|
"UPDATE note_relationships ",
|
||||||
|
"SET position = position - 1 ",
|
||||||
|
"WHERE position > ? and parent_id = ?;"
|
||||||
|
);
|
||||||
|
|
||||||
|
sqlx::query(close_hole_for_deleted_note_sql)
|
||||||
|
.bind(&position)
|
||||||
|
.bind(&*parent_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await
|
||||||
|
.map(|_| ())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn find_all_page_references_for<'a, E>(
|
||||||
|
executor: E,
|
||||||
|
references: &[String],
|
||||||
|
) -> SqlResult<Vec<PageTitles>>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
if references.is_empty() {
|
||||||
|
return Ok(vec![]);
|
||||||
|
}
|
||||||
|
|
||||||
|
let find_all_references_for_sql = "SELECT id, title FROM pages WHERE title IN (".to_string()
|
||||||
|
+ &["?"].repeat(references.len()).join(",")
|
||||||
|
+ &");".to_string();
|
||||||
|
|
||||||
|
let mut request = sqlx::query_as(&find_all_references_for_sql);
|
||||||
|
for id in references.iter() {
|
||||||
|
request = request.bind(id);
|
||||||
|
}
|
||||||
|
request.fetch_all(executor).await
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) async fn update_note_content<'a, E>(executor: E, note_id: NoteId, content: &str) -> SqlResult<()>
|
||||||
|
where
|
||||||
|
E: Executor<'a, Database = Sqlite>,
|
||||||
|
{
|
||||||
|
let update_note_content_sql = "UPDATE notes SET content = ? WHERE note_id = ?";
|
||||||
|
let count = sqlx::query(update_note_content_sql)
|
||||||
|
.bind(content)
|
||||||
|
.bind(&*note_id)
|
||||||
|
.execute(executor)
|
||||||
|
.await?
|
||||||
|
.rows_affected();
|
||||||
|
|
||||||
|
match count {
|
||||||
|
1 => Ok(()),
|
||||||
|
_ => Err(sqlx::Error::RowNotFound),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn create_unique_root_note() -> NewNote {
|
||||||
|
NewNoteBuilder::default()
|
||||||
|
.uuid(friendly_id::create())
|
||||||
|
.content("".to_string())
|
||||||
|
.notetype("root".to_string())
|
||||||
|
.build()
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub(crate) fn create_new_page_for(title: &str, slug: &str, note_id: NoteId) -> NewPage {
|
||||||
|
NewPageBuilder::default()
|
||||||
|
.slug(slug.to_string())
|
||||||
|
.title(title.to_string())
|
||||||
|
.note_id(*note_id)
|
||||||
|
.build()
|
||||||
|
.unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
// Given the references supplied, and the references found in the datastore,
|
||||||
|
// return a list of the references not found in the datastore.
|
||||||
|
pub(crate) fn diff_references(references: &[String], found_references: &[PageTitles]) -> Vec<String> {
|
||||||
|
let all: HashSet<String> = references.iter().cloned().collect();
|
||||||
|
let found: HashSet<String> = found_references.iter().map(|r| r.title.clone()).collect();
|
||||||
|
all.difference(&found).cloned().collect()
|
||||||
|
}
|
|
@@ -1,8 +1,22 @@
 use chrono::{DateTime, Utc};
 use derive_builder::Builder;
 use serde::{Deserialize, Serialize};
+use shrinkwraprs::Shrinkwrap;
 use sqlx::{self, FromRow};
 
+#[derive(Shrinkwrap, Copy, Clone)]
+pub(crate) struct PageId(pub i64);
+
+#[derive(Shrinkwrap, Copy, Clone)]
+pub(crate) struct NoteId(pub i64);
+
+#[derive(Shrinkwrap, Copy, Clone)]
+pub(crate) struct ParentId(pub i64);
+
+/// A RawPage is what this layer of the API returns when requesting a
+/// page. Note that usually what you'll get back is the RawPage and a
+/// Vec<RawNote>. It's the next level's responsibility to turn that
+/// into a proper tree.
 #[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
 pub struct RawPage {
     pub id: i64,
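These newtypes moved here from store.rs and gained pub(crate) visibility plus public inner fields. The Shrinkwrap derive is what lets the query code deref them back to the raw i64; a small illustrative sketch (the helper function is hypothetical, not in the diff):

    fn bind_raw_id(id: i64) -> i64 {
        id
    }

    fn demo(note_id: NoteId) -> i64 {
        // Shrinkwrap derives Deref, so *note_id (or &*note_id) yields the inner
        // i64, matching the .bind(&*parent_id) / .bind(*note_id) calls in the store.
        bind_raw_id(*note_id)
    }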
@@ -15,6 +29,8 @@ pub struct RawPage {
     pub deleted_date: Option<DateTime<Utc>>,
 }
 
+/// A RawNote is what this layer of the API returns
+/// when requesting a note.
 #[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
 pub struct RawNote {
     pub id: i64,
@@ -30,6 +46,7 @@ pub struct RawNote {
     pub deleted_date: Option<DateTime<Utc>>,
 }
 
+/// The interface for passing a new page to the store.
 #[derive(Clone, Serialize, Deserialize, Debug, Builder)]
 pub struct NewPage {
     pub slug: String,
@@ -45,6 +62,7 @@ pub struct NewPage {
     pub deleted_date: Option<DateTime<Utc>>,
 }
 
+/// The interface for passing a new note to the store.
 #[derive(Clone, Serialize, Deserialize, Debug, Builder)]
 pub struct NewNote {
     #[builder(default = r#""".to_string()"#)]