Not working.
parent 0f5d15ad14
commit e0c463f9fc
@@ -13,6 +13,7 @@ readme = "./README.org"
[dependencies]
friendly_id = "0.3.0"
thiserror = "1.0.20"
derive_builder = "0.9.0"
tokio = { version = "0.2.22", features = ["rt-threaded", "blocking"] }
serde = { version = "1.0.116", features = ["derive"] }
serde_json = "1.0.56"

@@ -0,0 +1,190 @@
async fn insert_note<'e, E>(executor: E, id: &str, content: &str, notetype: &str) -> SqlResult<i64>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    lazy_static! {
        static ref INSERT_ONE_NOTE_SQL: String = include_str!("sql/insert_one_note.sql").to_string();
    }
    let now = chrono::Utc::now();
    Ok(sqlx::query(INSERT_ONE_NOTE_SQL.as_str())
        .bind(&id)
        .bind(&content)
        .bind(&notetype)
        .bind(&now)
        .bind(&now)
        .bind(&now)
        .execute(executor)
        .await?
        .last_insert_rowid())
}

#[derive(Clone, FromRow)]
struct JustSlugs {
    slug: String,
}

// Given an initial string and an existing collection of slugs,
// generate a new slug that does not conflict with the current
// collection.
async fn generate_slug<'e, E>(executor: E, title: &str) -> SqlResult<String>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    lazy_static! {
        static ref RE_JUSTNUM: Regex = Regex::new(r"-\d+$").unwrap();
    }
    lazy_static! {
        static ref RE_CAPNUM: Regex = Regex::new(r"-(\d+)$").unwrap();
    }

    let initial_slug = slugify::slugify(title);
    let sample_slug = RE_JUSTNUM.replace_all(&initial_slug, "");
    let similar_slugs: Vec<JustSlugs> = sqlx::query_as("SELECT slug FROM pages WHERE slug LIKE ?;")
        .bind(format!("{}%", sample_slug))
        .fetch_all(executor)
        .await?;
    let mut slug_counters: Vec<u32> = similar_slugs
        .iter()
        .filter_map(|slug| RE_CAPNUM.captures(&slug.slug))
        .filter_map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().ok())
        .collect();
    match slug_counters.len() {
        0 => Ok(initial_slug),
        _ => {
            slug_counters.sort_unstable();
            Ok(format!("{}-{}", initial_slug, slug_counters.pop().unwrap() + 1))
        }
    }
}

async fn insert_page<'e, E>(executor: E, page: &RawPage) -> SqlResult<i64>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let insert_one_page_sql = include_str!("sql/insert_one_page.sql");
    Ok(sqlx::query(insert_one_page_sql)
        .bind(&page.id)
        .bind(&page.title)
        .bind(&page.note_id)
        .bind(&page.creation_date)
        .bind(&page.updated_date)
        .bind(&page.lastview_date)
        .execute(executor)
        .await?
        .last_insert_rowid())
}

/// Given a title, insert a new page. All dates are today, and the slug is
/// generated as above:
async fn insert_new_page_for_title<'e, E>(executor: E, title: &str) -> SqlResult<Page>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    // Body not present in this commit.
    todo!()
}

// /// Fetch page by title
// ///
// /// This is the most common use case, in which a specific title
// /// has been requested of the server via POST. The page always
// /// exists; if it doesn't, it will be automatically generated.
// pub async fn get_page_by_title(&slug, slug: &title) -> NoteResult<(Page, Notes)> {
//     let mut tx = self.0.begin().await?;
//     let maybe_page = sqlx::query_as(select_one_page_by_title)
//         .bind(&title)
//         .fetch_one(&tx)
//         .await;
//     let page = match maybe_page {
//         Ok(page) => page,
//         Err(sqlx::Error::NotFound) => insert_new_page_for_title(tx, title),
//         Err(a) => return Err(a)
//     };
//     let notes = sqlx::query_as(select_note_collection_for_root)
//         .bind(page.note_id)
//         .fetch(&tx)
//         .await?;
//     tx.commit().await?;
//     Ok((page, notes))
// }
//
// /// This will erase all the data in the database. Only use this
// /// if you're sure that's what you want.
// pub async fn reset_database(&self) -> NoteResult<()> {
//     let initialize_sql = include_str!("sql/initialize_database.sql");
//     sqlx::query(initialize_sql).execute(&*self.0).await?;
//     Ok(())
// }
//
// async fn create_new_page(&self, title: &str) -> SqlResult<Page, Vec<Notes>> {
//     let now = chrono::Utc::now();
//     let new_note_id = friendly_id::create();
//
//     let mut tx = self.0.begin().await?;
//     let new_slug = generate_slug(&mut tx, title);
//     let note_id = insert_note(&mut tx, &new_note_id, &"", &"page").await?;
//     insert_page(&mut tx, NewPage {
//         slug,
//         title,
//         note_id,
//         creation_date: now,
//         updated_date: now,
//         lastview_date: now
//     }).await;
//     tx.commit();
//     self.fetch_one_page(title)
// }
//
// async fn fetch_one_page(&self, title: &str) ->
//
// pub async fn fetch_page(&self, title: &str) -> SqlResult<(Page, Vec<Notes>)> {
//     match self.fetch_one_page(title) {
//         Ok((page, notes)) => Ok((page, notes)),
//         Err(NotFound) => self.create_new_page(title),
//         Err(e) => Err(e)
//     }
// }
//
// pub async fn fetch_raw_page(&self, id: &str) -> SqlResult<RawPage> {
//     let select_one_page_sql = include_str!("sql/select_one_page.sql");
//     sqlx::query_as(select_one_page_sql).bind(&id).fetch_one(&*self.0).await
// }
//
// pub async fn fetch_raw_note(&self, id: &str) -> SqlResult<RawNote> {
//     let select_one_note_sql = include_str!("sql/select_one_note.sql");
//     sqlx::query_as(select_one_note_sql).bind(&id).fetch_one(&*self.0).await
// }
//
// pub async fn insert_note(&self, id: &str, content: &str, notetype: &str) -> SqlResult<i64> {
//     insert_note(&*self.0, id, content, notetype).await
// }
//
// pub async fn update_raw_note(&self, id: &str, content: &str) -> NoteResult<()> {
//     let update_one_note_sql = include_str!("sql/update_one_note.sql");
//     let now = chrono::Utc::now();
//     let rows_updated = sqlx::query(update_one_note_sql)
//         .bind(&content)
//         .bind(&now)
//         .bind(&now)
//         .bind(&id)
//         .execute(&*self.0).await?
//         .rows_affected();
//     match rows_updated {
//         1 => Ok(()),
//         _ => Err(NoteStoreError::NotFound)
//     }
// }
//
// // TODO: We're returning the raw page with the raw note id, note
// // the friendly ID. Is there a disconnect there? It's making me
// // furiously to think.
//

@@ -1,4 +1,6 @@
use chrono::{DateTime, Utc};
mod errors;
mod row_structs;
mod store;
mod structs;

@@ -19,81 +21,42 @@ mod tests {
        storagepool
    }

    // Request for the page by slug.
    // If the page exists, return it. If the page doesn't, return NotFound

    #[tokio::test(threaded_scheduler)]
    async fn fetching_unfound_page_works() {
    async fn fetching_unfound_page_by_slug_works() {
        let storagepool = fresh_inmemory_database().await;
        let unfoundpage = storagepool.fetch_raw_page("nonexistent-page").await;
        let unfoundpage = storagepool.get_page_by_slug("nonexistent-page").await;
        assert!(unfoundpage.is_err());
    }

    #[tokio::test(threaded_scheduler)]
    async fn fetching_unfound_note_works() {
        let storagepool = fresh_inmemory_database().await;
        let unfoundnote = storagepool.fetch_raw_note("nonexistent-note").await;
        assert!(unfoundnote.is_err());
    }
    // Request for the page by title. If the page exists, return it.
    // If the page doesn't exist, create it then return it anyway.
    // There should be at least one note, the root note.

    #[tokio::test(threaded_scheduler)]
    async fn cloning_storagepool_is_ok() {
    async fn fetching_unfound_page_by_title_works() {
        let title = "Nonexistent Page";
        let now = chrono::Utc::now();
        let storagepool = fresh_inmemory_database().await;
        let storagepool2 = storagepool.clone();
        let unfoundnote = storagepool2.fetch_raw_note("nonexistent-note").await;
        assert!(unfoundnote.is_err());
        let unfoundnote = storagepool.fetch_raw_note("nonexistent-note").await;
        assert!(unfoundnote.is_err());
        let newpageresult = storagepool.get_page_by_title(&title).await;
        assert!(newpageresult.is_ok(), "{:?}", newpageresult);
        let (newpage, newnotes) = newpageresult.unwrap();

        assert_eq!(newpage.title, title, "{:?}", newpage.title);
        assert_eq!(newpage.slug, "nonexistent-page");
    }

    #[tokio::test(threaded_scheduler)]
    async fn can_save_a_note() {
        let storagepool = fresh_inmemory_database().await;
        let note_id = storagepool.insert_note("noteid", "notecontent", "note").await;
        assert!(note_id.is_ok(), "{:?}", note_id);
        let note_id = note_id.unwrap();
        assert!(note_id > 0);

        let foundnote = storagepool.fetch_raw_note("noteid").await;
        assert!(foundnote.is_ok(), "{:?}", foundnote);
        let foundnote = foundnote.unwrap();
        assert_eq!(foundnote.content, "notecontent");
        assert_eq!(foundnote.notetype, "note");
    }

    #[tokio::test(threaded_scheduler)]
    async fn can_save_a_page() {
        let storagepool = fresh_inmemory_database().await;
        let page_id = storagepool.insert_page("pageid", "Test page").await;
        assert!(page_id.is_ok(), "{:?}", page_id);

        let page = storagepool.fetch_raw_page("pageid").await;
        assert!(page.is_ok(), "{:?}", page);
        let page = page.unwrap();
        assert_eq!(page.title, "Test page");
        assert!(page.note_id > 0);
    }

    #[tokio::test(threaded_scheduler)]
    async fn reports_note_update_failure() {
        let storagepool = fresh_inmemory_database().await;
        let note_id = storagepool.insert_note("noteid", "notecontent", "note").await;
        assert!(note_id.is_ok(), "{:?}", note_id);

        let update = storagepool.update_raw_note("badnote", "Bad Note Content").await;
        assert!(update.is_err());
    }

    #[tokio::test(threaded_scheduler)]
    async fn can_update_a_note() {
        let storagepool = fresh_inmemory_database().await;
        let note_id = storagepool.insert_note("noteid", "notecontent", "note").await;
        assert!(note_id.is_ok(), "{:?}", note_id);

        let update = storagepool.update_raw_note("noteid", "Good Note Content").await;
        assert!(update.is_ok(), "{:?}", update);

        let note = storagepool.fetch_raw_note("noteid").await;
        assert!(note.is_ok(), "{:?}", note);
        let note = note.unwrap();
        assert_eq!(note.content, "Good Note Content");
    }
    // // TODO: This should be 1, not 0
    // assert_eq!(newnotes.len(), 0);
    // // assert_eq!(newnotes[0].notetype, "root");
    // // assert_eq!(newpage.note_id, newnotes[0].id);
    //
    // assert!((newpage.creation_date - now).num_minutes() < 1.0);
    // assert!((newpage.updated_date - now).num_minutes() < 1.0);
    // assert!((newpage.lastview_date - now).num_minutes() < 1.0);
    // assert!(newpage.deleted_date.is_none());
    // }
}

@@ -0,0 +1,72 @@
use chrono::{DateTime, Utc};
use derive_builder::Builder;
use serde::{Deserialize, Serialize};
use sqlx::{self, FromRow};

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub struct RawPage {
    pub id: i64,
    pub slug: String,
    pub title: String,
    pub note_id: i64,
    pub creation_date: DateTime<Utc>,
    pub updated_date: DateTime<Utc>,
    pub lastview_date: DateTime<Utc>,
    pub deleted_date: Option<DateTime<Utc>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub struct RawNote {
    pub id: i64,
    pub uuid: String,
    pub content: String,
    pub notetype: String,
    pub creation_date: DateTime<Utc>,
    pub updated_date: DateTime<Utc>,
    pub lastview_date: DateTime<Utc>,
    pub deleted_date: Option<DateTime<Utc>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
pub struct NewPage {
    pub slug: String,
    pub title: String,
    pub note_id: i64,
    #[builder(default = "chrono::Utc::now()")]
    pub creation_date: DateTime<Utc>,
    #[builder(default = "chrono::Utc::now()")]
    pub updated_date: DateTime<Utc>,
    #[builder(default = "chrono::Utc::now()")]
    pub lastview_date: DateTime<Utc>,
    #[builder(default)]
    pub deleted_date: Option<DateTime<Utc>>,
}

#[derive(Clone, Serialize, Deserialize, Debug, Builder)]
pub struct NewNote {
    pub uuid: String,
    pub content: String,
    #[builder(default = r#""note".to_string()"#)]
    pub notetype: String,
    #[builder(default = "chrono::Utc::now()")]
    pub creation_date: DateTime<Utc>,
    #[builder(default = "chrono::Utc::now()")]
    pub updated_date: DateTime<Utc>,
    #[builder(default = "chrono::Utc::now()")]
    pub lastview_date: DateTime<Utc>,
    #[builder(default)]
    pub deleted_date: Option<DateTime<Utc>>,
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn can_build_new_note() {
        let now = chrono::Utc::now();
        // Placeholder values: uuid and content have no builder defaults.
        let newnote = NewNoteBuilder::default()
            .uuid("test-uuid".to_string())
            .content("test content".to_string())
            .build()
            .unwrap();
        assert!((newnote.creation_date - now).num_minutes() < 1);
        assert!((newnote.updated_date - now).num_minutes() < 1);
        assert!((newnote.lastview_date - now).num_minutes() < 1);
        assert!(newnote.deleted_date.is_none());
    }
}
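
Editor's aside: derive_builder treats the string given to a builder default attribute as a Rust expression, which is why the notetype default above is written as an expression that yields a String. As a usage sketch (not part of this commit, all values are placeholders), the NewPage builder can lean on the date defaults the same way:

    #[test]
    fn can_build_new_page_sketch() {
        // Editor's sketch: placeholder slug/title/note_id; the three dates
        // come from the #[builder(default = ...)] attributes above.
        let new_page = NewPageBuilder::default()
            .slug("my-first-page".to_string())
            .title("My First Page".to_string())
            .note_id(1)
            .build()
            .unwrap();
        assert_eq!(new_page.slug, "my-first-page");
        assert!(new_page.deleted_date.is_none());
    }
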
@@ -0,0 +1,38 @@
SELECT parent_uuid, uuid, content, notetype, nature, position FROM (

WITH RECURSIVE children(
    parent_id,
    parent_uuid,
    id,
    uuid,
    content,
    notetype,
    nature,
    position,
    cycle
) AS (

    SELECT
        notes.id,
        notes.uuid,
        notes.id,
        notes.uuid,
        notes.content,
        notes.notetype,
        'page',
        0,
        ','||notes.id||','
    FROM notes INNER JOIN pages
        ON pages.note_id = notes.id
    WHERE pages.id = ?
        AND notes.notetype = 'page'

    UNION
    SELECT
        note_relationships.parent_id,
        children.uuid,
        notes.id,
        notes.uuid,
        notes.content,
        notes.notetype,
        note_relationships.nature,
        note_relationships.position,
        children.cycle||notes.id||','
    FROM notes
        INNER JOIN note_relationships ON notes.id = note_relationships.note_id
        INNER JOIN children ON note_relationships.parent_id = children.id
    WHERE children.cycle NOT LIKE '%,'||notes.id||',%'
    ORDER BY note_relationships.position)
SELECT * from children);
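
Editor's note: this appears to be the query the Rust code refers to as sql/select_note_collection_for_root.sql. A minimal sketch (not part of this commit) of invoking it through sqlx, with the row type assumed from the columns the outer SELECT projects:

    // Hypothetical row type; the real code may map these columns differently.
    #[derive(sqlx::FromRow)]
    struct NoteTreeRow {
        parent_uuid: String,
        uuid: String,
        content: String,
        notetype: String,
        nature: String,
        position: i64,
    }

    // Bind the page id and collect the flattened note tree.
    async fn fetch_note_tree(
        pool: &sqlx::sqlite::SqlitePool,
        page_id: i64,
    ) -> sqlx::Result<Vec<NoteTreeRow>> {
        sqlx::query_as::<_, NoteTreeRow>(include_str!("sql/select_note_collection_for_root.sql"))
            .bind(page_id)
            .fetch_all(pool)
            .await
    }
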
@@ -1,9 +1,12 @@
use crate::errors::NoteStoreError;
use crate::structs::{RawNote, RawPage};
use crate::row_structs::{RawNote, RawPage};
use chrono;
use friendly_id;
use sqlx;
use sqlx::{sqlite::{Sqlite, SqlitePool}, Executor, Done};
use sqlx::{
    sqlite::{Sqlite, SqlitePool},
    Done, Executor,
};
use std::sync::Arc;

/// A handle to our Sqlite database.

@@ -13,92 +16,199 @@ pub struct NoteStore(Arc<SqlitePool>);
type NoteResult<T> = core::result::Result<T, NoteStoreError>;
type SqlResult<T> = sqlx::Result<T>;

async fn insert_note<'e, E>(executor: E, id: &str, content: &str, notetype: &str) -> SqlResult<i64>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let insert_one_note_sql = include_str!("sql/insert_one_note.sql");
    let now = chrono::Utc::now();
    Ok(sqlx::query(insert_one_note_sql)
        .bind(&id)
        .bind(&content)
        .bind(&notetype)
        .bind(&now)
        .bind(&now)
        .bind(&now)
        .execute(executor)
        .await?
        .last_insert_rowid())
}

impl NoteStore {
    pub async fn new(url: &str) -> NoteResult<Self> {
        let pool = SqlitePool::connect(url).await?;
        Ok(NoteStore(Arc::new(pool)))
    }

    /// This will erase all the data in the database. Only use this
    /// if you're sure that's what you want.
    // Erase all the data in the database and restore it
    // to its original empty form. Do not use unless you
    // really, really want that to happen.
    pub async fn reset_database(&self) -> NoteResult<()> {
        let initialize_sql = include_str!("sql/initialize_database.sql");
        sqlx::query(initialize_sql).execute(&*self.0).await?;
        Ok(())
        reset_database(&*self.0).await
    }

    pub async fn fetch_raw_page(&self, id: &str) -> SqlResult<RawPage> {
        let select_one_page_sql = include_str!("sql/select_one_page.sql");
        sqlx::query_as(select_one_page_sql).bind(&id).fetch_one(&*self.0).await
    }

    pub async fn fetch_raw_note(&self, id: &str) -> SqlResult<RawNote> {
        let select_one_note_sql = include_str!("sql/select_one_note.sql");
        sqlx::query_as(select_one_note_sql).bind(&id).fetch_one(&*self.0).await
    }

    pub async fn insert_note(&self, id: &str, content: &str, notetype: &str) -> SqlResult<i64> {
        insert_note(&*self.0, id, content, notetype).await
    }

    pub async fn update_raw_note(&self, id: &str, content: &str) -> NoteResult<()> {
        let update_one_note_sql = include_str!("sql/update_one_note.sql");
        let now = chrono::Utc::now();
        let rows_updated = sqlx::query(update_one_note_sql)
            .bind(&content)
            .bind(&now)
            .bind(&now)
            .bind(&id)
            .execute(&*self.0).await?
            .rows_affected();
        match rows_updated {
            1 => Ok(()),
            _ => Err(NoteStoreError::NotFound)
        }
    }

    // TODO: We're returning the raw page with the raw note id, note
    // the friendly ID. Is there a disconnect there? It's making me
    // furiously to think.
    pub async fn insert_page(&self, id: &str, title: &str) -> SqlResult<i64> {
        let insert_one_page_sql = include_str!("sql/insert_one_page.sql");
        let new_note_id = friendly_id::create();
        let now = chrono::Utc::now();

    /// Fetch page by slug
    ///
    /// Supports the use case of the user navigating to a known place
    /// via a bookmark or other URL. Since the title isn't clear from
    /// the slug, the slug is insufficient to generate a new page, so
    /// this use case says that in the event of a failure to find the
    /// requested page, return a basic NotFound.
    pub async fn get_page_by_slug(&self, slug: &str) -> NoteResult<(RawPage, Vec<RawNote>)> {
        // let select_note_collection_for_root = include_str!("sql/select_note_collection_for_root.sql");
        let mut tx = self.0.begin().await?;

        let note_id = insert_note(&mut tx, &new_note_id, &"", &"page").await?;

        let page_id = sqlx::query(insert_one_page_sql)
            .bind(&id)
            .bind(&title)
            .bind(&note_id)
            .bind(&now)
            .bind(&now)
            .bind(&now)
            .execute(&mut tx)
            .await?
            .last_insert_rowid();

        // let notes = sqlx::query_as(select_note_collection_for_root)
        //     .bind(page.note_id)
        //     .fetch(&tx)
        //     .await?;
        tx.commit().await?;
        Ok(page_id)
        Ok((page, vec![]))
    }

    pub async fn get_page_by_title(&self, title: &str) -> NoteResult<(RawPage, Vec<RawNote>)> {
        let mut tx = self.0.begin().await?;
        let page = match select_page_by_title(&mut tx, title).await {
            Ok(page) => page,
            Err(sqlx::Error::RowNotFound) => {
                match create_page_for_title(&mut tx, title).await {
                    Ok(page) => page,
                    Err(e) => return Err(e.into()),
                }
            },
            Err(e) => return Err(e.into()),
        };
        // Todo: Replace vec with the results of the CTE
        return Ok((page, vec![]));
    }
}
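
Editor's sketch (not part of this commit): exercising the public NoteStore API above end to end; the connection URL and slug are placeholders.

    async fn demo() -> Result<(), NoteStoreError> {
        let store = NoteStore::new("sqlite::memory:").await?;
        store.reset_database().await?;
        match store.get_page_by_slug("nonexistent-page").await {
            // get_page_by_slug returns the raw page plus its notes.
            Ok((page, notes)) => println!("found {} with {} notes", page.title, notes.len()),
            // Per the doc comment above, a missing slug is a plain error.
            Err(err) => eprintln!("page not found: {:?}", err),
        }
        Ok(())
    }
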

//  ___     _          _
// | _ \_ _(_)_ ____ _| |_ ___
// |  _/ '_| \ V / _` |  _/ -_)
// |_| |_| |_|\_/\__,_|\__\___|
//

// I'm putting a lot of faith in Rust's ability to inline stuff. I'm
// sure this is okay. But really, this lets the API be clean and
// coherent and easily readable, and hides away the gnarliness of some
// of the SQL queries.

async fn select_page_by_slug<'e, E>(executor: E, slug: &str) -> SqlResult<RawPage>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let select_one_page_by_slug_sql = concat!(
        "SELECT id, title, slug, note_id, creation_date, updated_date, ",
        "lastview_date, deleted_date FROM pages WHERE slug=?;"
    );
    sqlx::query_as(select_one_page_by_slug_sql)
        .bind(&slug)
        .fetch_one(executor)
        .await
}

async fn select_page_by_title<'e, E>(executor: E, title: &str) -> SqlResult<RawPage>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let select_one_page_by_title_sql = concat!(
        "SELECT id, title, slug, note_id, creation_date, updated_date, ",
        "lastview_date, deleted_date FROM pages WHERE title=?;"
    );
    sqlx::query_as(select_one_page_by_title_sql)
        .bind(&title)
        .fetch_one(executor)
        .await
}

async fn reset_database<'e, E>(executor: E) -> SqlResult<()>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let initialize_sql = include_str!("sql/initialize_database.sql");
    sqlx::query(initialize_sql).execute(executor).await?;
    Ok(())
}

async fn get_note_collection_for_root<'e, E>(executor: E, root: i64) -> SqlResult<Vec<RawNote>>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let select_note_collection_for_root = include_str!("sql/select_note_collection_for_root.sql");
    sqlx::query_as(select_note_collection_for_root)
        .bind(&root)
        .fetch_all(executor)
        .await
}

async fn insert_one_new_note<'e, E>(executor: E, note: &NewNote) -> SqlResult<i64>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let insert_one_note_sql = concat!(
        "INSERT INTO notes ( ",
        "    uuid, ",
        "    content, ",
        "    notetype, ",
        "    creation_date, ",
        "    updated_date, ",
        "    lastview_date) ",
        "VALUES (?, ?, ?, ?, ?, ?);"
    );

    Ok(sqlx::query(insert_one_note_sql)
        .bind(&note.uuid)
        .bind(&note.content)
        .bind(&note.notetype)
        .bind(&note.creation_date)
        .bind(&note.updated_date)
        .bind(&note.lastview_date)
        .execute(executor)
        .await?
        .last_insert_rowid())
}

// Given an initial string and an existing collection of slugs,
// generate a new slug that does not conflict with the current
// collection.
async fn generate_slug<'e, E>(executor: E, title: &str) -> SqlResult<String>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    lazy_static! {
        static ref RE_STRIP_NUM: Regex = Regex::new(r"-\d+$").unwrap();
    }
    lazy_static! {
        static ref RE_CAP_NUM: Regex = Regex::new(r"-(\d+)$").unwrap();
    }

    let initial_slug = slugify::slugify(title);
    let sample_slug = RE_STRIP_NUM.replace_all(&initial_slug, "");
    let similar_slugs: Vec<JustSlugs> = sqlx::query_as("SELECT slug FROM pages WHERE slug LIKE ?;")
        .bind(format!("{}%", sample_slug))
        .fetch_all(executor)
        .await?;
    let mut slug_counters: Vec<u32> = similar_slugs
        .iter()
        .filter_map(|slug| RE_CAP_NUM.captures(&slug.slug))
        .filter_map(|cap| cap.get(1).unwrap().as_str().parse::<u32>().ok())
        .collect();
    match slug_counters.len() {
        0 => Ok(initial_slug),
        _ => {
            slug_counters.sort_unstable();
            Ok(format!("{}-{}", initial_slug, slug_counters.pop().unwrap() + 1))
        }
    }
}

async fn insert_one_new_page<'e, E>(executor: E, page: &NewPage) -> SqlResult<i64>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    let insert_one_page_sql = concat!(
        "INSERT INTO pages ( ",
        "    slug, ",
        "    title, ",
        "    note_id, ",
        "    creation_date, ",
        "    updated_date, ",
        "    lastview_date) ",
        "VALUES (?, ?, ?, ?, ?, ?);"
    );

    Ok(sqlx::query(insert_one_page_sql)
        .bind(&page.slug)
        .bind(&page.title)
        .bind(&page.note_id)
        .bind(&page.creation_date)
        .bind(&page.updated_date)
        .bind(&page.lastview_date)
        .execute(executor)
        .await?
        .last_insert_rowid())
}

async fn create_page_for_title<'e, E>(executor: E, title: &str) -> SqlResult<RawPage>
where
    E: 'e + Executor<'e, Database = Sqlite>,
{
    todo!()
}
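
Editor's sketch (not part of this commit) of how create_page_for_title might eventually compose the private helpers above. It assumes the caller hands in a sqlx Transaction so the helpers can share one connection, and that the NewNote/NewPage builders from row_structs are in scope; none of these choices are the author's.

    async fn create_page_for_title_sketch(
        tx: &mut sqlx::Transaction<'_, Sqlite>,
        title: &str,
    ) -> SqlResult<RawPage> {
        // Derive a unique slug, then create the root note the page hangs off of.
        let slug = generate_slug(&mut *tx, title).await?;
        let note = NewNoteBuilder::default()
            .uuid(friendly_id::create())
            .content("".to_string())
            .notetype("page".to_string())
            .build()
            .unwrap();
        let note_id = insert_one_new_note(&mut *tx, &note).await?;
        // Insert the page row itself, then read it back as a RawPage.
        let page = NewPageBuilder::default()
            .slug(slug.clone())
            .title(title.to_string())
            .note_id(note_id)
            .build()
            .unwrap();
        insert_one_new_page(&mut *tx, &page).await?;
        select_page_by_slug(&mut *tx, &slug).await
    }
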
@@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
use sqlx::{self, FromRow};

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub struct RawPage {
pub(crate) struct RawPage {
    pub id: i64,
    pub slug: String,
    pub title: String,

@@ -15,7 +15,7 @@ pub struct RawPage {
}

#[derive(Clone, Serialize, Deserialize, Debug, FromRow)]
pub struct RawNote {
pub(crate) struct RawNote {
    pub id: i64,
    pub uuid: String,
    pub content: String,

@@ -25,3 +25,59 @@ pub struct RawNote {
    pub lastview_date: DateTime<Utc>,
    pub deleted_date: Option<DateTime<Utc>>,
}

// // A Resource is either content or a URL to content that the
// // user embeds in a note. TODO: I have no idea how to do this yet,
// // but I'll figure it out.
// #[derive(Clone, Serialize, Deserialize, Debug)]
// pub struct Resource {
//     pub id: String,
//     pub content: String,
// }
//
// // A Breadcrumb is a component of a reference. Every element should
// // be clickable, although in practice what's going to happen is that
// // the user will be sent to the *page* with that note, then *scrolled*
// // to that note via anchor.
// #[derive(Clone, Debug)]
// pub struct Breadcrumb {
//     pub note_id: String,
//     pub summary: String,
// }
//
// // A Note is the heart of our system. It is a single object that has
// // a place in our system; it has a parent, but it also has embedded
// // references that allow it to navigate through a web of related
// // objects. It may have children. *AT THIS LAYER*, though, it is
// // returned as an array. It is up to the
// #[derive(Clone, Debug)]
// pub struct Note {
//     pub id: String,
//     pub parent_id: String,
//     pub content: String,
//     pub resources: Vec<Resource>,
//     pub note_type: String, // Describes the relationship to the parent note.
//     pub created: DateTime<Utc>,
//     pub updated: DateTime<Utc>,
//     pub viewed: DateTime<Utc>,
//     pub deleted: Option<DateTime<Utc>>,
// }
//
// pub struct Reference {
//     pub page_id: String,
//     pub page_title: String,
//     pub reference_summary_titles: Vec<Breadcrumbs>,
//     pub reference_summary: String,
// }

pub struct Page {
    pub slug: String,
    pub title: String,
    // pub notes: Vec<Notes>, // The actual notes on this page.
    // pub references: Vec<Reference>, // All other notes that reference this page.
    // pub unlinked_references: Vec<Reference>,
    pub created: DateTime<Utc>,
    pub updated: DateTime<Utc>,
    pub viewed: DateTime<Utc>,
    pub deleted: Option<DateTime<Utc>>,
}