From 70144145ee627f521ee7a9d5020dce2cb2b96373 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 18:09:29 -0700 Subject: database_entity_crud -> api_entity_crud --- rust/src/api_entity_crud.rs | 1145 ++++++++++++++++++++++++++++++++++++++ rust/src/api_helpers.rs | 2 +- rust/src/api_server.rs | 2 +- rust/src/database_entity_crud.rs | 1145 -------------------------------------- rust/src/lib.rs | 2 +- 5 files changed, 1148 insertions(+), 1148 deletions(-) create mode 100644 rust/src/api_entity_crud.rs delete mode 100644 rust/src/database_entity_crud.rs (limited to 'rust/src') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs new file mode 100644 index 00000000..01323637 --- /dev/null +++ b/rust/src/api_entity_crud.rs @@ -0,0 +1,1145 @@ +use api_helpers::*; +use chrono; +use database_models::*; +use database_schema::*; +use diesel::prelude::*; +use diesel::{self, insert_into}; +use errors::*; +use fatcat_api::models::*; +use serde_json; +use sha1::Sha1; +use std::marker::Sized; +use std::str::FromStr; +use uuid::Uuid; + +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option, + pub autoaccept: bool, +} + +/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, + * instead of macros. + * + * Notably: + * + * db_get + * db_get_rev + * db_create + * db_create_batch + * db_update + * db_delete + * db_get_history + * + * For now, these will probably be macros, until we can level up our trait/generics foo. + */ + +// Associated Type, not parametric +pub trait EntityCrud +where + Self: Sized, +{ + // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a + // specific table? + type EditRow; // EntityEditRow + type EditNewRow; + type IdentRow; // EntityIdentRow + type IdentNewRow; + type RevRow; + + fn parse_editgroup_id(&self) -> Result>; + + // Generic Methods + fn db_get( + conn: &DbConn, + ident: FatCatId + ) -> Result; + fn db_get_rev( + conn: &DbConn, + rev_id: Uuid + ) -> Result; + fn db_create( + &self, + conn: &DbConn, + edit_context: &EditContext + ) -> Result; + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result>; + fn db_update( + &self, + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result>; + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId + ) -> Result; + + // Entity-specific Methods + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result; + fn db_insert_rev( + &self, + conn: &DbConn + ) -> Result; + fn db_insert_revs( + conn: &DbConn, + models: &[&Self] + ) -> Result>; +} + +// TODO: this could be a separate trait on all entities +macro_rules! generic_parse_editgroup_id{ + () => { + fn parse_editgroup_id(&self) -> Result> { + match &self.editgroup_id { + Some(s) => Ok(Some(FatCatId::from_str(&s)?)), + None => Ok(None), + } + } + } +} + +macro_rules! generic_db_get { + ($ident_table:ident, $rev_table:ident) => { + fn db_get(conn: &DbConn, ident: FatCatId) -> Result { + let (ident, rev): (Self::IdentRow, Self::RevRow) = $ident_table::table + .find(ident.to_uuid()) + .inner_join($rev_table::table) + .first(conn)?; + + Self::db_from_row(conn, rev, Some(ident)) + } + }; +} + +macro_rules! 
generic_db_get_rev { + ($rev_table:ident) => { + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result { + let rev = $rev_table::table.find(rev_id).first(conn)?; + + Self::db_from_row(conn, rev, None) + } + }; +} + +macro_rules! generic_db_create { + // TODO: this path should call generic_db_create_batch + ($ident_table: ident, $edit_table: ident) => { + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + let rev_id = self.db_insert_rev(conn)?; + let ident: Uuid = insert_into($ident_table::table) + .values($ident_table::rev_id.eq(&rev_id)) + .returning($ident_table::id) + .get_result(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::ident_id.eq(&ident), + )) + .get_result(conn)?; + Ok(edit) + } + } +} + +macro_rules! generic_db_create_batch { + ($ident_table:ident, $edit_table:ident) => { + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into($ident_table::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning($ident_table::id) + .get_results(conn)?; + let edits: Vec = insert_into($edit_table::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + }; +} + +macro_rules! generic_db_update { + ($ident_table: ident, $edit_table: ident) => { + fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 4xx not 5xx + bail!("entity was already deleted"); + } + + let rev_id = self.db_insert_rev(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::prev_rev.eq(current.rev_id.unwrap()), + $edit_table::extra_json.eq(&self.extra), + )) + .get_result(conn)?; + + Ok(edit) + } + } +} + +macro_rules! generic_db_delete { + ($ident_table:ident, $edit_table:ident) => { + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 
4xx not 5xx + bail!("entity was already deleted"); + } + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(ident.to_uuid()), + $edit_table::rev_id.eq(None::), + $edit_table::redirect_id.eq(None::), + $edit_table::prev_rev.eq(current.rev_id), + $edit_table::extra_json.eq(&edit_context.extra_json), + )) + .get_result(conn)?; + + Ok(edit) + } + }; +} + +macro_rules! generic_db_get_history { + ($edit_table:ident) => { + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result> { + let limit = limit.unwrap_or(50); // TODO: make a static + + let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table + .inner_join(changelog::table) + .inner_join($edit_table::table) + .filter($edit_table::ident_id.eq(ident.to_uuid())) + .order(changelog::id.desc()) + .limit(limit) + .get_results(conn)?; + + let history: Result> = rows.into_iter() + .map(|(eg_row, cl_row, e_row)| { + Ok(EntityHistoryEntry { + edit: e_row.into_model()?, + editgroup: eg_row.into_model_partial(), + changelog_entry: cl_row.into_model(), + }) + }) + .collect(); + history + } + }; +} + +/* +// This would be the clean and efficient way, but see: +// https://github.com/diesel-rs/diesel/issues/1478 +// + diesel::update(container_ident::table) + .inner_join(container_edit::table.on( + container_ident::id.eq(container_edit::ident_id) + )) + .filter(container_edit::editgroup_id.eq(editgroup_id)) + .values(( + container_ident::is_live.eq(true), + container_ident::rev_id.eq(container_edit::rev_id), + container_ident::redirect_id.eq(container_edit::redirect_id), + )) + .execute()?; + +// Was previously: + + for entity in &["container", "creator", "file", "work", "release"] { + diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = entity + )).bind::(editgroup_id) + .execute(conn)?; +*/ + +// UPDATE FROM version: single query for many rows +// Works with Postgres, not Cockroach +macro_rules! generic_db_accept_edits_batch { + ($entity_name_str:expr) => { + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId, + ) -> Result { + + let count = diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = $entity_name_str + )).bind::(editgroup_id.to_uuid()) + .execute(conn)?; + Ok(count as u64) + } + } +} + +// UPDATE ROW version: single query per row +// CockroachDB version (slow, single query per row) +macro_rules! generic_db_accept_edits_each { + ($ident_table:ident, $edit_table:ident) => { + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId, + ) -> Result { + + // 1. select edit rows (in sql) + let edit_rows: Vec = $edit_table::table + .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) + .get_results(conn)?; + // 2. create ident rows (in rust) + let ident_rows: Vec = edit_rows + .iter() + .map(|edit| + Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + + } + ) + .collect(); + /* + // 3. 
upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ + // 3. update every row individually + let count = ident_rows.len() as u64; + for row in ident_rows { + diesel::update(&row) + .set(&row) + .execute(conn)?; + } + Ok(count) + } + }; +} + +macro_rules! generic_db_insert_rev { + () => { + fn db_insert_rev(&self, conn: &DbConn) -> Result { + Self::db_insert_revs(conn, &vec![self]).map(|id_list| id_list[0]) + } + } +} + +impl EntityCrud for ContainerEntity { + type EditRow = ContainerEditRow; + type EditNewRow = ContainerEditNewRow; + type IdentRow = ContainerIdentRow; + type IdentNewRow = ContainerIdentNewRow; + type RevRow = ContainerRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(container_ident, container_rev); + generic_db_get_rev!(container_rev); + generic_db_create!(container_ident, container_edit); + generic_db_create_batch!(container_ident, container_edit); + generic_db_update!(container_ident, container_edit); + generic_db_delete!(container_ident, container_edit); + generic_db_get_history!(container_edit); + generic_db_accept_edits_batch!("container"); + //generic_db_accept_edits_each!(container_ident, container_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(ContainerEntity { + issnl: rev_row.issnl, + wikidata_qid: rev_row.wikidata_qid, + publisher: rev_row.publisher, + name: rev_row.name, + abbrev: rev_row.abbrev, + coden: rev_row.coden, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + editgroup_id: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + if let Some(ref extid) = entity.issnl { + check_issn(extid)?; + } + } + + let rev_ids: Vec = insert_into(container_rev::table) + .values( + models + .iter() + .map(|model| ContainerRevNewRow { + name: model.name.clone(), + publisher: model.publisher.clone(), + issnl: model.issnl.clone(), + wikidata_qid: model.wikidata_qid.clone(), + abbrev: model.abbrev.clone(), + coden: model.coden.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(container_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for CreatorEntity { + type EditRow = CreatorEditRow; + type EditNewRow = CreatorEditNewRow; + type IdentRow = CreatorIdentRow; + type IdentNewRow = CreatorIdentNewRow; + type RevRow = CreatorRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(creator_ident, creator_rev); + generic_db_get_rev!(creator_rev); + generic_db_create!(creator_ident, creator_edit); + generic_db_create_batch!(creator_ident, creator_edit); + generic_db_update!(creator_ident, creator_edit); + generic_db_delete!(creator_ident, creator_edit); + generic_db_get_history!(creator_edit); + generic_db_accept_edits_batch!("creator"); + //generic_db_accept_edits_each!(creator_ident, creator_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + 
rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + Ok(CreatorEntity { + display_name: rev_row.display_name, + given_name: rev_row.given_name, + surname: rev_row.surname, + orcid: rev_row.orcid, + wikidata_qid: rev_row.wikidata_qid, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.orcid { + check_orcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + let rev_ids: Vec = insert_into(creator_rev::table) + .values( + models + .iter() + .map(|model| CreatorRevNewRow { + display_name: model.display_name.clone(), + given_name: model.given_name.clone(), + surname: model.surname.clone(), + orcid: model.orcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(creator_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for FileEntity { + type EditRow = FileEditRow; + type EditNewRow = FileEditNewRow; + type IdentRow = FileIdentRow; + type IdentNewRow = FileIdentNewRow; + type RevRow = FileRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(file_ident, file_rev); + generic_db_get_rev!(file_rev); + generic_db_create!(file_ident, file_edit); + generic_db_create_batch!(file_ident, file_edit); + generic_db_update!(file_ident, file_edit); + generic_db_delete!(file_ident, file_edit); + generic_db_get_history!(file_edit); + generic_db_accept_edits_batch!("file"); + //generic_db_accept_edits_each!(file_ident, file_edit); + generic_db_insert_rev!(); + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let releases: Vec = file_release::table + .filter(file_release::file_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + let urls: Vec = file_rev_url::table + .filter(file_rev_url::file_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map(|r: FileRevUrlRow| FileEntityUrls { + rel: r.rel, + url: r.url, + }) + .collect(); + + Ok(FileEntity { + sha1: rev_row.sha1, + sha256: rev_row.sha256, + md5: rev_row.md5, + size: rev_row.size.map(|v| v as i64), + urls: Some(urls), + mimetype: rev_row.mimetype, + releases: Some(releases.iter().map(|fcid| fcid.to_string()).collect()), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + let rev_ids: Vec = insert_into(file_rev::table) + .values( + models + .iter() + .map(|model| FileRevNewRow { + size: model.size, + sha1: model.sha1.clone(), + sha256: model.sha256.clone(), + md5: model.md5.clone(), + mimetype: model.mimetype.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(file_rev::id) + .get_results(conn)?; + + let mut file_release_rows: Vec = vec![]; + let mut file_url_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.releases { + None => (), + Some(release_list) => { + let these_release_rows: Result> = release_list + .iter() + .map(|r| { + Ok(FileReleaseRow { + file_rev: rev_id.clone(), + target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), + }) + }) + .collect(); + file_release_rows.extend(these_release_rows?); + } + }; + + match &model.urls { + None => (), + Some(url_list) => { + let these_url_rows: Vec = url_list + .into_iter() + .map(|u| FileRevUrlNewRow { + file_rev: rev_id.clone(), + rel: u.rel.clone(), + url: u.url.clone(), + }) + .collect(); + file_url_rows.extend(these_url_rows); + } + }; + } + + if !file_release_rows.is_empty() { + // TODO: shouldn't it be "file_rev_release"? 
+ insert_into(file_release::table) + .values(file_release_rows) + .execute(conn)?; + } + + if !file_url_rows.is_empty() { + insert_into(file_rev_url::table) + .values(file_url_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for ReleaseEntity { + type EditRow = ReleaseEditRow; + type EditNewRow = ReleaseEditNewRow; + type IdentRow = ReleaseIdentRow; + type IdentNewRow = ReleaseIdentNewRow; + type RevRow = ReleaseRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(release_ident, release_rev); + generic_db_get_rev!(release_rev); + //generic_db_create!(release_ident, release_edit); + //generic_db_create_batch!(release_ident, release_edit); + generic_db_update!(release_ident, release_edit); + generic_db_delete!(release_ident, release_edit); + generic_db_get_history!(release_edit); + generic_db_accept_edits_batch!("release"); + //generic_db_accept_edits_each!(release_ident, release_edit); + generic_db_insert_rev!(); + + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + let mut edits = Self::db_create_batch(conn, edit_context, &vec![self])?; + // probably a more elegant way to destroy the vec and take first element + Ok(edits.pop().unwrap()) + } + + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + // This isn't the generic implementation because we need to create Work entities for each + // of the release entities passed (at least in the common case) + + // Generate the set of new work entities to insert (usually one for each release, but some + // releases might be pointed to a work already) + let mut new_work_models: Vec<&WorkEntity> = vec![]; + for entity in models { + if entity.work_id.is_none() { + new_work_models.push(&WorkEntity { + ident: None, + revision: None, + redirect: None, + state: None, + editgroup_id: None, + extra: None, + }); + }; + } + + // create the works, then pluck the list of idents from the result + let new_work_edits = + WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; + let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); + + // Copy all the release models, and ensure that each has work_id set, using the new work + // idents. There should be one new work ident for each release missing one. + let models_with_work_ids: Vec = models + .iter() + .map(|model| { + let mut model = (*model).clone(); + if model.work_id.is_none() { + model.work_id = + Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) + } + model + }) + .collect(); + let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); + let models = model_refs.as_slice(); + + // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
+ let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into(release_ident::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning(release_ident::id) + .get_results(conn)?; + let edits: Vec = insert_into(release_edit::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let refs: Vec = release_ref::table + .filter(release_ref::release_rev.eq(rev_row.id)) + .order(release_ref::index_val.asc()) + .get_results(conn)? + .into_iter() + .map(|r: ReleaseRefRow| ReleaseRef { + index: r.index_val, + key: r.key, + extra: r.extra_json, + container_title: r.container_title, + year: r.year, + title: r.title, + locator: r.locator, + target_release_id: r.target_release_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + }) + .collect(); + + let contribs: Vec = release_contrib::table + .filter(release_contrib::release_rev.eq(rev_row.id)) + .order(( + release_contrib::role.asc(), + release_contrib::index_val.asc(), + )) + .get_results(conn)? + .into_iter() + .map(|c: ReleaseContribRow| ReleaseContrib { + index: c.index_val, + raw_name: c.raw_name, + role: c.role, + extra: c.extra_json, + creator_id: c.creator_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + creator: None, + }) + .collect(); + + let abstracts: Vec = release_rev_abstract::table + .inner_join(abstracts::table) + .filter(release_rev_abstract::release_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map( + |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { + sha1: Some(r.0.abstract_sha1), + mimetype: r.0.mimetype, + lang: r.0.lang, + content: Some(r.1.content), + }, + ) + .collect(); + + Ok(ReleaseEntity { + title: rev_row.title, + release_type: rev_row.release_type, + release_status: rev_row.release_status, + release_date: rev_row + .release_date + .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)), + doi: rev_row.doi, + pmid: rev_row.pmid, + pmcid: rev_row.pmcid, + isbn13: rev_row.isbn13, + core_id: rev_row.core_id, + wikidata_qid: rev_row.wikidata_qid, + volume: rev_row.volume, + issue: rev_row.issue, + pages: rev_row.pages, + files: None, + container: None, + container_id: rev_row + .container_ident_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + publisher: rev_row.publisher, + language: rev_row.language, + work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), + refs: Some(refs), + contribs: Some(contribs), + abstracts: Some(abstracts), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.doi { + check_doi(extid)?; + } + if let Some(ref extid) = entity.pmid { + check_pmid(extid)?; + } + if let Some(ref extid) = entity.pmcid { + check_pmcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + let rev_ids: Vec = insert_into(release_rev::table) + .values(models + .iter() + .map(|model| { + Ok(ReleaseRevNewRow { + title: model.title.clone(), + release_type: model.release_type.clone(), + release_status: model.release_status.clone(), + release_date: model.release_date.map(|v| v.naive_utc().date()), + doi: model.doi.clone(), + pmid: model.pmid.clone(), + pmcid: model.pmcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + isbn13: model.isbn13.clone(), + core_id: model.core_id.clone(), + volume: model.volume.clone(), + issue: model.issue.clone(), + pages: model.pages.clone(), + work_ident_id: match model.work_id.clone() { + None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), + Some(s) => FatCatId::from_str(&s)?.to_uuid(), + }, + container_ident_id: match model.container_id.clone() { + None => None, + Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), + }, + publisher: model.publisher.clone(), + language: model.language.clone(), + extra_json: model.extra.clone() + }) + }) + .collect::>>()?) 
+ .returning(release_rev::id) + .get_results(conn)?; + + let mut release_ref_rows: Vec = vec![]; + let mut release_contrib_rows: Vec = vec![]; + let mut abstract_rows: Vec = vec![]; + let mut release_abstract_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.refs { + None => (), + Some(ref_list) => { + let these_ref_rows: Vec = ref_list + .iter() + .map(|r| { + Ok(ReleaseRefNewRow { + release_rev: rev_id.clone(), + target_release_ident_id: match r.target_release_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + index_val: r.index, + key: r.key.clone(), + container_title: r.container_title.clone(), + year: r.year, + title: r.title.clone(), + locator: r.locator.clone(), + extra_json: r.extra.clone(), + }) + }) + .collect::>>()?; + release_ref_rows.extend(these_ref_rows); + } + }; + + match &model.contribs { + None => (), + Some(contrib_list) => { + let these_contrib_rows: Vec = contrib_list + .iter() + .map(|c| { + Ok(ReleaseContribNewRow { + release_rev: rev_id.clone(), + creator_ident_id: match c.creator_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + raw_name: c.raw_name.clone(), + index_val: c.index, + role: c.role.clone(), + extra_json: c.extra.clone(), + }) + }) + .collect::>>()?; + release_contrib_rows.extend(these_contrib_rows); + } + }; + + if let Some(abstract_list) = &model.abstracts { + // For rows that specify content, we need to insert the abstract if it doesn't exist + // already + let new_abstracts: Vec = abstract_list + .iter() + .filter(|ea| ea.content.is_some()) + .map(|c| AbstractsRow { + sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), + content: c.content.clone().unwrap(), + }) + .collect(); + abstract_rows.extend(new_abstracts); + let new_release_abstract_rows: Vec = abstract_list + .into_iter() + .map(|c| { + Ok(ReleaseRevAbstractNewRow { + release_rev: rev_id.clone(), + abstract_sha1: match c.content { + Some(ref content) => Sha1::from(content).hexdigest(), + None => match c.sha1.clone() { + Some(v) => v, + None => bail!("either abstract_sha1 or content is required"), + }, + }, + lang: c.lang.clone(), + mimetype: c.mimetype.clone(), + }) + }) + .collect::>>()?; + release_abstract_rows.extend(new_release_abstract_rows); + } + } + + if !release_ref_rows.is_empty() { + insert_into(release_ref::table) + .values(release_ref_rows) + .execute(conn)?; + } + + if !release_contrib_rows.is_empty() { + insert_into(release_contrib::table) + .values(release_contrib_rows) + .execute(conn)?; + } + + if !abstract_rows.is_empty() { + // Sort of an "upsert"; only inserts new abstract rows if they don't already exist + insert_into(abstracts::table) + .values(&abstract_rows) + .on_conflict(abstracts::sha1) + .do_nothing() + .execute(conn)?; + insert_into(release_rev_abstract::table) + .values(release_abstract_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for WorkEntity { + type EditRow = WorkEditRow; + type EditNewRow = WorkEditNewRow; + type IdentRow = WorkIdentRow; + type IdentNewRow = WorkIdentNewRow; + type RevRow = WorkRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(work_ident, work_rev); + generic_db_get_rev!(work_rev); + generic_db_create!(work_ident, work_edit); + generic_db_create_batch!(work_ident, work_edit); + generic_db_update!(work_ident, work_edit); + generic_db_delete!(work_ident, work_edit); + generic_db_get_history!(work_edit); + generic_db_accept_edits_batch!("work"); + 
//generic_db_accept_edits_each!(work_ident, work_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(WorkEntity { + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + let rev_ids: Vec = insert_into(work_rev::table) + .values( + models + .iter() + .map(|model| WorkRevNewRow { + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(work_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 8ab9dcb3..456646b4 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -8,7 +8,7 @@ use errors::*; use regex::Regex; use std::str::FromStr; use uuid::Uuid; -use database_entity_crud::EntityCrud; +use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index 5fce8e64..70ebb02a 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -2,7 +2,7 @@ use api_helpers::*; use chrono; -use database_entity_crud::{EditContext, EntityCrud}; +use api_entity_crud::{EditContext, EntityCrud}; use database_models::*; use database_schema::*; use diesel::prelude::*; diff --git a/rust/src/database_entity_crud.rs b/rust/src/database_entity_crud.rs deleted file mode 100644 index 01323637..00000000 --- a/rust/src/database_entity_crud.rs +++ /dev/null @@ -1,1145 +0,0 @@ -use api_helpers::*; -use chrono; -use database_models::*; -use database_schema::*; -use diesel::prelude::*; -use diesel::{self, insert_into}; -use errors::*; -use fatcat_api::models::*; -use serde_json; -use sha1::Sha1; -use std::marker::Sized; -use std::str::FromStr; -use uuid::Uuid; - -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option, - pub autoaccept: bool, -} - -/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, - * instead of macros. - * - * Notably: - * - * db_get - * db_get_rev - * db_create - * db_create_batch - * db_update - * db_delete - * db_get_history - * - * For now, these will probably be macros, until we can level up our trait/generics foo. - */ - -// Associated Type, not parametric -pub trait EntityCrud -where - Self: Sized, -{ - // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a - // specific table? 
- type EditRow; // EntityEditRow - type EditNewRow; - type IdentRow; // EntityIdentRow - type IdentNewRow; - type RevRow; - - fn parse_editgroup_id(&self) -> Result>; - - // Generic Methods - fn db_get( - conn: &DbConn, - ident: FatCatId - ) -> Result; - fn db_get_rev( - conn: &DbConn, - rev_id: Uuid - ) -> Result; - fn db_create( - &self, - conn: &DbConn, - edit_context: &EditContext - ) -> Result; - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result>; - fn db_update( - &self, - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result>; - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId - ) -> Result; - - // Entity-specific Methods - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result; - fn db_insert_rev( - &self, - conn: &DbConn - ) -> Result; - fn db_insert_revs( - conn: &DbConn, - models: &[&Self] - ) -> Result>; -} - -// TODO: this could be a separate trait on all entities -macro_rules! generic_parse_editgroup_id{ - () => { - fn parse_editgroup_id(&self) -> Result> { - match &self.editgroup_id { - Some(s) => Ok(Some(FatCatId::from_str(&s)?)), - None => Ok(None), - } - } - } -} - -macro_rules! generic_db_get { - ($ident_table:ident, $rev_table:ident) => { - fn db_get(conn: &DbConn, ident: FatCatId) -> Result { - let (ident, rev): (Self::IdentRow, Self::RevRow) = $ident_table::table - .find(ident.to_uuid()) - .inner_join($rev_table::table) - .first(conn)?; - - Self::db_from_row(conn, rev, Some(ident)) - } - }; -} - -macro_rules! generic_db_get_rev { - ($rev_table:ident) => { - fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result { - let rev = $rev_table::table.find(rev_id).first(conn)?; - - Self::db_from_row(conn, rev, None) - } - }; -} - -macro_rules! generic_db_create { - // TODO: this path should call generic_db_create_batch - ($ident_table: ident, $edit_table: ident) => { - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - let rev_id = self.db_insert_rev(conn)?; - let ident: Uuid = insert_into($ident_table::table) - .values($ident_table::rev_id.eq(&rev_id)) - .returning($ident_table::id) - .get_result(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::ident_id.eq(&ident), - )) - .get_result(conn)?; - Ok(edit) - } - } -} - -macro_rules! 
generic_db_create_batch { - ($ident_table:ident, $edit_table:ident) => { - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into($ident_table::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(rev_id.clone()), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning($ident_table::id) - .get_results(conn)?; - let edits: Vec = insert_into($edit_table::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - }; -} - -macro_rules! generic_db_update { - ($ident_table: ident, $edit_table: ident) => { - fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if current.is_live != true { - // TODO: what if isn't live? 4xx not 5xx - bail!("can't delete an entity that doesn't exist yet"); - } - if current.rev_id.is_none() { - // TODO: what if it's already deleted? 4xx not 5xx - bail!("entity was already deleted"); - } - - let rev_id = self.db_insert_rev(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(&ident.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::prev_rev.eq(current.rev_id.unwrap()), - $edit_table::extra_json.eq(&self.extra), - )) - .get_result(conn)?; - - Ok(edit) - } - } -} - -macro_rules! generic_db_delete { - ($ident_table:ident, $edit_table:ident) => { - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if current.is_live != true { - // TODO: what if isn't live? 4xx not 5xx - bail!("can't delete an entity that doesn't exist yet"); - } - if current.rev_id.is_none() { - // TODO: what if it's already deleted? 4xx not 5xx - bail!("entity was already deleted"); - } - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(ident.to_uuid()), - $edit_table::rev_id.eq(None::), - $edit_table::redirect_id.eq(None::), - $edit_table::prev_rev.eq(current.rev_id), - $edit_table::extra_json.eq(&edit_context.extra_json), - )) - .get_result(conn)?; - - Ok(edit) - } - }; -} - -macro_rules! 
generic_db_get_history { - ($edit_table:ident) => { - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result> { - let limit = limit.unwrap_or(50); // TODO: make a static - - let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table - .inner_join(changelog::table) - .inner_join($edit_table::table) - .filter($edit_table::ident_id.eq(ident.to_uuid())) - .order(changelog::id.desc()) - .limit(limit) - .get_results(conn)?; - - let history: Result> = rows.into_iter() - .map(|(eg_row, cl_row, e_row)| { - Ok(EntityHistoryEntry { - edit: e_row.into_model()?, - editgroup: eg_row.into_model_partial(), - changelog_entry: cl_row.into_model(), - }) - }) - .collect(); - history - } - }; -} - -/* -// This would be the clean and efficient way, but see: -// https://github.com/diesel-rs/diesel/issues/1478 -// - diesel::update(container_ident::table) - .inner_join(container_edit::table.on( - container_ident::id.eq(container_edit::ident_id) - )) - .filter(container_edit::editgroup_id.eq(editgroup_id)) - .values(( - container_ident::is_live.eq(true), - container_ident::rev_id.eq(container_edit::rev_id), - container_ident::redirect_id.eq(container_edit::redirect_id), - )) - .execute()?; - -// Was previously: - - for entity in &["container", "creator", "file", "work", "release"] { - diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = entity - )).bind::(editgroup_id) - .execute(conn)?; -*/ - -// UPDATE FROM version: single query for many rows -// Works with Postgres, not Cockroach -macro_rules! generic_db_accept_edits_batch { - ($entity_name_str:expr) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - - let count = diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str - )).bind::(editgroup_id.to_uuid()) - .execute(conn)?; - Ok(count as u64) - } - } -} - -// UPDATE ROW version: single query per row -// CockroachDB version (slow, single query per row) -macro_rules! generic_db_accept_edits_each { - ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - - // 1. select edit rows (in sql) - let edit_rows: Vec = $edit_table::table - .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) - .get_results(conn)?; - // 2. create ident rows (in rust) - let ident_rows: Vec = edit_rows - .iter() - .map(|edit| - Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - - } - ) - .collect(); - /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ - // 3. update every row individually - let count = ident_rows.len() as u64; - for row in ident_rows { - diesel::update(&row) - .set(&row) - .execute(conn)?; - } - Ok(count) - } - }; -} - -macro_rules! 
generic_db_insert_rev { - () => { - fn db_insert_rev(&self, conn: &DbConn) -> Result { - Self::db_insert_revs(conn, &vec![self]).map(|id_list| id_list[0]) - } - } -} - -impl EntityCrud for ContainerEntity { - type EditRow = ContainerEditRow; - type EditNewRow = ContainerEditNewRow; - type IdentRow = ContainerIdentRow; - type IdentNewRow = ContainerIdentNewRow; - type RevRow = ContainerRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(container_ident, container_rev); - generic_db_get_rev!(container_rev); - generic_db_create!(container_ident, container_edit); - generic_db_create_batch!(container_ident, container_edit); - generic_db_update!(container_ident, container_edit); - generic_db_delete!(container_ident, container_edit); - generic_db_get_history!(container_edit); - generic_db_accept_edits_batch!("container"); - //generic_db_accept_edits_each!(container_ident, container_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(ContainerEntity { - issnl: rev_row.issnl, - wikidata_qid: rev_row.wikidata_qid, - publisher: rev_row.publisher, - name: rev_row.name, - abbrev: rev_row.abbrev, - coden: rev_row.coden, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - editgroup_id: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - if let Some(ref extid) = entity.issnl { - check_issn(extid)?; - } - } - - let rev_ids: Vec = insert_into(container_rev::table) - .values( - models - .iter() - .map(|model| ContainerRevNewRow { - name: model.name.clone(), - publisher: model.publisher.clone(), - issnl: model.issnl.clone(), - wikidata_qid: model.wikidata_qid.clone(), - abbrev: model.abbrev.clone(), - coden: model.coden.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(container_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for CreatorEntity { - type EditRow = CreatorEditRow; - type EditNewRow = CreatorEditNewRow; - type IdentRow = CreatorIdentRow; - type IdentNewRow = CreatorIdentNewRow; - type RevRow = CreatorRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(creator_ident, creator_rev); - generic_db_get_rev!(creator_rev); - generic_db_create!(creator_ident, creator_edit); - generic_db_create_batch!(creator_ident, creator_edit); - generic_db_update!(creator_ident, creator_edit); - generic_db_delete!(creator_ident, creator_edit); - generic_db_get_history!(creator_edit); - generic_db_accept_edits_batch!("creator"); - //generic_db_accept_edits_each!(creator_ident, creator_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - Ok(CreatorEntity { - display_name: rev_row.display_name, - 
given_name: rev_row.given_name, - surname: rev_row.surname, - orcid: rev_row.orcid, - wikidata_qid: rev_row.wikidata_qid, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.orcid { - check_orcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - } - - let rev_ids: Vec = insert_into(creator_rev::table) - .values( - models - .iter() - .map(|model| CreatorRevNewRow { - display_name: model.display_name.clone(), - given_name: model.given_name.clone(), - surname: model.surname.clone(), - orcid: model.orcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(creator_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for FileEntity { - type EditRow = FileEditRow; - type EditNewRow = FileEditNewRow; - type IdentRow = FileIdentRow; - type IdentNewRow = FileIdentNewRow; - type RevRow = FileRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(file_ident, file_rev); - generic_db_get_rev!(file_rev); - generic_db_create!(file_ident, file_edit); - generic_db_create_batch!(file_ident, file_edit); - generic_db_update!(file_ident, file_edit); - generic_db_delete!(file_ident, file_edit); - generic_db_get_history!(file_edit); - generic_db_accept_edits_batch!("file"); - //generic_db_accept_edits_each!(file_ident, file_edit); - generic_db_insert_rev!(); - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let releases: Vec = file_release::table - .filter(file_release::file_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) - .collect(); - - let urls: Vec = file_rev_url::table - .filter(file_rev_url::file_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map(|r: FileRevUrlRow| FileEntityUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - Ok(FileEntity { - sha1: rev_row.sha1, - sha256: rev_row.sha256, - md5: rev_row.md5, - size: rev_row.size.map(|v| v as i64), - urls: Some(urls), - mimetype: rev_row.mimetype, - releases: Some(releases.iter().map(|fcid| fcid.to_string()).collect()), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - let rev_ids: Vec = insert_into(file_rev::table) - .values( - models - .iter() - .map(|model| FileRevNewRow { - size: model.size, - sha1: model.sha1.clone(), - sha256: model.sha256.clone(), - md5: model.md5.clone(), - mimetype: model.mimetype.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(file_rev::id) - .get_results(conn)?; - - let mut file_release_rows: Vec = vec![]; - let mut file_url_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.releases { - None => (), - Some(release_list) => { - let these_release_rows: Result> = release_list - .iter() - .map(|r| { - Ok(FileReleaseRow { - file_rev: rev_id.clone(), - target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), - }) - }) - .collect(); - file_release_rows.extend(these_release_rows?); - } - }; - - match &model.urls { - None => (), - Some(url_list) => { - let these_url_rows: Vec = url_list - .into_iter() - .map(|u| FileRevUrlNewRow { - file_rev: rev_id.clone(), - rel: u.rel.clone(), - url: u.url.clone(), - }) - .collect(); - file_url_rows.extend(these_url_rows); - } - }; - } - - if !file_release_rows.is_empty() { - // TODO: shouldn't it be "file_rev_release"? 
- insert_into(file_release::table) - .values(file_release_rows) - .execute(conn)?; - } - - if !file_url_rows.is_empty() { - insert_into(file_rev_url::table) - .values(file_url_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for ReleaseEntity { - type EditRow = ReleaseEditRow; - type EditNewRow = ReleaseEditNewRow; - type IdentRow = ReleaseIdentRow; - type IdentNewRow = ReleaseIdentNewRow; - type RevRow = ReleaseRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(release_ident, release_rev); - generic_db_get_rev!(release_rev); - //generic_db_create!(release_ident, release_edit); - //generic_db_create_batch!(release_ident, release_edit); - generic_db_update!(release_ident, release_edit); - generic_db_delete!(release_ident, release_edit); - generic_db_get_history!(release_edit); - generic_db_accept_edits_batch!("release"); - //generic_db_accept_edits_each!(release_ident, release_edit); - generic_db_insert_rev!(); - - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - let mut edits = Self::db_create_batch(conn, edit_context, &vec![self])?; - // probably a more elegant way to destroy the vec and take first element - Ok(edits.pop().unwrap()) - } - - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - // This isn't the generic implementation because we need to create Work entities for each - // of the release entities passed (at least in the common case) - - // Generate the set of new work entities to insert (usually one for each release, but some - // releases might be pointed to a work already) - let mut new_work_models: Vec<&WorkEntity> = vec![]; - for entity in models { - if entity.work_id.is_none() { - new_work_models.push(&WorkEntity { - ident: None, - revision: None, - redirect: None, - state: None, - editgroup_id: None, - extra: None, - }); - }; - } - - // create the works, then pluck the list of idents from the result - let new_work_edits = - WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; - let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); - - // Copy all the release models, and ensure that each has work_id set, using the new work - // idents. There should be one new work ident for each release missing one. - let models_with_work_ids: Vec = models - .iter() - .map(|model| { - let mut model = (*model).clone(); - if model.work_id.is_none() { - model.work_id = - Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) - } - model - }) - .collect(); - let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); - let models = model_refs.as_slice(); - - // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
- let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into(release_ident::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(rev_id.clone()), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning(release_ident::id) - .get_results(conn)?; - let edits: Vec = insert_into(release_edit::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let refs: Vec = release_ref::table - .filter(release_ref::release_rev.eq(rev_row.id)) - .order(release_ref::index_val.asc()) - .get_results(conn)? - .into_iter() - .map(|r: ReleaseRefRow| ReleaseRef { - index: r.index_val, - key: r.key, - extra: r.extra_json, - container_title: r.container_title, - year: r.year, - title: r.title, - locator: r.locator, - target_release_id: r.target_release_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - }) - .collect(); - - let contribs: Vec = release_contrib::table - .filter(release_contrib::release_rev.eq(rev_row.id)) - .order(( - release_contrib::role.asc(), - release_contrib::index_val.asc(), - )) - .get_results(conn)? - .into_iter() - .map(|c: ReleaseContribRow| ReleaseContrib { - index: c.index_val, - raw_name: c.raw_name, - role: c.role, - extra: c.extra_json, - creator_id: c.creator_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - creator: None, - }) - .collect(); - - let abstracts: Vec = release_rev_abstract::table - .inner_join(abstracts::table) - .filter(release_rev_abstract::release_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map( - |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { - sha1: Some(r.0.abstract_sha1), - mimetype: r.0.mimetype, - lang: r.0.lang, - content: Some(r.1.content), - }, - ) - .collect(); - - Ok(ReleaseEntity { - title: rev_row.title, - release_type: rev_row.release_type, - release_status: rev_row.release_status, - release_date: rev_row - .release_date - .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)), - doi: rev_row.doi, - pmid: rev_row.pmid, - pmcid: rev_row.pmcid, - isbn13: rev_row.isbn13, - core_id: rev_row.core_id, - wikidata_qid: rev_row.wikidata_qid, - volume: rev_row.volume, - issue: rev_row.issue, - pages: rev_row.pages, - files: None, - container: None, - container_id: rev_row - .container_ident_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - publisher: rev_row.publisher, - language: rev_row.language, - work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), - refs: Some(refs), - contribs: Some(contribs), - abstracts: Some(abstracts), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.doi { - check_doi(extid)?; - } - if let Some(ref extid) = entity.pmid { - check_pmid(extid)?; - } - if let Some(ref extid) = entity.pmcid { - check_pmcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - } - - let rev_ids: Vec = insert_into(release_rev::table) - .values(models - .iter() - .map(|model| { - Ok(ReleaseRevNewRow { - title: model.title.clone(), - release_type: model.release_type.clone(), - release_status: model.release_status.clone(), - release_date: model.release_date.map(|v| v.naive_utc().date()), - doi: model.doi.clone(), - pmid: model.pmid.clone(), - pmcid: model.pmcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - isbn13: model.isbn13.clone(), - core_id: model.core_id.clone(), - volume: model.volume.clone(), - issue: model.issue.clone(), - pages: model.pages.clone(), - work_ident_id: match model.work_id.clone() { - None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), - Some(s) => FatCatId::from_str(&s)?.to_uuid(), - }, - container_ident_id: match model.container_id.clone() { - None => None, - Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), - }, - publisher: model.publisher.clone(), - language: model.language.clone(), - extra_json: model.extra.clone() - }) - }) - .collect::>>()?) 
- .returning(release_rev::id) - .get_results(conn)?; - - let mut release_ref_rows: Vec = vec![]; - let mut release_contrib_rows: Vec = vec![]; - let mut abstract_rows: Vec = vec![]; - let mut release_abstract_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.refs { - None => (), - Some(ref_list) => { - let these_ref_rows: Vec = ref_list - .iter() - .map(|r| { - Ok(ReleaseRefNewRow { - release_rev: rev_id.clone(), - target_release_ident_id: match r.target_release_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - index_val: r.index, - key: r.key.clone(), - container_title: r.container_title.clone(), - year: r.year, - title: r.title.clone(), - locator: r.locator.clone(), - extra_json: r.extra.clone(), - }) - }) - .collect::>>()?; - release_ref_rows.extend(these_ref_rows); - } - }; - - match &model.contribs { - None => (), - Some(contrib_list) => { - let these_contrib_rows: Vec = contrib_list - .iter() - .map(|c| { - Ok(ReleaseContribNewRow { - release_rev: rev_id.clone(), - creator_ident_id: match c.creator_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - raw_name: c.raw_name.clone(), - index_val: c.index, - role: c.role.clone(), - extra_json: c.extra.clone(), - }) - }) - .collect::>>()?; - release_contrib_rows.extend(these_contrib_rows); - } - }; - - if let Some(abstract_list) = &model.abstracts { - // For rows that specify content, we need to insert the abstract if it doesn't exist - // already - let new_abstracts: Vec = abstract_list - .iter() - .filter(|ea| ea.content.is_some()) - .map(|c| AbstractsRow { - sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), - content: c.content.clone().unwrap(), - }) - .collect(); - abstract_rows.extend(new_abstracts); - let new_release_abstract_rows: Vec = abstract_list - .into_iter() - .map(|c| { - Ok(ReleaseRevAbstractNewRow { - release_rev: rev_id.clone(), - abstract_sha1: match c.content { - Some(ref content) => Sha1::from(content).hexdigest(), - None => match c.sha1.clone() { - Some(v) => v, - None => bail!("either abstract_sha1 or content is required"), - }, - }, - lang: c.lang.clone(), - mimetype: c.mimetype.clone(), - }) - }) - .collect::>>()?; - release_abstract_rows.extend(new_release_abstract_rows); - } - } - - if !release_ref_rows.is_empty() { - insert_into(release_ref::table) - .values(release_ref_rows) - .execute(conn)?; - } - - if !release_contrib_rows.is_empty() { - insert_into(release_contrib::table) - .values(release_contrib_rows) - .execute(conn)?; - } - - if !abstract_rows.is_empty() { - // Sort of an "upsert"; only inserts new abstract rows if they don't already exist - insert_into(abstracts::table) - .values(&abstract_rows) - .on_conflict(abstracts::sha1) - .do_nothing() - .execute(conn)?; - insert_into(release_rev_abstract::table) - .values(release_abstract_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for WorkEntity { - type EditRow = WorkEditRow; - type EditNewRow = WorkEditNewRow; - type IdentRow = WorkIdentRow; - type IdentNewRow = WorkIdentNewRow; - type RevRow = WorkRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(work_ident, work_rev); - generic_db_get_rev!(work_rev); - generic_db_create!(work_ident, work_edit); - generic_db_create_batch!(work_ident, work_edit); - generic_db_update!(work_ident, work_edit); - generic_db_delete!(work_ident, work_edit); - generic_db_get_history!(work_edit); - generic_db_accept_edits_batch!("work"); - 
//generic_db_accept_edits_each!(work_ident, work_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(WorkEntity { - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - let rev_ids: Vec = insert_into(work_rev::table) - .values( - models - .iter() - .map(|model| WorkRevNewRow { - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(work_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 38f9b343..58147139 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -25,7 +25,7 @@ extern crate sha1; pub mod api_helpers; pub mod api_server; pub mod api_wrappers; -pub mod database_entity_crud; +pub mod api_entity_crud; pub mod database_models; pub mod database_schema; -- cgit v1.2.3 From d57a807ee0481fd6534307f0870a0f96ea19cd25 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 18:11:01 -0700 Subject: squelch unused macro warning --- rust/src/api_entity_crud.rs | 2 ++ 1 file changed, 2 insertions(+) (limited to 'rust/src') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index 01323637..e59b47b8 100644 --- a/rust/src/api_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -333,6 +333,7 @@ macro_rules! generic_db_get_history { // UPDATE FROM version: single query for many rows // Works with Postgres, not Cockroach +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_batch { ($entity_name_str:expr) => { fn db_accept_edits( @@ -361,6 +362,7 @@ macro_rules! generic_db_accept_edits_batch { // UPDATE ROW version: single query per row // CockroachDB version (slow, single query per row) +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_each { ($ident_table:ident, $edit_table:ident) => { fn db_accept_edits( -- cgit v1.2.3 From bf769e17ccdb788cb6127c26fb7f0e60e1fd0a9e Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 18:37:16 -0700 Subject: continue CRUD refactor, removing CUD handlers Can't remove the "get" handlers because of the expand parameter, which is only implemented for releases and in the handler (not in the CRUD trait, yet). Also didn't do the batch handler stuff yet. 
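The gist of this refactor, as a minimal self-contained sketch (stand-in types, no diesel, illustrative names only -- not the actual fatcat code): once every entity implements the EntityCrud trait, the per-entity create/update/delete handlers collapse into generic calls on the trait.

    // Simplified, stand-in types: the real code uses diesel connections, the
    // fatcat-api models, and macro-generated wrappers; everything here is
    // stubbed so the pattern stands alone.
    struct DbConn;
    struct EditContext {
        autoaccept: bool,
    }
    struct EntityEdit {
        ident: String,
    }

    trait EntityCrud: Sized {
        fn db_create(&self, conn: &DbConn, ctx: &EditContext) -> Result<EntityEdit, String>;
    }

    struct WorkEntity {
        title: String,
    }

    impl EntityCrud for WorkEntity {
        fn db_create(&self, _conn: &DbConn, ctx: &EditContext) -> Result<EntityEdit, String> {
            Ok(EntityEdit {
                ident: format!("created work '{}' (autoaccept={})", self.title, ctx.autoaccept),
            })
        }
    }

    // With the trait in place, one generic function replaces the per-entity
    // create_*_handler methods: build an edit context, call the trait method.
    fn create_entity<T: EntityCrud>(conn: &DbConn, entity: &T) -> Result<EntityEdit, String> {
        let ctx = EditContext { autoaccept: false };
        entity.db_create(conn, &ctx)
    }

    fn main() {
        let conn = DbConn;
        let work = WorkEntity {
            title: "example".to_string(),
        };
        match create_entity(&conn, &work) {
            Ok(edit) => println!("{}", edit.ident),
            Err(e) => eprintln!("error: {}", e),
        }
    }
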
--- rust/src/api_entity_crud.rs | 8 -- rust/src/api_helpers.rs | 29 ++++++ rust/src/api_server.rs | 221 +------------------------------------------- rust/src/api_wrappers.rs | 78 ++++++---------- 4 files changed, 60 insertions(+), 276 deletions(-) (limited to 'rust/src') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index e59b47b8..a1f4742b 100644 --- a/rust/src/api_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -6,19 +6,11 @@ use diesel::prelude::*; use diesel::{self, insert_into}; use errors::*; use fatcat_api::models::*; -use serde_json; use sha1::Sha1; use std::marker::Sized; use std::str::FromStr; use uuid::Uuid; -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option, - pub autoaccept: bool, -} - /* One goal here is to abstract the non-entity-specific bits into generic traits or functions, * instead of macros. * diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 456646b4..2d203232 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -2,6 +2,7 @@ use data_encoding::BASE32_NOPAD; use database_models::*; use database_schema::*; use fatcat_api::models::*; +use serde_json; use diesel; use diesel::prelude::*; use errors::*; @@ -13,6 +14,34 @@ use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option, + pub autoaccept: bool, +} + +pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { + let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth + let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { + (Some(eg), _) => eg, + // If autoaccept and no editgroup_id passed, always create a new one for this transaction + (None, true) => { + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(editor_id),)) + .get_result(conn)?; + FatCatId::from_uuid(&eg_row.id) + }, + (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), + }; + Ok(EditContext { + editor_id: FatCatId::from_uuid(&editor_id), + editgroup_id: editgroup_id, + extra_json: None, + autoaccept: autoaccept, + }) +} + /// This function should always be run within a transaction pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { // check for current active diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index 70ebb02a..e832385a 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -2,7 +2,7 @@ use api_helpers::*; use chrono; -use api_entity_crud::{EditContext, EntityCrud}; +use api_entity_crud::EntityCrud; use database_models::*; use database_schema::*; use diesel::prelude::*; @@ -53,27 +53,6 @@ macro_rules! 
count_entity { }}; } -fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { - let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth - let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { - (Some(eg), _) => eg, - // If autoaccept and no editgroup_id passed, always create a new one for this transaction - (None, true) => { - let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) - .values((editgroup::editor_id.eq(editor_id),)) - .get_result(conn)?; - FatCatId::from_uuid(&eg_row.id) - }, - (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), - }; - Ok(EditContext { - editor_id: FatCatId::from_uuid(&editor_id), - editgroup_id: editgroup_id, - extra_json: None, - autoaccept: autoaccept, - }) -} - #[derive(Clone)] pub struct Server { pub db_pool: ConnectionPool, @@ -248,163 +227,6 @@ impl Server { .collect() } - pub fn create_container_handler( - &self, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_container_handler( - &self, - id: &Uuid, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_container_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ContainerEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_creator_handler( - &self, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_creator_handler( - &self, - id: &Uuid, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_creator_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = CreatorEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_file_handler( - &self, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_file_handler( - &self, - id: &Uuid, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_file_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = 
FileEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_release_handler( - &self, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_release_handler( - &self, - id: &Uuid, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_release_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ReleaseEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_work_handler( - &self, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_work_handler( - &self, - id: &Uuid, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn delete_work_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = WorkEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - - edit.into_model() - } - pub fn accept_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<()> { accept_editgroup(fcid2uuid(id)?, conn)?; Ok(()) @@ -638,45 +460,4 @@ impl Server { entity_batch_handler!(create_file_batch_handler, FileEntity); entity_batch_handler!(create_release_batch_handler, ReleaseEntity); entity_batch_handler!(create_work_batch_handler, WorkEntity); - - pub fn get_container_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - ContainerEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_creator_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - CreatorEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_file_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - FileEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_release_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - ReleaseEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_work_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - WorkEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } } diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs index faafe984..0600f3de 100644 --- a/rust/src/api_wrappers.rs +++ b/rust/src/api_wrappers.rs @@ -1,6 +1,6 @@ //! 
API endpoint handlers -use api_helpers::fcid2uuid; +use api_helpers::*; use api_server::Server; use diesel::Connection; use errors::*; @@ -8,6 +8,9 @@ use fatcat_api::models; use fatcat_api::models::*; use fatcat_api::*; use futures::{self, Future}; +use api_entity_crud::EntityCrud; +use database_models::EntityEditRow; +use std::str::FromStr; /// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the /// codegen fatcat-api code wants) that call through to actual helpers (which have simple Result<> @@ -17,11 +20,11 @@ macro_rules! wrap_entity_handlers { // stable doesn't have a mechanism to "concat" or generate new identifiers in macros, at least // in the context of defining new functions. // The only stable approach I know of would be: https://github.com/dtolnay/mashup - ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_handler:ident, + ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_resp:ident, $post_batch_fn:ident, $post_batch_handler:ident, - $post_batch_resp:ident, $update_fn:ident, $update_handler:ident, $update_resp:ident, - $delete_fn:ident, $delete_handler:ident, $delete_resp:ident, $get_history_fn:ident, - $get_history_handler:ident, $get_history_resp:ident, $model:ident) => { + $post_batch_resp:ident, $update_fn:ident, $update_resp:ident, + $delete_fn:ident, $delete_resp:ident, $get_history_fn:ident, + $get_history_resp:ident, $model:ident) => { fn $get_fn( &self, @@ -61,7 +64,10 @@ macro_rules! wrap_entity_handlers { _context: &Context, ) -> Box + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$post_handler(entity, &conn)) { + let ret = match conn.transaction(|| { + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_create(&conn, &edit_context)?.into_model()?) + }) { Ok(edit) => $post_resp::CreatedEntity(edit), Err(Error(ErrorKind::Diesel(e), _)) => @@ -115,12 +121,12 @@ macro_rules! wrap_entity_handlers { entity: models::$model, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($update_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$update_handler(&id, entity, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_update(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $update_resp::UpdatedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -148,19 +154,16 @@ macro_rules! 
wrap_entity_handlers { editgroup_id: Option, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; - let editgroup_id = match editgroup_id { - Some(raw) => if let Ok(parsed) = fcid2uuid(&raw) { Some(parsed) } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(raw).to_string() })))) - } - None => None - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$delete_handler(&id, editgroup_id, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let editgroup_id: Option = match editgroup_id { + Some(s) => Some(FatCatId::from_str(&s)?), + None => None, + }; + let edit_context = make_edit_context(&conn, editgroup_id, false)?; + Ok($model::db_delete(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $delete_resp::DeletedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -188,13 +191,12 @@ macro_rules! wrap_entity_handlers { limit: Option, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($get_history_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); // No transaction for GET - let ret = match self.$get_history_handler(&id, limit, &conn) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + Ok($model::db_get_history(&conn, entity_id, limit)?) 
+ }) { Ok(history) => $get_history_resp::FoundEntityHistory(history), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -246,19 +248,15 @@ impl Api for Server { get_container_handler, GetContainerResponse, create_container, - create_container_handler, CreateContainerResponse, create_container_batch, create_container_batch_handler, CreateContainerBatchResponse, update_container, - update_container_handler, UpdateContainerResponse, delete_container, - delete_container_handler, DeleteContainerResponse, get_container_history, - get_container_history_handler, GetContainerHistoryResponse, ContainerEntity ); @@ -268,19 +266,15 @@ impl Api for Server { get_creator_handler, GetCreatorResponse, create_creator, - create_creator_handler, CreateCreatorResponse, create_creator_batch, create_creator_batch_handler, CreateCreatorBatchResponse, update_creator, - update_creator_handler, UpdateCreatorResponse, delete_creator, - delete_creator_handler, DeleteCreatorResponse, get_creator_history, - get_creator_history_handler, GetCreatorHistoryResponse, CreatorEntity ); @@ -289,19 +283,15 @@ impl Api for Server { get_file_handler, GetFileResponse, create_file, - create_file_handler, CreateFileResponse, create_file_batch, create_file_batch_handler, CreateFileBatchResponse, update_file, - update_file_handler, UpdateFileResponse, delete_file, - delete_file_handler, DeleteFileResponse, get_file_history, - get_file_history_handler, GetFileHistoryResponse, FileEntity ); @@ -310,19 +300,15 @@ impl Api for Server { get_release_handler, GetReleaseResponse, create_release, - create_release_handler, CreateReleaseResponse, create_release_batch, create_release_batch_handler, CreateReleaseBatchResponse, update_release, - update_release_handler, UpdateReleaseResponse, delete_release, - delete_release_handler, DeleteReleaseResponse, get_release_history, - get_release_history_handler, GetReleaseHistoryResponse, ReleaseEntity ); @@ -331,19 +317,15 @@ impl Api for Server { get_work_handler, GetWorkResponse, create_work, - create_work_handler, CreateWorkResponse, create_work_batch, create_work_batch_handler, CreateWorkBatchResponse, update_work, - update_work_handler, UpdateWorkResponse, delete_work, - delete_work_handler, DeleteWorkResponse, get_work_history, - get_work_history_handler, GetWorkHistoryResponse, WorkEntity ); -- cgit v1.2.3 From 3df7c6fa601309b74c1b664f2f09f048dce2bf29 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 19:30:07 -0700 Subject: many small API cleanups - use FatCatId much more often (though not everywhere yet) - more consistent types - remove redundant error handling code in wrappers --- rust/src/api_entity_crud.rs | 82 +++++++++------------------ rust/src/api_helpers.rs | 35 ++++++------ rust/src/api_server.rs | 90 +++++++++++++++--------------- rust/src/api_wrappers.rs | 132 ++++++++++++++++++++------------------------ rust/src/lib.rs | 2 +- 5 files changed, 148 insertions(+), 193 deletions(-) (limited to 'rust/src') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index a1f4742b..dd0961d5 100644 --- a/rust/src/api_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -43,19 +43,9 @@ where fn parse_editgroup_id(&self) -> Result>; // Generic Methods - fn db_get( - conn: &DbConn, - ident: FatCatId - ) -> Result; - fn db_get_rev( - conn: &DbConn, - rev_id: Uuid - ) -> Result; - fn db_create( - &self, - conn: &DbConn, - edit_context: &EditContext - ) -> Result; + fn db_get(conn: &DbConn, ident: FatCatId) -> Result; + fn db_get_rev(conn: 
&DbConn, rev_id: Uuid) -> Result; + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result; fn db_create_batch( conn: &DbConn, edit_context: &EditContext, @@ -77,10 +67,7 @@ where ident: FatCatId, limit: Option, ) -> Result>; - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId - ) -> Result; + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result; // Entity-specific Methods fn db_from_row( @@ -88,14 +75,8 @@ where rev_row: Self::RevRow, ident_row: Option, ) -> Result; - fn db_insert_rev( - &self, - conn: &DbConn - ) -> Result; - fn db_insert_revs( - conn: &DbConn, - models: &[&Self] - ) -> Result>; + fn db_insert_rev(&self, conn: &DbConn) -> Result; + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result>; } // TODO: this could be a separate trait on all entities @@ -328,11 +309,7 @@ macro_rules! generic_db_get_history { #[allow(unused_macros)] macro_rules! generic_db_accept_edits_batch { ($entity_name_str:expr) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { let count = diesel::sql_query(format!( " UPDATE {entity}_ident @@ -344,12 +321,12 @@ macro_rules! generic_db_accept_edits_batch { WHERE {entity}_ident.id = {entity}_edit.ident_id AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str + entity = $entity_name_str )).bind::(editgroup_id.to_uuid()) .execute(conn)?; Ok(count as u64) } - } + }; } // UPDATE ROW version: single query per row @@ -357,11 +334,7 @@ macro_rules! generic_db_accept_edits_batch { #[allow(unused_macros)] macro_rules! generic_db_accept_edits_each { ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { // 1. select edit rows (in sql) let edit_rows: Vec = $edit_table::table .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) @@ -369,31 +342,26 @@ macro_rules! generic_db_accept_edits_each { // 2. create ident rows (in rust) let ident_rows: Vec = edit_rows .iter() - .map(|edit| - Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - - } - ) + .map(|edit| Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + }) .collect(); /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ + // 3. upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ // 3. 
update every row individually let count = ident_rows.len() as u64; for row in ident_rows { - diesel::update(&row) - .set(&row) - .execute(conn)?; + diesel::update(&row).set(&row).execute(conn)?; } Ok(count) } diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 2d203232..6c214223 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -1,15 +1,15 @@ +use api_entity_crud::EntityCrud; use data_encoding::BASE32_NOPAD; use database_models::*; use database_schema::*; -use fatcat_api::models::*; -use serde_json; use diesel; use diesel::prelude::*; use errors::*; +use fatcat_api::models::*; use regex::Regex; +use serde_json; use std::str::FromStr; use uuid::Uuid; -use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; @@ -21,7 +21,11 @@ pub struct EditContext { pub autoaccept: bool, } -pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { +pub fn make_edit_context( + conn: &DbConn, + editgroup_id: Option, + autoaccept: bool, +) -> Result { let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { (Some(eg), _) => eg, @@ -31,7 +35,7 @@ pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoacce .values((editgroup::editor_id.eq(editor_id),)) .get_result(conn)?; FatCatId::from_uuid(&eg_row.id) - }, + } (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), }; Ok(EditContext { @@ -61,34 +65,33 @@ pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { } /// This function should always be run within a transaction -pub fn accept_editgroup(editgroup_id: Uuid, conn: &DbConn) -> Result { +pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result { // check that we haven't accepted already (in changelog) // NB: could leave this to a UNIQUE constraint let count: i64 = changelog::table - .filter(changelog::editgroup_id.eq(editgroup_id)) + .filter(changelog::editgroup_id.eq(editgroup_id.to_uuid())) .count() .get_result(conn)?; if count > 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted(uuid2fcid(&editgroup_id)).into()); + return Err(ErrorKind::EditgroupAlreadyAccepted(editgroup_id.to_string()).into()); } // copy edit columns to ident table - let eg_id = FatCatId::from_uuid(&editgroup_id); - ContainerEntity::db_accept_edits(conn, eg_id)?; - CreatorEntity::db_accept_edits(conn, eg_id)?; - FileEntity::db_accept_edits(conn, eg_id)?; - ReleaseEntity::db_accept_edits(conn, eg_id)?; - WorkEntity::db_accept_edits(conn, eg_id)?; + ContainerEntity::db_accept_edits(conn, editgroup_id)?; + CreatorEntity::db_accept_edits(conn, editgroup_id)?; + FileEntity::db_accept_edits(conn, editgroup_id)?; + ReleaseEntity::db_accept_edits(conn, editgroup_id)?; + WorkEntity::db_accept_edits(conn, editgroup_id)?; // append log/changelog row let entry: ChangelogRow = diesel::insert_into(changelog::table) - .values((changelog::editgroup_id.eq(editgroup_id),)) + .values((changelog::editgroup_id.eq(editgroup_id.to_uuid()),)) .get_result(conn)?; // update any editor's active editgroup let no_active: Option = None; diesel::update(editor::table) - .filter(editor::active_editgroup_id.eq(editgroup_id)) + .filter(editor::active_editgroup_id.eq(editgroup_id.to_uuid())) .set(editor::active_editgroup_id.eq(no_active)) .execute(conn)?; Ok(entry) diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index e832385a..076bc085 100644 --- a/rust/src/api_server.rs +++ 
b/rust/src/api_server.rs @@ -1,8 +1,8 @@ //! API endpoint handlers +use api_entity_crud::EntityCrud; use api_helpers::*; use chrono; -use api_entity_crud::EntityCrud; use database_models::*; use database_schema::*; use diesel::prelude::*; @@ -11,7 +11,6 @@ use errors::*; use fatcat_api::models; use fatcat_api::models::*; use std::str::FromStr; -use uuid::Uuid; use ConnectionPool; macro_rules! entity_batch_handler { @@ -61,11 +60,11 @@ pub struct Server { impl Server { pub fn get_container_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option, conn: &DbConn, ) -> Result { - ContainerEntity::db_get(conn, FatCatId::from_uuid(id)) + ContainerEntity::db_get(conn, id) } pub fn lookup_container_handler(&self, issnl: &str, conn: &DbConn) -> Result { @@ -85,11 +84,11 @@ impl Server { pub fn get_creator_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option, conn: &DbConn, ) -> Result { - CreatorEntity::db_get(conn, FatCatId::from_uuid(id)) + CreatorEntity::db_get(conn, id) } pub fn lookup_creator_handler(&self, orcid: &str, conn: &DbConn) -> Result { @@ -109,16 +108,14 @@ impl Server { pub fn get_creator_releases_handler( &self, - id: &str, + id: FatCatId, conn: &DbConn, ) -> Result> { - let id = fcid2uuid(&id)?; - // TODO: some kind of unique or group-by? let rows: Vec<(ReleaseRevRow, ReleaseIdentRow, ReleaseContribRow)> = release_rev::table .inner_join(release_ident::table) .inner_join(release_contrib::table) - .filter(release_contrib::creator_ident_id.eq(&id)) + .filter(release_contrib::creator_ident_id.eq(&id.to_uuid())) .filter(release_ident::is_live.eq(true)) .filter(release_ident::redirect_id.is_null()) .load(conn)?; @@ -131,11 +128,11 @@ impl Server { pub fn get_file_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option, conn: &DbConn, ) -> Result { - FileEntity::db_get(conn, FatCatId::from_uuid(id)) + FileEntity::db_get(conn, id) } pub fn lookup_file_handler(&self, sha1: &str, conn: &DbConn) -> Result { @@ -154,19 +151,18 @@ impl Server { pub fn get_release_handler( &self, - id: &Uuid, + id: FatCatId, expand: Option, conn: &DbConn, ) -> Result { - let mut release = ReleaseEntity::db_get(conn, FatCatId::from_uuid(id))?; + let mut release = ReleaseEntity::db_get(conn, id)?; // For now, if there is any expand param we do them all if expand.is_some() { - release.files = - Some(self.get_release_files_handler(&release.ident.clone().unwrap(), conn)?); + release.files = Some(self.get_release_files_handler(id, conn)?); if let Some(ref cid) = release.container_id { release.container = - Some(self.get_container_handler(&fcid2uuid(&cid)?, None, conn)?); + Some(self.get_container_handler(FatCatId::from_str(&cid)?, None, conn)?); } } Ok(release) @@ -187,13 +183,15 @@ impl Server { ReleaseEntity::db_from_row(conn, rev, Some(ident)) } - pub fn get_release_files_handler(&self, id: &str, conn: &DbConn) -> Result> { - let ident = FatCatId::from_str(id)?; - + pub fn get_release_files_handler( + &self, + id: FatCatId, + conn: &DbConn, + ) -> Result> { let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table .inner_join(file_ident::table) .inner_join(file_release::table) - .filter(file_release::target_release_ident_id.eq(&ident.to_uuid())) + .filter(file_release::target_release_ident_id.eq(&id.to_uuid())) .filter(file_ident::is_live.eq(true)) .filter(file_ident::redirect_id.is_null()) .load(conn)?; @@ -205,19 +203,21 @@ impl Server { pub fn get_work_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option, conn: &DbConn, ) -> Result { - WorkEntity::db_get(conn, 
FatCatId::from_uuid(id)) + WorkEntity::db_get(conn, id) } - pub fn get_work_releases_handler(&self, id: &str, conn: &DbConn) -> Result> { - let id = fcid2uuid(&id)?; - + pub fn get_work_releases_handler( + &self, + id: FatCatId, + conn: &DbConn, + ) -> Result> { let rows: Vec<(ReleaseRevRow, ReleaseIdentRow)> = release_rev::table .inner_join(release_ident::table) - .filter(release_rev::work_ident_id.eq(&id)) + .filter(release_rev::work_ident_id.eq(&id.to_uuid())) .filter(release_ident::is_live.eq(true)) .filter(release_ident::redirect_id.is_null()) .load(conn)?; @@ -227,8 +227,8 @@ impl Server { .collect() } - pub fn accept_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<()> { - accept_editgroup(fcid2uuid(id)?, conn)?; + pub fn accept_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result<()> { + accept_editgroup(id, conn)?; Ok(()) } @@ -239,7 +239,7 @@ impl Server { ) -> Result { let row: EditgroupRow = insert_into(editgroup::table) .values(( - editgroup::editor_id.eq(fcid2uuid(&entity.editor_id)?), + editgroup::editor_id.eq(FatCatId::from_str(&entity.editor_id)?.to_uuid()), editgroup::description.eq(entity.description), editgroup::extra_json.eq(entity.extra), )) @@ -254,14 +254,13 @@ impl Server { }) } - pub fn get_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result { - let id = fcid2uuid(id)?; - let row: EditgroupRow = editgroup::table.find(id).first(conn)?; + pub fn get_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result { + let row: EditgroupRow = editgroup::table.find(id.to_uuid()).first(conn)?; let edits = EditgroupEdits { containers: Some( container_edit::table - .filter(container_edit::editgroup_id.eq(id)) + .filter(container_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: ContainerEditRow| e.into_model().unwrap()) @@ -269,7 +268,7 @@ impl Server { ), creators: Some( creator_edit::table - .filter(creator_edit::editgroup_id.eq(id)) + .filter(creator_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: CreatorEditRow| e.into_model().unwrap()) @@ -277,7 +276,7 @@ impl Server { ), files: Some( file_edit::table - .filter(file_edit::editgroup_id.eq(id)) + .filter(file_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: FileEditRow| e.into_model().unwrap()) @@ -285,7 +284,7 @@ impl Server { ), releases: Some( release_edit::table - .filter(release_edit::editgroup_id.eq(id)) + .filter(release_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: ReleaseEditRow| e.into_model().unwrap()) @@ -293,7 +292,7 @@ impl Server { ), works: Some( work_edit::table - .filter(work_edit::editgroup_id.eq(id)) + .filter(work_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? 
.into_iter() .map(|e: WorkEditRow| e.into_model().unwrap()) @@ -311,9 +310,8 @@ impl Server { Ok(eg) } - pub fn get_editor_handler(&self, id: &str, conn: &DbConn) -> Result { - let id = fcid2uuid(id)?; - let row: EditorRow = editor::table.find(id).first(conn)?; + pub fn get_editor_handler(&self, id: FatCatId, conn: &DbConn) -> Result { + let row: EditorRow = editor::table.find(id.to_uuid()).first(conn)?; let ed = Editor { id: Some(uuid2fcid(&row.id)), @@ -322,14 +320,13 @@ impl Server { Ok(ed) } - pub fn editor_changelog_get_handler( + pub fn get_editor_changelog_handler( &self, - id: &str, + id: FatCatId, conn: &DbConn, ) -> Result> { - let id = fcid2uuid(id)?; // TODO: single query - let editor: EditorRow = editor::table.find(id).first(conn)?; + let editor: EditorRow = editor::table.find(id.to_uuid()).first(conn)?; let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table .inner_join(editgroup::table) .filter(editgroup::editor_id.eq(editor.id)) @@ -374,7 +371,8 @@ impl Server { pub fn get_changelog_entry_handler(&self, id: i64, conn: &DbConn) -> Result { let cl_row: ChangelogRow = changelog::table.find(id).first(conn)?; - let editgroup = self.get_editgroup_handler(&uuid2fcid(&cl_row.editgroup_id), conn)?; + let editgroup = + self.get_editgroup_handler(FatCatId::from_uuid(&cl_row.editgroup_id), conn)?; let mut entry = cl_row.into_model(); entry.editgroup = Some(editgroup); diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs index 0600f3de..f6425c8c 100644 --- a/rust/src/api_wrappers.rs +++ b/rust/src/api_wrappers.rs @@ -1,15 +1,15 @@ //! API endpoint handlers +use api_entity_crud::EntityCrud; use api_helpers::*; use api_server::Server; +use database_models::EntityEditRow; use diesel::Connection; use errors::*; use fatcat_api::models; use fatcat_api::models::*; use fatcat_api::*; use futures::{self, Future}; -use api_entity_crud::EntityCrud; -use database_models::EntityEditRow; use std::str::FromStr; /// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the @@ -32,13 +32,12 @@ macro_rules! wrap_entity_handlers { expand: Option, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($get_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); // No transaction for GET - let ret = match self.$get_handler(&id, expand, &conn) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + self.$get_handler(entity_id, expand, &conn) + }) { Ok(entity) => $get_resp::FoundEntity(entity), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -242,6 +241,35 @@ macro_rules! wrap_lookup_handler { } } +macro_rules! 
wrap_fcid_handler { + ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { + fn $get_fn( + &self, + id: String, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match (|| { + let fcid = FatCatId::from_str(&id)?; + self.$get_handler(fcid, &conn) + })() { + Ok(entity) => + $get_resp::Found(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + } +} + impl Api for Server { wrap_entity_handlers!( get_container, @@ -359,26 +387,26 @@ impl Api for Server { String ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_release_files, get_release_files_handler, - GetReleaseFilesResponse, - id, - String + GetReleaseFilesResponse ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_work_releases, get_work_releases_handler, - GetWorkReleasesResponse, - id, - String + GetWorkReleasesResponse ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_creator_releases, get_creator_releases_handler, - GetCreatorReleasesResponse, - id, - String + GetCreatorReleasesResponse + ); + wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse); + wrap_fcid_handler!( + get_editor_changelog, + get_editor_changelog_handler, + GetEditorChangelogResponse ); fn accept_editgroup( @@ -387,7 +415,10 @@ impl Api for Server { _context: &Context, ) -> Box + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.accept_editgroup_handler(&id, &conn)) { + let ret = match conn.transaction(|| { + let id = FatCatId::from_str(&id)?; + self.accept_editgroup_handler(id, &conn) + }) { Ok(()) => AcceptEditgroupResponse::MergedSuccessfully(Success { message: "horray!".to_string(), }), @@ -414,9 +445,12 @@ impl Api for Server { _context: &Context, ) -> Box + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.get_editgroup_handler(&id, &conn)) { + let ret = match conn.transaction(|| { + let id = FatCatId::from_str(&id)?; + self.get_editgroup_handler(id, &conn) + }) { Ok(entity) => - GetEditgroupResponse::FoundEntity(entity), + GetEditgroupResponse::Found(entity), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => GetEditgroupResponse::NotFound( ErrorResponse { message: format!("No such editgroup: {}", id) }), @@ -434,7 +468,9 @@ impl Api for Server { _context: &Context, ) -> Box + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.create_editgroup_handler(entity, &conn)) { + let ret = match conn.transaction(|| + self.create_editgroup_handler(entity, &conn) + ) { Ok(eg) => CreateEditgroupResponse::SuccessfullyCreated(eg), Err(e) => @@ -445,56 +481,6 @@ impl Api for Server { Box::new(futures::done(Ok(ret))) } - fn get_editor_changelog( - &self, - username: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.editor_changelog_get_handler(&username, &conn) { - Ok(entries) => GetEditorChangelogResponse::FoundMergedChanges(entries), - 
Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetEditorChangelogResponse::NotFound(ErrorResponse { - message: format!("No such editor: {}", username.clone()), - }) - } - Err(e) => { - // TODO: dig in to error type here - error!("{}", e); - GetEditorChangelogResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn get_editor( - &self, - username: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.get_editor_handler(&username, &conn) { - Ok(entity) => GetEditorResponse::FoundEditor(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetEditorResponse::NotFound(ErrorResponse { - message: format!("No such editor: {}", username.clone()), - }) - } - Err(e) => { - // TODO: dig in to error type here - error!("{}", e); - GetEditorResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - fn get_changelog( &self, limit: Option, diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 58147139..57cc535c 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -22,10 +22,10 @@ extern crate regex; extern crate lazy_static; extern crate sha1; +pub mod api_entity_crud; pub mod api_helpers; pub mod api_server; pub mod api_wrappers; -pub mod api_entity_crud; pub mod database_models; pub mod database_schema; -- cgit v1.2.3
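For the FatCatId cleanup in the last patch above, a minimal sketch of the convention the handlers move toward (stand-in id type and handler names, not the real implementation): parse the string identifier once at the API boundary, then pass a typed id everywhere else.

    use std::str::FromStr;

    // Stand-in for the real FatCatId, which wraps a Uuid and round-trips
    // through base32; the encoding details are omitted here.
    #[derive(Clone, Copy, Debug)]
    struct FatCatId(u128);

    impl FromStr for FatCatId {
        type Err = String;
        fn from_str(s: &str) -> Result<Self, Self::Err> {
            u128::from_str_radix(s, 16)
                .map(FatCatId)
                .map_err(|e| format!("invalid id {:?}: {}", s, e))
        }
    }

    // Handlers take the typed id, so string parsing (and its error handling)
    // lives in exactly one place: the wrapper at the API boundary.
    fn get_work_handler(id: FatCatId) -> String {
        format!("would fetch work {:?}", id)
    }

    fn get_work_wrapper(raw_id: &str) -> Result<String, String> {
        let id = FatCatId::from_str(raw_id)?; // parse once, here
        Ok(get_work_handler(id))
    }

    fn main() {
        match get_work_wrapper("00ff") {
            Ok(msg) => println!("{}", msg),
            Err(e) => eprintln!("error: {}", e),
        }
    }
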