From 2e335344e59de0ff42f1c2797a4e4ffbf3f4b30b Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 10 Sep 2018 16:46:49 -0700
Subject: remove 'IS NOT NULL' identifier index constraints

These seemed to be resulting in table scans on 404s in QA with postgres 10,
despite adding the "IS NOT NULL" WHERE clauses earlier. Query time was very
significant, even for the creator table (1.2 seconds or so on SSD).

I looked at using hash indices (which have improved in postgres 10), which
could save index size (disk and RAM) and potentially be faster for these
trivial exact lookups, but didn't go for it at this time.
---
 rust/migrations/2018-05-12-001226_init/up.sql | 28 ++++++++++++++--------------
 1 file changed, 14 insertions(+), 14 deletions(-)

(limited to 'rust')

diff --git a/rust/migrations/2018-05-12-001226_init/up.sql b/rust/migrations/2018-05-12-001226_init/up.sql
index b0733576..c21e5b5e 100644
--- a/rust/migrations/2018-05-12-001226_init/up.sql
+++ b/rust/migrations/2018-05-12-001226_init/up.sql
@@ -59,8 +59,8 @@ CREATE TABLE creator_rev (
 
 -- Could denormalize a "is_live" flag into revision tables, to make indices
 -- more efficient
-CREATE INDEX creator_rev_orcid_idx ON creator_rev(orcid) WHERE orcid IS NOT NULL;
-CREATE INDEX creator_rev_wikidata_idx ON creator_rev(wikidata_qid) WHERE wikidata_qid IS NOT NULL;
+CREATE INDEX creator_rev_orcid_idx ON creator_rev(orcid);
+CREATE INDEX creator_rev_wikidata_idx ON creator_rev(wikidata_qid);
 
 CREATE TABLE creator_ident (
     id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
@@ -97,8 +97,8 @@ CREATE TABLE container_rev (
     coden TEXT
 );
 
-CREATE INDEX container_rev_issnl_idx ON container_rev(issnl) WHERE issnl IS NOT NULL;
-CREATE INDEX container_rev_wikidata_idx ON container_rev(wikidata_qid) WHERE wikidata_qid IS NOT NULL;
+CREATE INDEX container_rev_issnl_idx ON container_rev(issnl);
+CREATE INDEX container_rev_wikidata_idx ON container_rev(wikidata_qid);
 
 CREATE TABLE container_ident (
     id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
@@ -134,9 +134,9 @@ CREATE TABLE file_rev (
     mimetype TEXT
 );
 
-CREATE INDEX file_rev_sha1_idx ON file_rev(sha1) WHERE sha1 IS NOT NULL;
-CREATE INDEX file_rev_md5_idx ON file_rev(md5) WHERE md5 IS NOT NULL;
-CREATE INDEX file_rev_sha256_idx ON file_rev(sha256) WHERE sha256 IS NOT NULL;
+CREATE INDEX file_rev_sha1_idx ON file_rev(sha1);
+CREATE INDEX file_rev_md5_idx ON file_rev(md5);
+CREATE INDEX file_rev_sha256_idx ON file_rev(sha256);
 
 CREATE TABLE file_rev_url (
     id BIGSERIAL PRIMARY KEY,
@@ -195,13 +195,13 @@ CREATE TABLE release_rev (
     -- TODO: identifier table?
 );
 
-CREATE INDEX release_rev_doi_idx ON release_rev(doi) WHERE doi IS NOT NULL;
-CREATE INDEX release_rev_pmid_idx ON release_rev(pmid) WHERE pmid IS NOT NULL;
-CREATE INDEX release_rev_pmcid_idx ON release_rev(pmcid) WHERE pmcid IS NOT NULL;
-CREATE INDEX release_rev_wikidata_idx ON release_rev(wikidata_qid) WHERE wikidata_qid IS NOT NULL;
-CREATE INDEX release_rev_isbn13_idx ON release_rev(isbn13) WHERE isbn13 IS NOT NULL;
-CREATE INDEX release_rev_core_idx ON release_rev(core_id) WHERE core_id IS NOT NULL;
-CREATE INDEX release_rev_work_idx ON release_rev(work_ident_id) WHERE work_ident_id IS NOT NULL;
+CREATE INDEX release_rev_doi_idx ON release_rev(doi);
+CREATE INDEX release_rev_pmid_idx ON release_rev(pmid);
+CREATE INDEX release_rev_pmcid_idx ON release_rev(pmcid);
+CREATE INDEX release_rev_wikidata_idx ON release_rev(wikidata_qid);
+CREATE INDEX release_rev_isbn13_idx ON release_rev(isbn13);
+CREATE INDEX release_rev_core_idx ON release_rev(core_id);
+CREATE INDEX release_rev_work_idx ON release_rev(work_ident_id);
 
 CREATE TABLE release_rev_abstract (
     id BIGSERIAL PRIMARY KEY,
--
cgit v1.2.3


From 70144145ee627f521ee7a9d5020dce2cb2b96373 Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 10 Sep 2018 18:09:29 -0700
Subject: database_entity_crud -> api_entity_crud

---
 rust/HACKING.md                  |    4 +-
 rust/src/api_entity_crud.rs      | 1145 ++++++++++++++++++++++++++++++++++++++
 rust/src/api_helpers.rs          |    2 +-
 rust/src/api_server.rs           |    2 +-
 rust/src/database_entity_crud.rs | 1145 --------------------------------------
 rust/src/lib.rs                  |    2 +-
 6 files changed, 1150 insertions(+), 1150 deletions(-)
 create mode 100644 rust/src/api_entity_crud.rs
 delete mode 100644 rust/src/database_entity_crud.rs

(limited to 'rust')

diff --git a/rust/HACKING.md b/rust/HACKING.md
index 622a4b5a..0dde5058 100644
--- a/rust/HACKING.md
+++ b/rust/HACKING.md
@@ -8,11 +8,11 @@ swagger API spec on one side and the SQL schema on the other.
 - `./src/database_schema.rs`: autogenerated per-table Diesel schemas
 - `./src/database_models.rs`: hand- and macro-generated Rust structs matching
   Diesel schemas, and a small number of row-level helpers
-- `./src/database_entity_crud.rs`: "struct-relational-mapping"; trait
+- `./src/api_entity_crud.rs`: "struct-relational-mapping"; trait
   implementations of CRUD (create, read, update, delete) actions for each
   entity model, hitting the database (and building on `database_model` structs)
 - `./src/api_server.rs`: one function for each API endpoint, with rust-style
-  arguments and return types. mostly calls in to `database_entity_crud`.
+  arguments and return types. mostly calls in to `api_entity_crud`.
 - `./src/api_wrappers.rs`: hand- and macro-generated wrapper functions, one
   per API endpoint, that map between API request and return types, and
   rust-idiomatic request and return types (plus API models).
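To make the HACKING.md layering above concrete, here is a minimal sketch of how an `api_server`-style endpoint function delegates to the `EntityCrud` trait that this commit moves into `api_entity_crud.rs`. The handler name and exact signature below are illustrative (not code from this patch); `DbConn`, `FatCatId`, and `ContainerEntity::db_get` are taken from the diffs in this patch.

    use api_entity_crud::EntityCrud;
    use api_helpers::*; // DbConn, FatCatId, etc., as the modules in this patch import them
    use errors::*;
    use fatcat_api::models::ContainerEntity;
    use std::str::FromStr;

    // Hypothetical endpoint-level helper: parse the external identifier string,
    // then let the EntityCrud implementation do the ident/rev join and build the
    // API model.
    pub fn get_container_handler(id: &str, conn: &DbConn) -> Result<ContainerEntity> {
        let ident = FatCatId::from_str(id)?;
        ContainerEntity::db_get(conn, ident)
    }

Per the HACKING.md notes, the real `api_server.rs` functions additionally handle the swagger-generated request/response plumbing, with `api_wrappers.rs` mapping results back into API response types.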
diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs new file mode 100644 index 00000000..01323637 --- /dev/null +++ b/rust/src/api_entity_crud.rs @@ -0,0 +1,1145 @@ +use api_helpers::*; +use chrono; +use database_models::*; +use database_schema::*; +use diesel::prelude::*; +use diesel::{self, insert_into}; +use errors::*; +use fatcat_api::models::*; +use serde_json; +use sha1::Sha1; +use std::marker::Sized; +use std::str::FromStr; +use uuid::Uuid; + +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option, + pub autoaccept: bool, +} + +/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, + * instead of macros. + * + * Notably: + * + * db_get + * db_get_rev + * db_create + * db_create_batch + * db_update + * db_delete + * db_get_history + * + * For now, these will probably be macros, until we can level up our trait/generics foo. + */ + +// Associated Type, not parametric +pub trait EntityCrud +where + Self: Sized, +{ + // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a + // specific table? + type EditRow; // EntityEditRow + type EditNewRow; + type IdentRow; // EntityIdentRow + type IdentNewRow; + type RevRow; + + fn parse_editgroup_id(&self) -> Result>; + + // Generic Methods + fn db_get( + conn: &DbConn, + ident: FatCatId + ) -> Result; + fn db_get_rev( + conn: &DbConn, + rev_id: Uuid + ) -> Result; + fn db_create( + &self, + conn: &DbConn, + edit_context: &EditContext + ) -> Result; + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result>; + fn db_update( + &self, + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result>; + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId + ) -> Result; + + // Entity-specific Methods + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result; + fn db_insert_rev( + &self, + conn: &DbConn + ) -> Result; + fn db_insert_revs( + conn: &DbConn, + models: &[&Self] + ) -> Result>; +} + +// TODO: this could be a separate trait on all entities +macro_rules! generic_parse_editgroup_id{ + () => { + fn parse_editgroup_id(&self) -> Result> { + match &self.editgroup_id { + Some(s) => Ok(Some(FatCatId::from_str(&s)?)), + None => Ok(None), + } + } + } +} + +macro_rules! generic_db_get { + ($ident_table:ident, $rev_table:ident) => { + fn db_get(conn: &DbConn, ident: FatCatId) -> Result { + let (ident, rev): (Self::IdentRow, Self::RevRow) = $ident_table::table + .find(ident.to_uuid()) + .inner_join($rev_table::table) + .first(conn)?; + + Self::db_from_row(conn, rev, Some(ident)) + } + }; +} + +macro_rules! generic_db_get_rev { + ($rev_table:ident) => { + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result { + let rev = $rev_table::table.find(rev_id).first(conn)?; + + Self::db_from_row(conn, rev, None) + } + }; +} + +macro_rules! 
generic_db_create { + // TODO: this path should call generic_db_create_batch + ($ident_table: ident, $edit_table: ident) => { + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + let rev_id = self.db_insert_rev(conn)?; + let ident: Uuid = insert_into($ident_table::table) + .values($ident_table::rev_id.eq(&rev_id)) + .returning($ident_table::id) + .get_result(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::ident_id.eq(&ident), + )) + .get_result(conn)?; + Ok(edit) + } + } +} + +macro_rules! generic_db_create_batch { + ($ident_table:ident, $edit_table:ident) => { + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into($ident_table::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning($ident_table::id) + .get_results(conn)?; + let edits: Vec = insert_into($edit_table::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + }; +} + +macro_rules! generic_db_update { + ($ident_table: ident, $edit_table: ident) => { + fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 4xx not 5xx + bail!("entity was already deleted"); + } + + let rev_id = self.db_insert_rev(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::prev_rev.eq(current.rev_id.unwrap()), + $edit_table::extra_json.eq(&self.extra), + )) + .get_result(conn)?; + + Ok(edit) + } + } +} + +macro_rules! generic_db_delete { + ($ident_table:ident, $edit_table:ident) => { + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 4xx not 5xx + bail!("entity was already deleted"); + } + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(ident.to_uuid()), + $edit_table::rev_id.eq(None::), + $edit_table::redirect_id.eq(None::), + $edit_table::prev_rev.eq(current.rev_id), + $edit_table::extra_json.eq(&edit_context.extra_json), + )) + .get_result(conn)?; + + Ok(edit) + } + }; +} + +macro_rules! 
generic_db_get_history { + ($edit_table:ident) => { + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result> { + let limit = limit.unwrap_or(50); // TODO: make a static + + let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table + .inner_join(changelog::table) + .inner_join($edit_table::table) + .filter($edit_table::ident_id.eq(ident.to_uuid())) + .order(changelog::id.desc()) + .limit(limit) + .get_results(conn)?; + + let history: Result> = rows.into_iter() + .map(|(eg_row, cl_row, e_row)| { + Ok(EntityHistoryEntry { + edit: e_row.into_model()?, + editgroup: eg_row.into_model_partial(), + changelog_entry: cl_row.into_model(), + }) + }) + .collect(); + history + } + }; +} + +/* +// This would be the clean and efficient way, but see: +// https://github.com/diesel-rs/diesel/issues/1478 +// + diesel::update(container_ident::table) + .inner_join(container_edit::table.on( + container_ident::id.eq(container_edit::ident_id) + )) + .filter(container_edit::editgroup_id.eq(editgroup_id)) + .values(( + container_ident::is_live.eq(true), + container_ident::rev_id.eq(container_edit::rev_id), + container_ident::redirect_id.eq(container_edit::redirect_id), + )) + .execute()?; + +// Was previously: + + for entity in &["container", "creator", "file", "work", "release"] { + diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = entity + )).bind::(editgroup_id) + .execute(conn)?; +*/ + +// UPDATE FROM version: single query for many rows +// Works with Postgres, not Cockroach +macro_rules! generic_db_accept_edits_batch { + ($entity_name_str:expr) => { + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId, + ) -> Result { + + let count = diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = $entity_name_str + )).bind::(editgroup_id.to_uuid()) + .execute(conn)?; + Ok(count as u64) + } + } +} + +// UPDATE ROW version: single query per row +// CockroachDB version (slow, single query per row) +macro_rules! generic_db_accept_edits_each { + ($ident_table:ident, $edit_table:ident) => { + fn db_accept_edits( + conn: &DbConn, + editgroup_id: FatCatId, + ) -> Result { + + // 1. select edit rows (in sql) + let edit_rows: Vec = $edit_table::table + .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) + .get_results(conn)?; + // 2. create ident rows (in rust) + let ident_rows: Vec = edit_rows + .iter() + .map(|edit| + Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + + } + ) + .collect(); + /* + // 3. upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ + // 3. update every row individually + let count = ident_rows.len() as u64; + for row in ident_rows { + diesel::update(&row) + .set(&row) + .execute(conn)?; + } + Ok(count) + } + }; +} + +macro_rules! 
generic_db_insert_rev { + () => { + fn db_insert_rev(&self, conn: &DbConn) -> Result { + Self::db_insert_revs(conn, &vec![self]).map(|id_list| id_list[0]) + } + } +} + +impl EntityCrud for ContainerEntity { + type EditRow = ContainerEditRow; + type EditNewRow = ContainerEditNewRow; + type IdentRow = ContainerIdentRow; + type IdentNewRow = ContainerIdentNewRow; + type RevRow = ContainerRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(container_ident, container_rev); + generic_db_get_rev!(container_rev); + generic_db_create!(container_ident, container_edit); + generic_db_create_batch!(container_ident, container_edit); + generic_db_update!(container_ident, container_edit); + generic_db_delete!(container_ident, container_edit); + generic_db_get_history!(container_edit); + generic_db_accept_edits_batch!("container"); + //generic_db_accept_edits_each!(container_ident, container_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(ContainerEntity { + issnl: rev_row.issnl, + wikidata_qid: rev_row.wikidata_qid, + publisher: rev_row.publisher, + name: rev_row.name, + abbrev: rev_row.abbrev, + coden: rev_row.coden, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + editgroup_id: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + if let Some(ref extid) = entity.issnl { + check_issn(extid)?; + } + } + + let rev_ids: Vec = insert_into(container_rev::table) + .values( + models + .iter() + .map(|model| ContainerRevNewRow { + name: model.name.clone(), + publisher: model.publisher.clone(), + issnl: model.issnl.clone(), + wikidata_qid: model.wikidata_qid.clone(), + abbrev: model.abbrev.clone(), + coden: model.coden.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(container_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for CreatorEntity { + type EditRow = CreatorEditRow; + type EditNewRow = CreatorEditNewRow; + type IdentRow = CreatorIdentRow; + type IdentNewRow = CreatorIdentNewRow; + type RevRow = CreatorRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(creator_ident, creator_rev); + generic_db_get_rev!(creator_rev); + generic_db_create!(creator_ident, creator_edit); + generic_db_create_batch!(creator_ident, creator_edit); + generic_db_update!(creator_ident, creator_edit); + generic_db_delete!(creator_ident, creator_edit); + generic_db_get_history!(creator_edit); + generic_db_accept_edits_batch!("creator"); + //generic_db_accept_edits_each!(creator_ident, creator_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + Ok(CreatorEntity { + display_name: rev_row.display_name, + 
given_name: rev_row.given_name, + surname: rev_row.surname, + orcid: rev_row.orcid, + wikidata_qid: rev_row.wikidata_qid, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.orcid { + check_orcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + let rev_ids: Vec = insert_into(creator_rev::table) + .values( + models + .iter() + .map(|model| CreatorRevNewRow { + display_name: model.display_name.clone(), + given_name: model.given_name.clone(), + surname: model.surname.clone(), + orcid: model.orcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(creator_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for FileEntity { + type EditRow = FileEditRow; + type EditNewRow = FileEditNewRow; + type IdentRow = FileIdentRow; + type IdentNewRow = FileIdentNewRow; + type RevRow = FileRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(file_ident, file_rev); + generic_db_get_rev!(file_rev); + generic_db_create!(file_ident, file_edit); + generic_db_create_batch!(file_ident, file_edit); + generic_db_update!(file_ident, file_edit); + generic_db_delete!(file_ident, file_edit); + generic_db_get_history!(file_edit); + generic_db_accept_edits_batch!("file"); + //generic_db_accept_edits_each!(file_ident, file_edit); + generic_db_insert_rev!(); + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let releases: Vec = file_release::table + .filter(file_release::file_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + let urls: Vec = file_rev_url::table + .filter(file_rev_url::file_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map(|r: FileRevUrlRow| FileEntityUrls { + rel: r.rel, + url: r.url, + }) + .collect(); + + Ok(FileEntity { + sha1: rev_row.sha1, + sha256: rev_row.sha256, + md5: rev_row.md5, + size: rev_row.size.map(|v| v as i64), + urls: Some(urls), + mimetype: rev_row.mimetype, + releases: Some(releases.iter().map(|fcid| fcid.to_string()).collect()), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + let rev_ids: Vec = insert_into(file_rev::table) + .values( + models + .iter() + .map(|model| FileRevNewRow { + size: model.size, + sha1: model.sha1.clone(), + sha256: model.sha256.clone(), + md5: model.md5.clone(), + mimetype: model.mimetype.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(file_rev::id) + .get_results(conn)?; + + let mut file_release_rows: Vec = vec![]; + let mut file_url_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.releases { + None => (), + Some(release_list) => { + let these_release_rows: Result> = release_list + .iter() + .map(|r| { + Ok(FileReleaseRow { + file_rev: rev_id.clone(), + target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), + }) + }) + .collect(); + file_release_rows.extend(these_release_rows?); + } + }; + + match &model.urls { + None => (), + Some(url_list) => { + let these_url_rows: Vec = url_list + .into_iter() + .map(|u| FileRevUrlNewRow { + file_rev: rev_id.clone(), + rel: u.rel.clone(), + url: u.url.clone(), + }) + .collect(); + file_url_rows.extend(these_url_rows); + } + }; + } + + if !file_release_rows.is_empty() { + // TODO: shouldn't it be "file_rev_release"? 
+ insert_into(file_release::table) + .values(file_release_rows) + .execute(conn)?; + } + + if !file_url_rows.is_empty() { + insert_into(file_rev_url::table) + .values(file_url_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for ReleaseEntity { + type EditRow = ReleaseEditRow; + type EditNewRow = ReleaseEditNewRow; + type IdentRow = ReleaseIdentRow; + type IdentNewRow = ReleaseIdentNewRow; + type RevRow = ReleaseRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(release_ident, release_rev); + generic_db_get_rev!(release_rev); + //generic_db_create!(release_ident, release_edit); + //generic_db_create_batch!(release_ident, release_edit); + generic_db_update!(release_ident, release_edit); + generic_db_delete!(release_ident, release_edit); + generic_db_get_history!(release_edit); + generic_db_accept_edits_batch!("release"); + //generic_db_accept_edits_each!(release_ident, release_edit); + generic_db_insert_rev!(); + + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + let mut edits = Self::db_create_batch(conn, edit_context, &vec![self])?; + // probably a more elegant way to destroy the vec and take first element + Ok(edits.pop().unwrap()) + } + + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + // This isn't the generic implementation because we need to create Work entities for each + // of the release entities passed (at least in the common case) + + // Generate the set of new work entities to insert (usually one for each release, but some + // releases might be pointed to a work already) + let mut new_work_models: Vec<&WorkEntity> = vec![]; + for entity in models { + if entity.work_id.is_none() { + new_work_models.push(&WorkEntity { + ident: None, + revision: None, + redirect: None, + state: None, + editgroup_id: None, + extra: None, + }); + }; + } + + // create the works, then pluck the list of idents from the result + let new_work_edits = + WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; + let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); + + // Copy all the release models, and ensure that each has work_id set, using the new work + // idents. There should be one new work ident for each release missing one. + let models_with_work_ids: Vec = models + .iter() + .map(|model| { + let mut model = (*model).clone(); + if model.work_id.is_none() { + model.work_id = + Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) + } + model + }) + .collect(); + let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); + let models = model_refs.as_slice(); + + // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
+ let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into(release_ident::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning(release_ident::id) + .get_results(conn)?; + let edits: Vec = insert_into(release_edit::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let refs: Vec = release_ref::table + .filter(release_ref::release_rev.eq(rev_row.id)) + .order(release_ref::index_val.asc()) + .get_results(conn)? + .into_iter() + .map(|r: ReleaseRefRow| ReleaseRef { + index: r.index_val, + key: r.key, + extra: r.extra_json, + container_title: r.container_title, + year: r.year, + title: r.title, + locator: r.locator, + target_release_id: r.target_release_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + }) + .collect(); + + let contribs: Vec = release_contrib::table + .filter(release_contrib::release_rev.eq(rev_row.id)) + .order(( + release_contrib::role.asc(), + release_contrib::index_val.asc(), + )) + .get_results(conn)? + .into_iter() + .map(|c: ReleaseContribRow| ReleaseContrib { + index: c.index_val, + raw_name: c.raw_name, + role: c.role, + extra: c.extra_json, + creator_id: c.creator_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + creator: None, + }) + .collect(); + + let abstracts: Vec = release_rev_abstract::table + .inner_join(abstracts::table) + .filter(release_rev_abstract::release_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map( + |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { + sha1: Some(r.0.abstract_sha1), + mimetype: r.0.mimetype, + lang: r.0.lang, + content: Some(r.1.content), + }, + ) + .collect(); + + Ok(ReleaseEntity { + title: rev_row.title, + release_type: rev_row.release_type, + release_status: rev_row.release_status, + release_date: rev_row + .release_date + .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)), + doi: rev_row.doi, + pmid: rev_row.pmid, + pmcid: rev_row.pmcid, + isbn13: rev_row.isbn13, + core_id: rev_row.core_id, + wikidata_qid: rev_row.wikidata_qid, + volume: rev_row.volume, + issue: rev_row.issue, + pages: rev_row.pages, + files: None, + container: None, + container_id: rev_row + .container_ident_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + publisher: rev_row.publisher, + language: rev_row.language, + work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), + refs: Some(refs), + contribs: Some(contribs), + abstracts: Some(abstracts), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.doi { + check_doi(extid)?; + } + if let Some(ref extid) = entity.pmid { + check_pmid(extid)?; + } + if let Some(ref extid) = entity.pmcid { + check_pmcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + let rev_ids: Vec = insert_into(release_rev::table) + .values(models + .iter() + .map(|model| { + Ok(ReleaseRevNewRow { + title: model.title.clone(), + release_type: model.release_type.clone(), + release_status: model.release_status.clone(), + release_date: model.release_date.map(|v| v.naive_utc().date()), + doi: model.doi.clone(), + pmid: model.pmid.clone(), + pmcid: model.pmcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + isbn13: model.isbn13.clone(), + core_id: model.core_id.clone(), + volume: model.volume.clone(), + issue: model.issue.clone(), + pages: model.pages.clone(), + work_ident_id: match model.work_id.clone() { + None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), + Some(s) => FatCatId::from_str(&s)?.to_uuid(), + }, + container_ident_id: match model.container_id.clone() { + None => None, + Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), + }, + publisher: model.publisher.clone(), + language: model.language.clone(), + extra_json: model.extra.clone() + }) + }) + .collect::>>()?) 
+ .returning(release_rev::id) + .get_results(conn)?; + + let mut release_ref_rows: Vec = vec![]; + let mut release_contrib_rows: Vec = vec![]; + let mut abstract_rows: Vec = vec![]; + let mut release_abstract_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.refs { + None => (), + Some(ref_list) => { + let these_ref_rows: Vec = ref_list + .iter() + .map(|r| { + Ok(ReleaseRefNewRow { + release_rev: rev_id.clone(), + target_release_ident_id: match r.target_release_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + index_val: r.index, + key: r.key.clone(), + container_title: r.container_title.clone(), + year: r.year, + title: r.title.clone(), + locator: r.locator.clone(), + extra_json: r.extra.clone(), + }) + }) + .collect::>>()?; + release_ref_rows.extend(these_ref_rows); + } + }; + + match &model.contribs { + None => (), + Some(contrib_list) => { + let these_contrib_rows: Vec = contrib_list + .iter() + .map(|c| { + Ok(ReleaseContribNewRow { + release_rev: rev_id.clone(), + creator_ident_id: match c.creator_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + raw_name: c.raw_name.clone(), + index_val: c.index, + role: c.role.clone(), + extra_json: c.extra.clone(), + }) + }) + .collect::>>()?; + release_contrib_rows.extend(these_contrib_rows); + } + }; + + if let Some(abstract_list) = &model.abstracts { + // For rows that specify content, we need to insert the abstract if it doesn't exist + // already + let new_abstracts: Vec = abstract_list + .iter() + .filter(|ea| ea.content.is_some()) + .map(|c| AbstractsRow { + sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), + content: c.content.clone().unwrap(), + }) + .collect(); + abstract_rows.extend(new_abstracts); + let new_release_abstract_rows: Vec = abstract_list + .into_iter() + .map(|c| { + Ok(ReleaseRevAbstractNewRow { + release_rev: rev_id.clone(), + abstract_sha1: match c.content { + Some(ref content) => Sha1::from(content).hexdigest(), + None => match c.sha1.clone() { + Some(v) => v, + None => bail!("either abstract_sha1 or content is required"), + }, + }, + lang: c.lang.clone(), + mimetype: c.mimetype.clone(), + }) + }) + .collect::>>()?; + release_abstract_rows.extend(new_release_abstract_rows); + } + } + + if !release_ref_rows.is_empty() { + insert_into(release_ref::table) + .values(release_ref_rows) + .execute(conn)?; + } + + if !release_contrib_rows.is_empty() { + insert_into(release_contrib::table) + .values(release_contrib_rows) + .execute(conn)?; + } + + if !abstract_rows.is_empty() { + // Sort of an "upsert"; only inserts new abstract rows if they don't already exist + insert_into(abstracts::table) + .values(&abstract_rows) + .on_conflict(abstracts::sha1) + .do_nothing() + .execute(conn)?; + insert_into(release_rev_abstract::table) + .values(release_abstract_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for WorkEntity { + type EditRow = WorkEditRow; + type EditNewRow = WorkEditNewRow; + type IdentRow = WorkIdentRow; + type IdentNewRow = WorkIdentNewRow; + type RevRow = WorkRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(work_ident, work_rev); + generic_db_get_rev!(work_rev); + generic_db_create!(work_ident, work_edit); + generic_db_create_batch!(work_ident, work_edit); + generic_db_update!(work_ident, work_edit); + generic_db_delete!(work_ident, work_edit); + generic_db_get_history!(work_edit); + generic_db_accept_edits_batch!("work"); + 
//generic_db_accept_edits_each!(work_ident, work_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(WorkEntity { + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + let rev_ids: Vec = insert_into(work_rev::table) + .values( + models + .iter() + .map(|model| WorkRevNewRow { + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(work_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 8ab9dcb3..456646b4 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -8,7 +8,7 @@ use errors::*; use regex::Regex; use std::str::FromStr; use uuid::Uuid; -use database_entity_crud::EntityCrud; +use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index 5fce8e64..70ebb02a 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -2,7 +2,7 @@ use api_helpers::*; use chrono; -use database_entity_crud::{EditContext, EntityCrud}; +use api_entity_crud::{EditContext, EntityCrud}; use database_models::*; use database_schema::*; use diesel::prelude::*; diff --git a/rust/src/database_entity_crud.rs b/rust/src/database_entity_crud.rs deleted file mode 100644 index 01323637..00000000 --- a/rust/src/database_entity_crud.rs +++ /dev/null @@ -1,1145 +0,0 @@ -use api_helpers::*; -use chrono; -use database_models::*; -use database_schema::*; -use diesel::prelude::*; -use diesel::{self, insert_into}; -use errors::*; -use fatcat_api::models::*; -use serde_json; -use sha1::Sha1; -use std::marker::Sized; -use std::str::FromStr; -use uuid::Uuid; - -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option, - pub autoaccept: bool, -} - -/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, - * instead of macros. - * - * Notably: - * - * db_get - * db_get_rev - * db_create - * db_create_batch - * db_update - * db_delete - * db_get_history - * - * For now, these will probably be macros, until we can level up our trait/generics foo. - */ - -// Associated Type, not parametric -pub trait EntityCrud -where - Self: Sized, -{ - // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a - // specific table? 
- type EditRow; // EntityEditRow - type EditNewRow; - type IdentRow; // EntityIdentRow - type IdentNewRow; - type RevRow; - - fn parse_editgroup_id(&self) -> Result>; - - // Generic Methods - fn db_get( - conn: &DbConn, - ident: FatCatId - ) -> Result; - fn db_get_rev( - conn: &DbConn, - rev_id: Uuid - ) -> Result; - fn db_create( - &self, - conn: &DbConn, - edit_context: &EditContext - ) -> Result; - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result>; - fn db_update( - &self, - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result>; - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId - ) -> Result; - - // Entity-specific Methods - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result; - fn db_insert_rev( - &self, - conn: &DbConn - ) -> Result; - fn db_insert_revs( - conn: &DbConn, - models: &[&Self] - ) -> Result>; -} - -// TODO: this could be a separate trait on all entities -macro_rules! generic_parse_editgroup_id{ - () => { - fn parse_editgroup_id(&self) -> Result> { - match &self.editgroup_id { - Some(s) => Ok(Some(FatCatId::from_str(&s)?)), - None => Ok(None), - } - } - } -} - -macro_rules! generic_db_get { - ($ident_table:ident, $rev_table:ident) => { - fn db_get(conn: &DbConn, ident: FatCatId) -> Result { - let (ident, rev): (Self::IdentRow, Self::RevRow) = $ident_table::table - .find(ident.to_uuid()) - .inner_join($rev_table::table) - .first(conn)?; - - Self::db_from_row(conn, rev, Some(ident)) - } - }; -} - -macro_rules! generic_db_get_rev { - ($rev_table:ident) => { - fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result { - let rev = $rev_table::table.find(rev_id).first(conn)?; - - Self::db_from_row(conn, rev, None) - } - }; -} - -macro_rules! generic_db_create { - // TODO: this path should call generic_db_create_batch - ($ident_table: ident, $edit_table: ident) => { - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - let rev_id = self.db_insert_rev(conn)?; - let ident: Uuid = insert_into($ident_table::table) - .values($ident_table::rev_id.eq(&rev_id)) - .returning($ident_table::id) - .get_result(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::ident_id.eq(&ident), - )) - .get_result(conn)?; - Ok(edit) - } - } -} - -macro_rules! 
generic_db_create_batch { - ($ident_table:ident, $edit_table:ident) => { - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into($ident_table::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(rev_id.clone()), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning($ident_table::id) - .get_results(conn)?; - let edits: Vec = insert_into($edit_table::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - }; -} - -macro_rules! generic_db_update { - ($ident_table: ident, $edit_table: ident) => { - fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if current.is_live != true { - // TODO: what if isn't live? 4xx not 5xx - bail!("can't delete an entity that doesn't exist yet"); - } - if current.rev_id.is_none() { - // TODO: what if it's already deleted? 4xx not 5xx - bail!("entity was already deleted"); - } - - let rev_id = self.db_insert_rev(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(&ident.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::prev_rev.eq(current.rev_id.unwrap()), - $edit_table::extra_json.eq(&self.extra), - )) - .get_result(conn)?; - - Ok(edit) - } - } -} - -macro_rules! generic_db_delete { - ($ident_table:ident, $edit_table:ident) => { - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if current.is_live != true { - // TODO: what if isn't live? 4xx not 5xx - bail!("can't delete an entity that doesn't exist yet"); - } - if current.rev_id.is_none() { - // TODO: what if it's already deleted? 4xx not 5xx - bail!("entity was already deleted"); - } - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(ident.to_uuid()), - $edit_table::rev_id.eq(None::), - $edit_table::redirect_id.eq(None::), - $edit_table::prev_rev.eq(current.rev_id), - $edit_table::extra_json.eq(&edit_context.extra_json), - )) - .get_result(conn)?; - - Ok(edit) - } - }; -} - -macro_rules! 
generic_db_get_history { - ($edit_table:ident) => { - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result> { - let limit = limit.unwrap_or(50); // TODO: make a static - - let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table - .inner_join(changelog::table) - .inner_join($edit_table::table) - .filter($edit_table::ident_id.eq(ident.to_uuid())) - .order(changelog::id.desc()) - .limit(limit) - .get_results(conn)?; - - let history: Result> = rows.into_iter() - .map(|(eg_row, cl_row, e_row)| { - Ok(EntityHistoryEntry { - edit: e_row.into_model()?, - editgroup: eg_row.into_model_partial(), - changelog_entry: cl_row.into_model(), - }) - }) - .collect(); - history - } - }; -} - -/* -// This would be the clean and efficient way, but see: -// https://github.com/diesel-rs/diesel/issues/1478 -// - diesel::update(container_ident::table) - .inner_join(container_edit::table.on( - container_ident::id.eq(container_edit::ident_id) - )) - .filter(container_edit::editgroup_id.eq(editgroup_id)) - .values(( - container_ident::is_live.eq(true), - container_ident::rev_id.eq(container_edit::rev_id), - container_ident::redirect_id.eq(container_edit::redirect_id), - )) - .execute()?; - -// Was previously: - - for entity in &["container", "creator", "file", "work", "release"] { - diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = entity - )).bind::(editgroup_id) - .execute(conn)?; -*/ - -// UPDATE FROM version: single query for many rows -// Works with Postgres, not Cockroach -macro_rules! generic_db_accept_edits_batch { - ($entity_name_str:expr) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - - let count = diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str - )).bind::(editgroup_id.to_uuid()) - .execute(conn)?; - Ok(count as u64) - } - } -} - -// UPDATE ROW version: single query per row -// CockroachDB version (slow, single query per row) -macro_rules! generic_db_accept_edits_each { - ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - - // 1. select edit rows (in sql) - let edit_rows: Vec = $edit_table::table - .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) - .get_results(conn)?; - // 2. create ident rows (in rust) - let ident_rows: Vec = edit_rows - .iter() - .map(|edit| - Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - - } - ) - .collect(); - /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ - // 3. update every row individually - let count = ident_rows.len() as u64; - for row in ident_rows { - diesel::update(&row) - .set(&row) - .execute(conn)?; - } - Ok(count) - } - }; -} - -macro_rules! 
generic_db_insert_rev { - () => { - fn db_insert_rev(&self, conn: &DbConn) -> Result { - Self::db_insert_revs(conn, &vec![self]).map(|id_list| id_list[0]) - } - } -} - -impl EntityCrud for ContainerEntity { - type EditRow = ContainerEditRow; - type EditNewRow = ContainerEditNewRow; - type IdentRow = ContainerIdentRow; - type IdentNewRow = ContainerIdentNewRow; - type RevRow = ContainerRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(container_ident, container_rev); - generic_db_get_rev!(container_rev); - generic_db_create!(container_ident, container_edit); - generic_db_create_batch!(container_ident, container_edit); - generic_db_update!(container_ident, container_edit); - generic_db_delete!(container_ident, container_edit); - generic_db_get_history!(container_edit); - generic_db_accept_edits_batch!("container"); - //generic_db_accept_edits_each!(container_ident, container_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(ContainerEntity { - issnl: rev_row.issnl, - wikidata_qid: rev_row.wikidata_qid, - publisher: rev_row.publisher, - name: rev_row.name, - abbrev: rev_row.abbrev, - coden: rev_row.coden, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - editgroup_id: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - if let Some(ref extid) = entity.issnl { - check_issn(extid)?; - } - } - - let rev_ids: Vec = insert_into(container_rev::table) - .values( - models - .iter() - .map(|model| ContainerRevNewRow { - name: model.name.clone(), - publisher: model.publisher.clone(), - issnl: model.issnl.clone(), - wikidata_qid: model.wikidata_qid.clone(), - abbrev: model.abbrev.clone(), - coden: model.coden.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(container_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for CreatorEntity { - type EditRow = CreatorEditRow; - type EditNewRow = CreatorEditNewRow; - type IdentRow = CreatorIdentRow; - type IdentNewRow = CreatorIdentNewRow; - type RevRow = CreatorRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(creator_ident, creator_rev); - generic_db_get_rev!(creator_rev); - generic_db_create!(creator_ident, creator_edit); - generic_db_create_batch!(creator_ident, creator_edit); - generic_db_update!(creator_ident, creator_edit); - generic_db_delete!(creator_ident, creator_edit); - generic_db_get_history!(creator_edit); - generic_db_accept_edits_batch!("creator"); - //generic_db_accept_edits_each!(creator_ident, creator_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - Ok(CreatorEntity { - display_name: rev_row.display_name, - 
given_name: rev_row.given_name, - surname: rev_row.surname, - orcid: rev_row.orcid, - wikidata_qid: rev_row.wikidata_qid, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.orcid { - check_orcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - } - - let rev_ids: Vec = insert_into(creator_rev::table) - .values( - models - .iter() - .map(|model| CreatorRevNewRow { - display_name: model.display_name.clone(), - given_name: model.given_name.clone(), - surname: model.surname.clone(), - orcid: model.orcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(creator_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for FileEntity { - type EditRow = FileEditRow; - type EditNewRow = FileEditNewRow; - type IdentRow = FileIdentRow; - type IdentNewRow = FileIdentNewRow; - type RevRow = FileRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(file_ident, file_rev); - generic_db_get_rev!(file_rev); - generic_db_create!(file_ident, file_edit); - generic_db_create_batch!(file_ident, file_edit); - generic_db_update!(file_ident, file_edit); - generic_db_delete!(file_ident, file_edit); - generic_db_get_history!(file_edit); - generic_db_accept_edits_batch!("file"); - //generic_db_accept_edits_each!(file_ident, file_edit); - generic_db_insert_rev!(); - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let releases: Vec = file_release::table - .filter(file_release::file_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) - .collect(); - - let urls: Vec = file_rev_url::table - .filter(file_rev_url::file_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map(|r: FileRevUrlRow| FileEntityUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - Ok(FileEntity { - sha1: rev_row.sha1, - sha256: rev_row.sha256, - md5: rev_row.md5, - size: rev_row.size.map(|v| v as i64), - urls: Some(urls), - mimetype: rev_row.mimetype, - releases: Some(releases.iter().map(|fcid| fcid.to_string()).collect()), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - let rev_ids: Vec = insert_into(file_rev::table) - .values( - models - .iter() - .map(|model| FileRevNewRow { - size: model.size, - sha1: model.sha1.clone(), - sha256: model.sha256.clone(), - md5: model.md5.clone(), - mimetype: model.mimetype.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(file_rev::id) - .get_results(conn)?; - - let mut file_release_rows: Vec = vec![]; - let mut file_url_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.releases { - None => (), - Some(release_list) => { - let these_release_rows: Result> = release_list - .iter() - .map(|r| { - Ok(FileReleaseRow { - file_rev: rev_id.clone(), - target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), - }) - }) - .collect(); - file_release_rows.extend(these_release_rows?); - } - }; - - match &model.urls { - None => (), - Some(url_list) => { - let these_url_rows: Vec = url_list - .into_iter() - .map(|u| FileRevUrlNewRow { - file_rev: rev_id.clone(), - rel: u.rel.clone(), - url: u.url.clone(), - }) - .collect(); - file_url_rows.extend(these_url_rows); - } - }; - } - - if !file_release_rows.is_empty() { - // TODO: shouldn't it be "file_rev_release"? 
- insert_into(file_release::table) - .values(file_release_rows) - .execute(conn)?; - } - - if !file_url_rows.is_empty() { - insert_into(file_rev_url::table) - .values(file_url_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for ReleaseEntity { - type EditRow = ReleaseEditRow; - type EditNewRow = ReleaseEditNewRow; - type IdentRow = ReleaseIdentRow; - type IdentNewRow = ReleaseIdentNewRow; - type RevRow = ReleaseRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(release_ident, release_rev); - generic_db_get_rev!(release_rev); - //generic_db_create!(release_ident, release_edit); - //generic_db_create_batch!(release_ident, release_edit); - generic_db_update!(release_ident, release_edit); - generic_db_delete!(release_ident, release_edit); - generic_db_get_history!(release_edit); - generic_db_accept_edits_batch!("release"); - //generic_db_accept_edits_each!(release_ident, release_edit); - generic_db_insert_rev!(); - - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - let mut edits = Self::db_create_batch(conn, edit_context, &vec![self])?; - // probably a more elegant way to destroy the vec and take first element - Ok(edits.pop().unwrap()) - } - - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - // This isn't the generic implementation because we need to create Work entities for each - // of the release entities passed (at least in the common case) - - // Generate the set of new work entities to insert (usually one for each release, but some - // releases might be pointed to a work already) - let mut new_work_models: Vec<&WorkEntity> = vec![]; - for entity in models { - if entity.work_id.is_none() { - new_work_models.push(&WorkEntity { - ident: None, - revision: None, - redirect: None, - state: None, - editgroup_id: None, - extra: None, - }); - }; - } - - // create the works, then pluck the list of idents from the result - let new_work_edits = - WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; - let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); - - // Copy all the release models, and ensure that each has work_id set, using the new work - // idents. There should be one new work ident for each release missing one. - let models_with_work_ids: Vec = models - .iter() - .map(|model| { - let mut model = (*model).clone(); - if model.work_id.is_none() { - model.work_id = - Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) - } - model - }) - .collect(); - let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); - let models = model_refs.as_slice(); - - // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
- let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into(release_ident::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(rev_id.clone()), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning(release_ident::id) - .get_results(conn)?; - let edits: Vec = insert_into(release_edit::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let refs: Vec = release_ref::table - .filter(release_ref::release_rev.eq(rev_row.id)) - .order(release_ref::index_val.asc()) - .get_results(conn)? - .into_iter() - .map(|r: ReleaseRefRow| ReleaseRef { - index: r.index_val, - key: r.key, - extra: r.extra_json, - container_title: r.container_title, - year: r.year, - title: r.title, - locator: r.locator, - target_release_id: r.target_release_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - }) - .collect(); - - let contribs: Vec = release_contrib::table - .filter(release_contrib::release_rev.eq(rev_row.id)) - .order(( - release_contrib::role.asc(), - release_contrib::index_val.asc(), - )) - .get_results(conn)? - .into_iter() - .map(|c: ReleaseContribRow| ReleaseContrib { - index: c.index_val, - raw_name: c.raw_name, - role: c.role, - extra: c.extra_json, - creator_id: c.creator_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - creator: None, - }) - .collect(); - - let abstracts: Vec = release_rev_abstract::table - .inner_join(abstracts::table) - .filter(release_rev_abstract::release_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map( - |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { - sha1: Some(r.0.abstract_sha1), - mimetype: r.0.mimetype, - lang: r.0.lang, - content: Some(r.1.content), - }, - ) - .collect(); - - Ok(ReleaseEntity { - title: rev_row.title, - release_type: rev_row.release_type, - release_status: rev_row.release_status, - release_date: rev_row - .release_date - .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)), - doi: rev_row.doi, - pmid: rev_row.pmid, - pmcid: rev_row.pmcid, - isbn13: rev_row.isbn13, - core_id: rev_row.core_id, - wikidata_qid: rev_row.wikidata_qid, - volume: rev_row.volume, - issue: rev_row.issue, - pages: rev_row.pages, - files: None, - container: None, - container_id: rev_row - .container_ident_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - publisher: rev_row.publisher, - language: rev_row.language, - work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), - refs: Some(refs), - contribs: Some(contribs), - abstracts: Some(abstracts), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.doi { - check_doi(extid)?; - } - if let Some(ref extid) = entity.pmid { - check_pmid(extid)?; - } - if let Some(ref extid) = entity.pmcid { - check_pmcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - } - - let rev_ids: Vec = insert_into(release_rev::table) - .values(models - .iter() - .map(|model| { - Ok(ReleaseRevNewRow { - title: model.title.clone(), - release_type: model.release_type.clone(), - release_status: model.release_status.clone(), - release_date: model.release_date.map(|v| v.naive_utc().date()), - doi: model.doi.clone(), - pmid: model.pmid.clone(), - pmcid: model.pmcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - isbn13: model.isbn13.clone(), - core_id: model.core_id.clone(), - volume: model.volume.clone(), - issue: model.issue.clone(), - pages: model.pages.clone(), - work_ident_id: match model.work_id.clone() { - None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), - Some(s) => FatCatId::from_str(&s)?.to_uuid(), - }, - container_ident_id: match model.container_id.clone() { - None => None, - Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), - }, - publisher: model.publisher.clone(), - language: model.language.clone(), - extra_json: model.extra.clone() - }) - }) - .collect::>>()?) 
- .returning(release_rev::id) - .get_results(conn)?; - - let mut release_ref_rows: Vec = vec![]; - let mut release_contrib_rows: Vec = vec![]; - let mut abstract_rows: Vec = vec![]; - let mut release_abstract_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.refs { - None => (), - Some(ref_list) => { - let these_ref_rows: Vec = ref_list - .iter() - .map(|r| { - Ok(ReleaseRefNewRow { - release_rev: rev_id.clone(), - target_release_ident_id: match r.target_release_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - index_val: r.index, - key: r.key.clone(), - container_title: r.container_title.clone(), - year: r.year, - title: r.title.clone(), - locator: r.locator.clone(), - extra_json: r.extra.clone(), - }) - }) - .collect::>>()?; - release_ref_rows.extend(these_ref_rows); - } - }; - - match &model.contribs { - None => (), - Some(contrib_list) => { - let these_contrib_rows: Vec = contrib_list - .iter() - .map(|c| { - Ok(ReleaseContribNewRow { - release_rev: rev_id.clone(), - creator_ident_id: match c.creator_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - raw_name: c.raw_name.clone(), - index_val: c.index, - role: c.role.clone(), - extra_json: c.extra.clone(), - }) - }) - .collect::>>()?; - release_contrib_rows.extend(these_contrib_rows); - } - }; - - if let Some(abstract_list) = &model.abstracts { - // For rows that specify content, we need to insert the abstract if it doesn't exist - // already - let new_abstracts: Vec = abstract_list - .iter() - .filter(|ea| ea.content.is_some()) - .map(|c| AbstractsRow { - sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), - content: c.content.clone().unwrap(), - }) - .collect(); - abstract_rows.extend(new_abstracts); - let new_release_abstract_rows: Vec = abstract_list - .into_iter() - .map(|c| { - Ok(ReleaseRevAbstractNewRow { - release_rev: rev_id.clone(), - abstract_sha1: match c.content { - Some(ref content) => Sha1::from(content).hexdigest(), - None => match c.sha1.clone() { - Some(v) => v, - None => bail!("either abstract_sha1 or content is required"), - }, - }, - lang: c.lang.clone(), - mimetype: c.mimetype.clone(), - }) - }) - .collect::>>()?; - release_abstract_rows.extend(new_release_abstract_rows); - } - } - - if !release_ref_rows.is_empty() { - insert_into(release_ref::table) - .values(release_ref_rows) - .execute(conn)?; - } - - if !release_contrib_rows.is_empty() { - insert_into(release_contrib::table) - .values(release_contrib_rows) - .execute(conn)?; - } - - if !abstract_rows.is_empty() { - // Sort of an "upsert"; only inserts new abstract rows if they don't already exist - insert_into(abstracts::table) - .values(&abstract_rows) - .on_conflict(abstracts::sha1) - .do_nothing() - .execute(conn)?; - insert_into(release_rev_abstract::table) - .values(release_abstract_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for WorkEntity { - type EditRow = WorkEditRow; - type EditNewRow = WorkEditNewRow; - type IdentRow = WorkIdentRow; - type IdentNewRow = WorkIdentNewRow; - type RevRow = WorkRevRow; - - generic_parse_editgroup_id!(); - generic_db_get!(work_ident, work_rev); - generic_db_get_rev!(work_rev); - generic_db_create!(work_ident, work_edit); - generic_db_create_batch!(work_ident, work_edit); - generic_db_update!(work_ident, work_edit); - generic_db_delete!(work_ident, work_edit); - generic_db_get_history!(work_edit); - generic_db_accept_edits_batch!("work"); - 
//generic_db_accept_edits_each!(work_ident, work_edit); - generic_db_insert_rev!(); - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(WorkEntity { - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev_row.extra_json, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - let rev_ids: Vec = insert_into(work_rev::table) - .values( - models - .iter() - .map(|model| WorkRevNewRow { - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(work_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 38f9b343..58147139 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -25,7 +25,7 @@ extern crate sha1; pub mod api_helpers; pub mod api_server; pub mod api_wrappers; -pub mod database_entity_crud; +pub mod api_entity_crud; pub mod database_models; pub mod database_schema; -- cgit v1.2.3 From d57a807ee0481fd6534307f0870a0f96ea19cd25 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 18:11:01 -0700 Subject: squelch unused macro warning --- rust/src/api_entity_crud.rs | 2 ++ 1 file changed, 2 insertions(+) (limited to 'rust') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index 01323637..e59b47b8 100644 --- a/rust/src/api_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -333,6 +333,7 @@ macro_rules! generic_db_get_history { // UPDATE FROM version: single query for many rows // Works with Postgres, not Cockroach +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_batch { ($entity_name_str:expr) => { fn db_accept_edits( @@ -361,6 +362,7 @@ macro_rules! generic_db_accept_edits_batch { // UPDATE ROW version: single query per row // CockroachDB version (slow, single query per row) +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_each { ($ident_table:ident, $edit_table:ident) => { fn db_accept_edits( -- cgit v1.2.3 From bf769e17ccdb788cb6127c26fb7f0e60e1fd0a9e Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 18:37:16 -0700 Subject: continue CRUD refactor, removing CUD handlers Can't remove the "get" handlers because of the expand parameter, which is only implemented for releases and in the handler (not in the CRUD trait, yet). Also didn't do the batch handler stuff yet. 
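
For context, the replacement pattern (visible in the api_wrappers.rs diff below) is for each
generated wrapper to build an EditContext and call the EntityCrud trait method directly inside
a transaction, instead of dispatching to a per-entity handler on Server. Roughly, for the
create case (a sketch of the macro body; error handling abbreviated, $post_resp is the macro
metavariable for the response type):

    let conn = self.db_pool.get().expect("db_pool error");
    let ret = match conn.transaction(|| {
        // build the edit context from the (optional) editgroup id carried on the entity
        let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?;
        // call the EntityCrud trait method and convert the resulting edit row into an API model
        Ok(entity.db_create(&conn, &edit_context)?.into_model()?)
    }) {
        Ok(edit) => $post_resp::CreatedEntity(edit),
        Err(e) => $post_resp::BadRequest(ErrorResponse { message: e.to_string() }),
    };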
--- rust/src/api_entity_crud.rs | 8 -- rust/src/api_helpers.rs | 29 ++++++ rust/src/api_server.rs | 221 +------------------------------------------- rust/src/api_wrappers.rs | 78 ++++++---------- 4 files changed, 60 insertions(+), 276 deletions(-) (limited to 'rust') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index e59b47b8..a1f4742b 100644 --- a/rust/src/api_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -6,19 +6,11 @@ use diesel::prelude::*; use diesel::{self, insert_into}; use errors::*; use fatcat_api::models::*; -use serde_json; use sha1::Sha1; use std::marker::Sized; use std::str::FromStr; use uuid::Uuid; -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option, - pub autoaccept: bool, -} - /* One goal here is to abstract the non-entity-specific bits into generic traits or functions, * instead of macros. * diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 456646b4..2d203232 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -2,6 +2,7 @@ use data_encoding::BASE32_NOPAD; use database_models::*; use database_schema::*; use fatcat_api::models::*; +use serde_json; use diesel; use diesel::prelude::*; use errors::*; @@ -13,6 +14,34 @@ use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option, + pub autoaccept: bool, +} + +pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { + let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth + let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { + (Some(eg), _) => eg, + // If autoaccept and no editgroup_id passed, always create a new one for this transaction + (None, true) => { + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(editor_id),)) + .get_result(conn)?; + FatCatId::from_uuid(&eg_row.id) + }, + (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), + }; + Ok(EditContext { + editor_id: FatCatId::from_uuid(&editor_id), + editgroup_id: editgroup_id, + extra_json: None, + autoaccept: autoaccept, + }) +} + /// This function should always be run within a transaction pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { // check for current active diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index 70ebb02a..e832385a 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -2,7 +2,7 @@ use api_helpers::*; use chrono; -use api_entity_crud::{EditContext, EntityCrud}; +use api_entity_crud::EntityCrud; use database_models::*; use database_schema::*; use diesel::prelude::*; @@ -53,27 +53,6 @@ macro_rules! 
count_entity { }}; } -fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { - let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth - let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { - (Some(eg), _) => eg, - // If autoaccept and no editgroup_id passed, always create a new one for this transaction - (None, true) => { - let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) - .values((editgroup::editor_id.eq(editor_id),)) - .get_result(conn)?; - FatCatId::from_uuid(&eg_row.id) - }, - (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), - }; - Ok(EditContext { - editor_id: FatCatId::from_uuid(&editor_id), - editgroup_id: editgroup_id, - extra_json: None, - autoaccept: autoaccept, - }) -} - #[derive(Clone)] pub struct Server { pub db_pool: ConnectionPool, @@ -248,163 +227,6 @@ impl Server { .collect() } - pub fn create_container_handler( - &self, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_container_handler( - &self, - id: &Uuid, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_container_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ContainerEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_creator_handler( - &self, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_creator_handler( - &self, - id: &Uuid, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_creator_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = CreatorEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_file_handler( - &self, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_file_handler( - &self, - id: &Uuid, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_file_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = 
FileEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_release_handler( - &self, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_release_handler( - &self, - id: &Uuid, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_release_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ReleaseEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_work_handler( - &self, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_work_handler( - &self, - id: &Uuid, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn delete_work_handler( - &self, - id: &Uuid, - editgroup_id: Option, - conn: &DbConn, - ) -> Result { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = WorkEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - - edit.into_model() - } - pub fn accept_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<()> { accept_editgroup(fcid2uuid(id)?, conn)?; Ok(()) @@ -638,45 +460,4 @@ impl Server { entity_batch_handler!(create_file_batch_handler, FileEntity); entity_batch_handler!(create_release_batch_handler, ReleaseEntity); entity_batch_handler!(create_work_batch_handler, WorkEntity); - - pub fn get_container_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - ContainerEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_creator_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - CreatorEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_file_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - FileEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_release_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - ReleaseEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_work_history_handler( - &self, - id: &Uuid, - limit: Option, - conn: &DbConn, - ) -> Result> { - WorkEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } } diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs index faafe984..0600f3de 100644 --- a/rust/src/api_wrappers.rs +++ b/rust/src/api_wrappers.rs @@ -1,6 +1,6 @@ //! 
API endpoint handlers -use api_helpers::fcid2uuid; +use api_helpers::*; use api_server::Server; use diesel::Connection; use errors::*; @@ -8,6 +8,9 @@ use fatcat_api::models; use fatcat_api::models::*; use fatcat_api::*; use futures::{self, Future}; +use api_entity_crud::EntityCrud; +use database_models::EntityEditRow; +use std::str::FromStr; /// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the /// codegen fatcat-api code wants) that call through to actual helpers (which have simple Result<> @@ -17,11 +20,11 @@ macro_rules! wrap_entity_handlers { // stable doesn't have a mechanism to "concat" or generate new identifiers in macros, at least // in the context of defining new functions. // The only stable approach I know of would be: https://github.com/dtolnay/mashup - ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_handler:ident, + ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_resp:ident, $post_batch_fn:ident, $post_batch_handler:ident, - $post_batch_resp:ident, $update_fn:ident, $update_handler:ident, $update_resp:ident, - $delete_fn:ident, $delete_handler:ident, $delete_resp:ident, $get_history_fn:ident, - $get_history_handler:ident, $get_history_resp:ident, $model:ident) => { + $post_batch_resp:ident, $update_fn:ident, $update_resp:ident, + $delete_fn:ident, $delete_resp:ident, $get_history_fn:ident, + $get_history_resp:ident, $model:ident) => { fn $get_fn( &self, @@ -61,7 +64,10 @@ macro_rules! wrap_entity_handlers { _context: &Context, ) -> Box + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$post_handler(entity, &conn)) { + let ret = match conn.transaction(|| { + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_create(&conn, &edit_context)?.into_model()?) + }) { Ok(edit) => $post_resp::CreatedEntity(edit), Err(Error(ErrorKind::Diesel(e), _)) => @@ -115,12 +121,12 @@ macro_rules! wrap_entity_handlers { entity: models::$model, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($update_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$update_handler(&id, entity, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_update(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $update_resp::UpdatedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -148,19 +154,16 @@ macro_rules! 
wrap_entity_handlers { editgroup_id: Option, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; - let editgroup_id = match editgroup_id { - Some(raw) => if let Ok(parsed) = fcid2uuid(&raw) { Some(parsed) } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(raw).to_string() })))) - } - None => None - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$delete_handler(&id, editgroup_id, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let editgroup_id: Option = match editgroup_id { + Some(s) => Some(FatCatId::from_str(&s)?), + None => None, + }; + let edit_context = make_edit_context(&conn, editgroup_id, false)?; + Ok($model::db_delete(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $delete_resp::DeletedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -188,13 +191,12 @@ macro_rules! wrap_entity_handlers { limit: Option, _context: &Context, ) -> Box + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($get_history_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); // No transaction for GET - let ret = match self.$get_history_handler(&id, limit, &conn) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + Ok($model::db_get_history(&conn, entity_id, limit)?) 
+ }) { Ok(history) => $get_history_resp::FoundEntityHistory(history), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -246,19 +248,15 @@ impl Api for Server { get_container_handler, GetContainerResponse, create_container, - create_container_handler, CreateContainerResponse, create_container_batch, create_container_batch_handler, CreateContainerBatchResponse, update_container, - update_container_handler, UpdateContainerResponse, delete_container, - delete_container_handler, DeleteContainerResponse, get_container_history, - get_container_history_handler, GetContainerHistoryResponse, ContainerEntity ); @@ -268,19 +266,15 @@ impl Api for Server { get_creator_handler, GetCreatorResponse, create_creator, - create_creator_handler, CreateCreatorResponse, create_creator_batch, create_creator_batch_handler, CreateCreatorBatchResponse, update_creator, - update_creator_handler, UpdateCreatorResponse, delete_creator, - delete_creator_handler, DeleteCreatorResponse, get_creator_history, - get_creator_history_handler, GetCreatorHistoryResponse, CreatorEntity ); @@ -289,19 +283,15 @@ impl Api for Server { get_file_handler, GetFileResponse, create_file, - create_file_handler, CreateFileResponse, create_file_batch, create_file_batch_handler, CreateFileBatchResponse, update_file, - update_file_handler, UpdateFileResponse, delete_file, - delete_file_handler, DeleteFileResponse, get_file_history, - get_file_history_handler, GetFileHistoryResponse, FileEntity ); @@ -310,19 +300,15 @@ impl Api for Server { get_release_handler, GetReleaseResponse, create_release, - create_release_handler, CreateReleaseResponse, create_release_batch, create_release_batch_handler, CreateReleaseBatchResponse, update_release, - update_release_handler, UpdateReleaseResponse, delete_release, - delete_release_handler, DeleteReleaseResponse, get_release_history, - get_release_history_handler, GetReleaseHistoryResponse, ReleaseEntity ); @@ -331,19 +317,15 @@ impl Api for Server { get_work_handler, GetWorkResponse, create_work, - create_work_handler, CreateWorkResponse, create_work_batch, create_work_batch_handler, CreateWorkBatchResponse, update_work, - update_work_handler, UpdateWorkResponse, delete_work, - delete_work_handler, DeleteWorkResponse, get_work_history, - get_work_history_handler, GetWorkHistoryResponse, WorkEntity ); -- cgit v1.2.3 From bed2d170a205df1356826d3fead3efa9991137a9 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 19:29:47 -0700 Subject: re-gen rust code --- rust/fatcat-api/README.md | 2 +- rust/fatcat-api/api.yaml | 20 ++++++++++----- rust/fatcat-api/api/swagger.yaml | 54 ++++++++++++++++++++++++++-------------- rust/fatcat-api/src/client.rs | 26 ++++++++++++++----- rust/fatcat-api/src/lib.rs | 28 ++++++++++++--------- rust/fatcat-api/src/mimetypes.rs | 20 ++++++++++----- rust/fatcat-api/src/server.rs | 44 +++++++++++++++++++++++--------- 7 files changed, 133 insertions(+), 61 deletions(-) (limited to 'rust') diff --git a/rust/fatcat-api/README.md b/rust/fatcat-api/README.md index c971b88c..1b566766 100644 --- a/rust/fatcat-api/README.md +++ b/rust/fatcat-api/README.md @@ -13,7 +13,7 @@ To see how to make this your own, look here: [README](https://github.com/swagger-api/swagger-codegen/blob/master/README.md) - API version: 0.1.0 -- Build date: 2018-09-08T04:52:59.479Z +- Build date: 2018-09-11T02:27:08.863Z This autogenerated project defines an API crate `fatcat` which contains: * An `Api` trait defining the API in Rust. 
diff --git a/rust/fatcat-api/api.yaml b/rust/fatcat-api/api.yaml index a8919216..2b0615d2 100644 --- a/rust/fatcat-api/api.yaml +++ b/rust/fatcat-api/api.yaml @@ -671,7 +671,7 @@ paths: operationId: "get_creator_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -939,7 +939,7 @@ paths: operationId: "get_release_files" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1081,7 +1081,7 @@ paths: operationId: "get_work_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1097,9 +1097,13 @@ paths: operationId: "get_editor" responses: 200: - description: Found Editor + description: Found schema: $ref: "#/definitions/editor" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1118,11 +1122,15 @@ paths: operationId: "get_editor_changelog" responses: 200: - description: Found Merged Changes + description: Found schema: type: array items: $ref: "#/definitions/changelog_entry" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1163,7 +1171,7 @@ paths: operationId: "get_editgroup" responses: 200: - description: Found Entity + description: Found schema: $ref: "#/definitions/editgroup" 400: diff --git a/rust/fatcat-api/api/swagger.yaml b/rust/fatcat-api/api/swagger.yaml index 0b1ca88a..9bc84351 100644 --- a/rust/fatcat-api/api/swagger.yaml +++ b/rust/fatcat-api/api/swagger.yaml @@ -834,13 +834,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/release_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_CREATOR_RELEASES" uppercase_data_type: "VEC" producesJson: true @@ -1749,13 +1749,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/file_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_RELEASE_FILES" uppercase_data_type: "VEC" producesJson: true @@ -2232,13 +2232,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/release_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_WORK_RELEASES" uppercase_data_type: "VEC" producesJson: true @@ -2286,14 +2286,23 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Editor" + description: "Found" schema: $ref: "#/definitions/editor" - x-responseId: "FoundEditor" - x-uppercaseResponseId: "FOUND_EDITOR" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITOR" uppercase_data_type: "EDITOR" producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "GET_EDITOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true 404: description: "Not Found" schema: @@ -2329,16 +2338,25 @@ paths: 
example: "\"id_example\".to_string()" responses: 200: - description: "Found Merged Changes" + description: "Found" schema: type: "array" items: $ref: "#/definitions/changelog_entry" - x-responseId: "FoundMergedChanges" - x-uppercaseResponseId: "FOUND_MERGED_CHANGES" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITOR_CHANGELOG" uppercase_data_type: "VEC" producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "GET_EDITOR_CHANGELOG" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true 404: description: "Not Found" schema: @@ -2428,11 +2446,11 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: $ref: "#/definitions/editgroup" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITGROUP" uppercase_data_type: "EDITGROUP" producesJson: true diff --git a/rust/fatcat-api/src/client.rs b/rust/fatcat-api/src/client.rs index 6f61f773..a08e3cfe 100644 --- a/rust/fatcat-api/src/client.rs +++ b/rust/fatcat-api/src/client.rs @@ -1749,7 +1749,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::>(&buf)?; - Ok(GetCreatorReleasesResponse::FoundEntity(body)) + Ok(GetCreatorReleasesResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -1809,7 +1809,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::(&buf)?; - Ok(GetEditgroupResponse::FoundEntity(body)) + Ok(GetEditgroupResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -1869,7 +1869,14 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::(&buf)?; - Ok(GetEditorResponse::FoundEditor(body)) + Ok(GetEditorResponse::Found(body)) + } + 400 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::(&buf)?; + + Ok(GetEditorResponse::BadRequest(body)) } 404 => { let mut buf = String::new(); @@ -1922,7 +1929,14 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::>(&buf)?; - Ok(GetEditorChangelogResponse::FoundMergedChanges(body)) + Ok(GetEditorChangelogResponse::Found(body)) + } + 400 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::(&buf)?; + + Ok(GetEditorChangelogResponse::BadRequest(body)) } 404 => { let mut buf = String::new(); @@ -2179,7 +2193,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::>(&buf)?; - Ok(GetReleaseFilesResponse::FoundEntity(body)) + Ok(GetReleaseFilesResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -2492,7 +2506,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; 
let body = serde_json::from_str::>(&buf)?; - Ok(GetWorkReleasesResponse::FoundEntity(body)) + Ok(GetWorkReleasesResponse::Found(body)) } 400 => { let mut buf = String::new(); diff --git a/rust/fatcat-api/src/lib.rs b/rust/fatcat-api/src/lib.rs index fc1ae2a1..a08c6e04 100644 --- a/rust/fatcat-api/src/lib.rs +++ b/rust/fatcat-api/src/lib.rs @@ -304,8 +304,8 @@ pub enum GetCreatorHistoryResponse { #[derive(Debug, PartialEq)] pub enum GetCreatorReleasesResponse { - /// Found Entity - FoundEntity(Vec), + /// Found + Found(Vec), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -316,8 +316,8 @@ pub enum GetCreatorReleasesResponse { #[derive(Debug, PartialEq)] pub enum GetEditgroupResponse { - /// Found Entity - FoundEntity(models::Editgroup), + /// Found + Found(models::Editgroup), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -328,8 +328,10 @@ pub enum GetEditgroupResponse { #[derive(Debug, PartialEq)] pub enum GetEditorResponse { - /// Found Editor - FoundEditor(models::Editor), + /// Found + Found(models::Editor), + /// Bad Request + BadRequest(models::ErrorResponse), /// Not Found NotFound(models::ErrorResponse), /// Generic Error @@ -338,8 +340,10 @@ pub enum GetEditorResponse { #[derive(Debug, PartialEq)] pub enum GetEditorChangelogResponse { - /// Found Merged Changes - FoundMergedChanges(Vec), + /// Found + Found(Vec), + /// Bad Request + BadRequest(models::ErrorResponse), /// Not Found NotFound(models::ErrorResponse), /// Generic Error @@ -384,8 +388,8 @@ pub enum GetReleaseResponse { #[derive(Debug, PartialEq)] pub enum GetReleaseFilesResponse { - /// Found Entity - FoundEntity(Vec), + /// Found + Found(Vec), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -440,8 +444,8 @@ pub enum GetWorkHistoryResponse { #[derive(Debug, PartialEq)] pub enum GetWorkReleasesResponse { - /// Found Entity - FoundEntity(Vec), + /// Found + Found(Vec), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found diff --git a/rust/fatcat-api/src/mimetypes.rs b/rust/fatcat-api/src/mimetypes.rs index 2c54a313..ff2c12ce 100644 --- a/rust/fatcat-api/src/mimetypes.rs +++ b/rust/fatcat-api/src/mimetypes.rs @@ -362,7 +362,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetCreatorReleases lazy_static! { - pub static ref GET_CREATOR_RELEASES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_CREATOR_RELEASES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetCreatorReleases lazy_static! { @@ -378,7 +378,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditgroup lazy_static! { - pub static ref GET_EDITGROUP_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_EDITGROUP_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditgroup lazy_static! { @@ -394,7 +394,11 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditor lazy_static! { - pub static ref GET_EDITOR_FOUND_EDITOR: Mime = mime!(Application / Json); + pub static ref GET_EDITOR_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for GetEditor + lazy_static! { + pub static ref GET_EDITOR_BAD_REQUEST: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditor lazy_static! 
{ @@ -406,7 +410,11 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditorChangelog lazy_static! { - pub static ref GET_EDITOR_CHANGELOG_FOUND_MERGED_CHANGES: Mime = mime!(Application / Json); + pub static ref GET_EDITOR_CHANGELOG_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for GetEditorChangelog + lazy_static! { + pub static ref GET_EDITOR_CHANGELOG_BAD_REQUEST: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditorChangelog lazy_static! { @@ -466,7 +474,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetReleaseFiles lazy_static! { - pub static ref GET_RELEASE_FILES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_RELEASE_FILES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetReleaseFiles lazy_static! { @@ -538,7 +546,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetWorkReleases lazy_static! { - pub static ref GET_WORK_RELEASES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_WORK_RELEASES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetWorkReleases lazy_static! { diff --git a/rust/fatcat-api/src/server.rs b/rust/fatcat-api/src/server.rs index 04d10e14..dfc94a81 100644 --- a/rust/fatcat-api/src/server.rs +++ b/rust/fatcat-api/src/server.rs @@ -2361,11 +2361,11 @@ where match api.get_creator_releases(param_id, context).wait() { Ok(rsp) => match rsp { - GetCreatorReleasesResponse::FoundEntity(body) => { + GetCreatorReleasesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_CREATOR_RELEASES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_CREATOR_RELEASES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2449,11 +2449,11 @@ where match api.get_editgroup(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditgroupResponse::FoundEntity(body) => { + GetEditgroupResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITGROUP_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITGROUP_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2537,11 +2537,21 @@ where match api.get_editor(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditorResponse::FoundEditor(body) => { + GetEditorResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_FOUND_EDITOR.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + GetEditorResponse::BadRequest(body) => { + let body_string = 
serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_BAD_REQUEST.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2615,11 +2625,21 @@ where match api.get_editor_changelog(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditorChangelogResponse::FoundMergedChanges(body) => { + GetEditorChangelogResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_FOUND_MERGED_CHANGES.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + GetEditorChangelogResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_BAD_REQUEST.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2969,11 +2989,11 @@ where match api.get_release_files(param_id, context).wait() { Ok(rsp) => match rsp { - GetReleaseFilesResponse::FoundEntity(body) => { + GetReleaseFilesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_RELEASE_FILES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_RELEASE_FILES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -3391,11 +3411,11 @@ where match api.get_work_releases(param_id, context).wait() { Ok(rsp) => match rsp { - GetWorkReleasesResponse::FoundEntity(body) => { + GetWorkReleasesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_WORK_RELEASES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_WORK_RELEASES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); -- cgit v1.2.3 From 3df7c6fa601309b74c1b664f2f09f048dce2bf29 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Mon, 10 Sep 2018 19:30:07 -0700 Subject: many small API cleanups - use FatCatId much more often (though not everywhere yet) - more consistent types - remove redundant error handling code in wrappers --- rust/src/api_entity_crud.rs | 82 +++++++++------------------ rust/src/api_helpers.rs | 35 ++++++------ rust/src/api_server.rs | 90 +++++++++++++++--------------- rust/src/api_wrappers.rs | 132 ++++++++++++++++++++------------------------ rust/src/lib.rs | 2 +- 5 files changed, 148 insertions(+), 193 deletions(-) (limited to 'rust') diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs index a1f4742b..dd0961d5 100644 --- a/rust/src/api_entity_crud.rs 
+++ b/rust/src/api_entity_crud.rs @@ -43,19 +43,9 @@ where fn parse_editgroup_id(&self) -> Result>; // Generic Methods - fn db_get( - conn: &DbConn, - ident: FatCatId - ) -> Result; - fn db_get_rev( - conn: &DbConn, - rev_id: Uuid - ) -> Result; - fn db_create( - &self, - conn: &DbConn, - edit_context: &EditContext - ) -> Result; + fn db_get(conn: &DbConn, ident: FatCatId) -> Result; + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result; + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result; fn db_create_batch( conn: &DbConn, edit_context: &EditContext, @@ -77,10 +67,7 @@ where ident: FatCatId, limit: Option, ) -> Result>; - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId - ) -> Result; + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result; // Entity-specific Methods fn db_from_row( @@ -88,14 +75,8 @@ where rev_row: Self::RevRow, ident_row: Option, ) -> Result; - fn db_insert_rev( - &self, - conn: &DbConn - ) -> Result; - fn db_insert_revs( - conn: &DbConn, - models: &[&Self] - ) -> Result>; + fn db_insert_rev(&self, conn: &DbConn) -> Result; + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result>; } // TODO: this could be a separate trait on all entities @@ -328,11 +309,7 @@ macro_rules! generic_db_get_history { #[allow(unused_macros)] macro_rules! generic_db_accept_edits_batch { ($entity_name_str:expr) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { let count = diesel::sql_query(format!( " UPDATE {entity}_ident @@ -344,12 +321,12 @@ macro_rules! generic_db_accept_edits_batch { WHERE {entity}_ident.id = {entity}_edit.ident_id AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str + entity = $entity_name_str )).bind::(editgroup_id.to_uuid()) .execute(conn)?; Ok(count as u64) } - } + }; } // UPDATE ROW version: single query per row @@ -357,11 +334,7 @@ macro_rules! generic_db_accept_edits_batch { #[allow(unused_macros)] macro_rules! generic_db_accept_edits_each { ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { // 1. select edit rows (in sql) let edit_rows: Vec = $edit_table::table .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) @@ -369,31 +342,26 @@ macro_rules! generic_db_accept_edits_each { // 2. create ident rows (in rust) let ident_rows: Vec = edit_rows .iter() - .map(|edit| - Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - - } - ) + .map(|edit| Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + }) .collect(); /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ + // 3. upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ // 3. 
update every row individually let count = ident_rows.len() as u64; for row in ident_rows { - diesel::update(&row) - .set(&row) - .execute(conn)?; + diesel::update(&row).set(&row).execute(conn)?; } Ok(count) } diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 2d203232..6c214223 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -1,15 +1,15 @@ +use api_entity_crud::EntityCrud; use data_encoding::BASE32_NOPAD; use database_models::*; use database_schema::*; -use fatcat_api::models::*; -use serde_json; use diesel; use diesel::prelude::*; use errors::*; +use fatcat_api::models::*; use regex::Regex; +use serde_json; use std::str::FromStr; use uuid::Uuid; -use api_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection>; @@ -21,7 +21,11 @@ pub struct EditContext { pub autoaccept: bool, } -pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoaccept: bool) -> Result { +pub fn make_edit_context( + conn: &DbConn, + editgroup_id: Option, + autoaccept: bool, +) -> Result { let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { (Some(eg), _) => eg, @@ -31,7 +35,7 @@ pub fn make_edit_context(conn: &DbConn, editgroup_id: Option, autoacce .values((editgroup::editor_id.eq(editor_id),)) .get_result(conn)?; FatCatId::from_uuid(&eg_row.id) - }, + } (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), }; Ok(EditContext { @@ -61,34 +65,33 @@ pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { } /// This function should always be run within a transaction -pub fn accept_editgroup(editgroup_id: Uuid, conn: &DbConn) -> Result { +pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result { // check that we haven't accepted already (in changelog) // NB: could leave this to a UNIQUE constraint let count: i64 = changelog::table - .filter(changelog::editgroup_id.eq(editgroup_id)) + .filter(changelog::editgroup_id.eq(editgroup_id.to_uuid())) .count() .get_result(conn)?; if count > 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted(uuid2fcid(&editgroup_id)).into()); + return Err(ErrorKind::EditgroupAlreadyAccepted(editgroup_id.to_string()).into()); } // copy edit columns to ident table - let eg_id = FatCatId::from_uuid(&editgroup_id); - ContainerEntity::db_accept_edits(conn, eg_id)?; - CreatorEntity::db_accept_edits(conn, eg_id)?; - FileEntity::db_accept_edits(conn, eg_id)?; - ReleaseEntity::db_accept_edits(conn, eg_id)?; - WorkEntity::db_accept_edits(conn, eg_id)?; + ContainerEntity::db_accept_edits(conn, editgroup_id)?; + CreatorEntity::db_accept_edits(conn, editgroup_id)?; + FileEntity::db_accept_edits(conn, editgroup_id)?; + ReleaseEntity::db_accept_edits(conn, editgroup_id)?; + WorkEntity::db_accept_edits(conn, editgroup_id)?; // append log/changelog row let entry: ChangelogRow = diesel::insert_into(changelog::table) - .values((changelog::editgroup_id.eq(editgroup_id),)) + .values((changelog::editgroup_id.eq(editgroup_id.to_uuid()),)) .get_result(conn)?; // update any editor's active editgroup let no_active: Option = None; diesel::update(editor::table) - .filter(editor::active_editgroup_id.eq(editgroup_id)) + .filter(editor::active_editgroup_id.eq(editgroup_id.to_uuid())) .set(editor::active_editgroup_id.eq(no_active)) .execute(conn)?; Ok(entry) diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index e832385a..076bc085 100644 --- a/rust/src/api_server.rs +++ 
b/rust/src/api_server.rs
@@ -1,8 +1,8 @@
 //! API endpoint handlers

+use api_entity_crud::EntityCrud;
 use api_helpers::*;
 use chrono;
-use api_entity_crud::EntityCrud;
 use database_models::*;
 use database_schema::*;
 use diesel::prelude::*;
@@ -11,7 +11,6 @@ use errors::*;
 use fatcat_api::models;
 use fatcat_api::models::*;
 use std::str::FromStr;
-use uuid::Uuid;
 use ConnectionPool;

 macro_rules! entity_batch_handler {
@@ -61,11 +60,11 @@ pub struct Server {
 impl Server {
     pub fn get_container_handler(
         &self,
-        id: &Uuid,
+        id: FatCatId,
         _expand: Option<String>,
         conn: &DbConn,
     ) -> Result<ContainerEntity> {
-        ContainerEntity::db_get(conn, FatCatId::from_uuid(id))
+        ContainerEntity::db_get(conn, id)
     }

     pub fn lookup_container_handler(&self, issnl: &str, conn: &DbConn) -> Result<ContainerEntity> {
@@ -85,11 +84,11 @@ impl Server {

     pub fn get_creator_handler(
         &self,
-        id: &Uuid,
+        id: FatCatId,
         _expand: Option<String>,
         conn: &DbConn,
     ) -> Result<CreatorEntity> {
-        CreatorEntity::db_get(conn, FatCatId::from_uuid(id))
+        CreatorEntity::db_get(conn, id)
     }

     pub fn lookup_creator_handler(&self, orcid: &str, conn: &DbConn) -> Result<CreatorEntity> {
@@ -109,16 +108,14 @@ impl Server {

     pub fn get_creator_releases_handler(
         &self,
-        id: &str,
+        id: FatCatId,
         conn: &DbConn,
     ) -> Result<Vec<ReleaseEntity>> {
-        let id = fcid2uuid(&id)?;
-
         // TODO: some kind of unique or group-by?
         let rows: Vec<(ReleaseRevRow, ReleaseIdentRow, ReleaseContribRow)> = release_rev::table
             .inner_join(release_ident::table)
             .inner_join(release_contrib::table)
-            .filter(release_contrib::creator_ident_id.eq(&id))
+            .filter(release_contrib::creator_ident_id.eq(&id.to_uuid()))
             .filter(release_ident::is_live.eq(true))
             .filter(release_ident::redirect_id.is_null())
             .load(conn)?;
@@ -131,11 +128,11 @@ impl Server {

     pub fn get_file_handler(
         &self,
-        id: &Uuid,
+        id: FatCatId,
         _expand: Option<String>,
         conn: &DbConn,
     ) -> Result<FileEntity> {
-        FileEntity::db_get(conn, FatCatId::from_uuid(id))
+        FileEntity::db_get(conn, id)
     }

     pub fn lookup_file_handler(&self, sha1: &str, conn: &DbConn) -> Result<FileEntity> {
@@ -154,19 +151,18 @@ impl Server {

     pub fn get_release_handler(
         &self,
-        id: &Uuid,
+        id: FatCatId,
         expand: Option<String>,
         conn: &DbConn,
     ) -> Result<ReleaseEntity> {
-        let mut release = ReleaseEntity::db_get(conn, FatCatId::from_uuid(id))?;
+        let mut release = ReleaseEntity::db_get(conn, id)?;

         // For now, if there is any expand param we do them all
         if expand.is_some() {
-            release.files =
-                Some(self.get_release_files_handler(&release.ident.clone().unwrap(), conn)?);
+            release.files = Some(self.get_release_files_handler(id, conn)?);
             if let Some(ref cid) = release.container_id {
                 release.container =
-                    Some(self.get_container_handler(&fcid2uuid(&cid)?, None, conn)?);
+                    Some(self.get_container_handler(FatCatId::from_str(&cid)?, None, conn)?);
             }
         }
         Ok(release)
@@ -187,13 +183,15 @@ impl Server {
         ReleaseEntity::db_from_row(conn, rev, Some(ident))
     }

-    pub fn get_release_files_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<FileEntity>> {
-        let ident = FatCatId::from_str(id)?;
-
+    pub fn get_release_files_handler(
+        &self,
+        id: FatCatId,
+        conn: &DbConn,
+    ) -> Result<Vec<FileEntity>> {
         let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table
             .inner_join(file_ident::table)
             .inner_join(file_release::table)
-            .filter(file_release::target_release_ident_id.eq(&ident.to_uuid()))
+            .filter(file_release::target_release_ident_id.eq(&id.to_uuid()))
             .filter(file_ident::is_live.eq(true))
             .filter(file_ident::redirect_id.is_null())
             .load(conn)?;
@@ -205,19 +203,21 @@ impl Server {

     pub fn get_work_handler(
         &self,
-        id: &Uuid,
+        id: FatCatId,
         _expand: Option<String>,
         conn: &DbConn,
     ) -> Result<WorkEntity> {
-        WorkEntity::db_get(conn, FatCatId::from_uuid(id))
+        WorkEntity::db_get(conn, id)
     }

-    pub fn get_work_releases_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<ReleaseEntity>> {
-        let id = fcid2uuid(&id)?;
-
+    pub fn get_work_releases_handler(
+        &self,
+        id: FatCatId,
+        conn: &DbConn,
+    ) -> Result<Vec<ReleaseEntity>> {
         let rows: Vec<(ReleaseRevRow, ReleaseIdentRow)> = release_rev::table
             .inner_join(release_ident::table)
-            .filter(release_rev::work_ident_id.eq(&id))
+            .filter(release_rev::work_ident_id.eq(&id.to_uuid()))
             .filter(release_ident::is_live.eq(true))
             .filter(release_ident::redirect_id.is_null())
             .load(conn)?;
@@ -227,8 +227,8 @@ impl Server {
             .collect()
     }

-    pub fn accept_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<()> {
-        accept_editgroup(fcid2uuid(id)?, conn)?;
+    pub fn accept_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result<()> {
+        accept_editgroup(id, conn)?;
         Ok(())
     }

@@ -239,7 +239,7 @@ impl Server {
     ) -> Result<Editgroup> {
         let row: EditgroupRow = insert_into(editgroup::table)
             .values((
-                editgroup::editor_id.eq(fcid2uuid(&entity.editor_id)?),
+                editgroup::editor_id.eq(FatCatId::from_str(&entity.editor_id)?.to_uuid()),
                 editgroup::description.eq(entity.description),
                 editgroup::extra_json.eq(entity.extra),
             ))
@@ -254,14 +254,13 @@ impl Server {
         })
     }

-    pub fn get_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<Editgroup> {
-        let id = fcid2uuid(id)?;
-        let row: EditgroupRow = editgroup::table.find(id).first(conn)?;
+    pub fn get_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result<Editgroup> {
+        let row: EditgroupRow = editgroup::table.find(id.to_uuid()).first(conn)?;

         let edits = EditgroupEdits {
             containers: Some(
                 container_edit::table
-                    .filter(container_edit::editgroup_id.eq(id))
+                    .filter(container_edit::editgroup_id.eq(id.to_uuid()))
                     .get_results(conn)?
                     .into_iter()
                     .map(|e: ContainerEditRow| e.into_model().unwrap())
@@ -269,7 +268,7 @@ impl Server {
             ),
             creators: Some(
                 creator_edit::table
-                    .filter(creator_edit::editgroup_id.eq(id))
+                    .filter(creator_edit::editgroup_id.eq(id.to_uuid()))
                     .get_results(conn)?
                     .into_iter()
                     .map(|e: CreatorEditRow| e.into_model().unwrap())
@@ -277,7 +276,7 @@ impl Server {
             ),
             files: Some(
                 file_edit::table
-                    .filter(file_edit::editgroup_id.eq(id))
+                    .filter(file_edit::editgroup_id.eq(id.to_uuid()))
                     .get_results(conn)?
                     .into_iter()
                     .map(|e: FileEditRow| e.into_model().unwrap())
@@ -285,7 +284,7 @@ impl Server {
             ),
             releases: Some(
                 release_edit::table
-                    .filter(release_edit::editgroup_id.eq(id))
+                    .filter(release_edit::editgroup_id.eq(id.to_uuid()))
                     .get_results(conn)?
                     .into_iter()
                     .map(|e: ReleaseEditRow| e.into_model().unwrap())
@@ -293,7 +292,7 @@ impl Server {
             ),
             works: Some(
                 work_edit::table
-                    .filter(work_edit::editgroup_id.eq(id))
+                    .filter(work_edit::editgroup_id.eq(id.to_uuid()))
                     .get_results(conn)?
                     .into_iter()
                     .map(|e: WorkEditRow| e.into_model().unwrap())
@@ -311,9 +310,8 @@ impl Server {
         Ok(eg)
     }

-    pub fn get_editor_handler(&self, id: &str, conn: &DbConn) -> Result<Editor> {
-        let id = fcid2uuid(id)?;
-        let row: EditorRow = editor::table.find(id).first(conn)?;
+    pub fn get_editor_handler(&self, id: FatCatId, conn: &DbConn) -> Result<Editor> {
+        let row: EditorRow = editor::table.find(id.to_uuid()).first(conn)?;

         let ed = Editor {
             id: Some(uuid2fcid(&row.id)),
@@ -322,14 +320,13 @@ impl Server {
         Ok(ed)
     }

-    pub fn editor_changelog_get_handler(
+    pub fn get_editor_changelog_handler(
         &self,
-        id: &str,
+        id: FatCatId,
         conn: &DbConn,
     ) -> Result<Vec<ChangelogEntry>> {
-        let id = fcid2uuid(id)?;
         // TODO: single query
-        let editor: EditorRow = editor::table.find(id).first(conn)?;
+        let editor: EditorRow = editor::table.find(id.to_uuid()).first(conn)?;
         let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table
             .inner_join(editgroup::table)
             .filter(editgroup::editor_id.eq(editor.id))
@@ -374,7 +371,8 @@ impl Server {

     pub fn get_changelog_entry_handler(&self, id: i64, conn: &DbConn) -> Result<ChangelogEntry> {
         let cl_row: ChangelogRow = changelog::table.find(id).first(conn)?;
-        let editgroup = self.get_editgroup_handler(&uuid2fcid(&cl_row.editgroup_id), conn)?;
+        let editgroup =
+            self.get_editgroup_handler(FatCatId::from_uuid(&cl_row.editgroup_id), conn)?;

         let mut entry = cl_row.into_model();
         entry.editgroup = Some(editgroup);
diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs
index 0600f3de..f6425c8c 100644
--- a/rust/src/api_wrappers.rs
+++ b/rust/src/api_wrappers.rs
@@ -1,15 +1,15 @@
 //! API endpoint handlers

+use api_entity_crud::EntityCrud;
 use api_helpers::*;
 use api_server::Server;
+use database_models::EntityEditRow;
 use diesel::Connection;
 use errors::*;
 use fatcat_api::models;
 use fatcat_api::models::*;
 use fatcat_api::*;
 use futures::{self, Future};
-use api_entity_crud::EntityCrud;
-use database_models::EntityEditRow;
 use std::str::FromStr;

 /// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the
@@ -32,13 +32,12 @@ macro_rules! wrap_entity_handlers {
             expand: Option<String>,
             _context: &Context,
         ) -> Box<Future<Item = $get_resp, Error = ApiError> + Send> {
-            let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else {
-                return Box::new(futures::done(Ok($get_resp::BadRequest(ErrorResponse {
-                    message: ErrorKind::InvalidFatcatId(id).to_string() }))));
-            };
             let conn = self.db_pool.get().expect("db_pool error");
             // No transaction for GET
-            let ret = match self.$get_handler(&id, expand, &conn) {
+            let ret = match conn.transaction(|| {
+                let entity_id = FatCatId::from_str(&id)?;
+                self.$get_handler(entity_id, expand, &conn)
+            }) {
                 Ok(entity) =>
                     $get_resp::FoundEntity(entity),
                 Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
@@ -242,6 +241,35 @@ macro_rules! wrap_lookup_handler {
     }
 }

+macro_rules! wrap_fcid_handler {
+    ($get_fn:ident, $get_handler:ident, $get_resp:ident) => {
+        fn $get_fn(
+            &self,
+            id: String,
+            _context: &Context,
+        ) -> Box<Future<Item = $get_resp, Error = ApiError> + Send> {
+            let conn = self.db_pool.get().expect("db_pool error");
+            // No transaction for GET
+            let ret = match (|| {
+                let fcid = FatCatId::from_str(&id)?;
+                self.$get_handler(fcid, &conn)
+            })() {
+                Ok(entity) =>
+                    $get_resp::Found(entity),
+                Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
+                    $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }),
+                Err(Error(ErrorKind::MalformedExternalId(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(e) => {
+                    error!("{}", e);
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() })
+                },
+            };
+            Box::new(futures::done(Ok(ret)))
+        }
+    }
+}
+
 impl Api for Server {
     wrap_entity_handlers!(
         get_container,
@@ -359,26 +387,26 @@ impl Api for Server {
         String
     );

-    wrap_lookup_handler!(
+    wrap_fcid_handler!(
         get_release_files,
         get_release_files_handler,
-        GetReleaseFilesResponse,
-        id,
-        String
+        GetReleaseFilesResponse
     );
-    wrap_lookup_handler!(
+    wrap_fcid_handler!(
         get_work_releases,
         get_work_releases_handler,
-        GetWorkReleasesResponse,
-        id,
-        String
+        GetWorkReleasesResponse
     );
-    wrap_lookup_handler!(
+    wrap_fcid_handler!(
         get_creator_releases,
         get_creator_releases_handler,
-        GetCreatorReleasesResponse,
-        id,
-        String
+        GetCreatorReleasesResponse
+    );
+    wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse);
+    wrap_fcid_handler!(
+        get_editor_changelog,
+        get_editor_changelog_handler,
+        GetEditorChangelogResponse
     );

     fn accept_editgroup(
         &self,
         id: String,
         _context: &Context,
     ) -> Box<Future<Item = AcceptEditgroupResponse, Error = ApiError> + Send> {
         let conn = self.db_pool.get().expect("db_pool error");
-        let ret = match conn.transaction(|| self.accept_editgroup_handler(&id, &conn)) {
+        let ret = match conn.transaction(|| {
+            let id = FatCatId::from_str(&id)?;
+            self.accept_editgroup_handler(id, &conn)
+        }) {
             Ok(()) => AcceptEditgroupResponse::MergedSuccessfully(Success {
                 message: "horray!".to_string(),
             }),
@@ -414,9 +445,12 @@ impl Api for Server {
         _context: &Context,
     ) -> Box<Future<Item = GetEditgroupResponse, Error = ApiError> + Send> {
         let conn = self.db_pool.get().expect("db_pool error");
-        let ret = match conn.transaction(|| self.get_editgroup_handler(&id, &conn)) {
+        let ret = match conn.transaction(|| {
+            let id = FatCatId::from_str(&id)?;
+            self.get_editgroup_handler(id, &conn)
+        }) {
             Ok(entity) =>
-                GetEditgroupResponse::FoundEntity(entity),
+                GetEditgroupResponse::Found(entity),
             Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
                 GetEditgroupResponse::NotFound(
                     ErrorResponse { message: format!("No such editgroup: {}", id) }),
@@ -434,7 +468,9 @@ impl Api for Server {
         _context: &Context,
     ) -> Box<Future<Item = CreateEditgroupResponse, Error = ApiError> + Send> {
         let conn = self.db_pool.get().expect("db_pool error");
-        let ret = match conn.transaction(|| self.create_editgroup_handler(entity, &conn)) {
+        let ret = match conn.transaction(||
+            self.create_editgroup_handler(entity, &conn)
+        ) {
             Ok(eg) =>
                 CreateEditgroupResponse::SuccessfullyCreated(eg),
             Err(e) =>
@@ -445,56 +481,6 @@ impl Api for Server {
         Box::new(futures::done(Ok(ret)))
     }

-    fn get_editor_changelog(
-        &self,
-        username: String,
-        _context: &Context,
-    ) -> Box<Future<Item = GetEditorChangelogResponse, Error = ApiError> + Send> {
-        let conn = self.db_pool.get().expect("db_pool error");
-        // No transaction for GET
-        let ret = match self.editor_changelog_get_handler(&username, &conn) {
-            Ok(entries) => GetEditorChangelogResponse::FoundMergedChanges(entries),
-            Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => {
-                GetEditorChangelogResponse::NotFound(ErrorResponse {
-                    message: format!("No such editor: {}", username.clone()),
-                })
-            }
-            Err(e) => {
-                // TODO: dig in to error type here
-                error!("{}", e);
-                GetEditorChangelogResponse::GenericError(ErrorResponse {
-                    message: e.to_string(),
-                })
-            }
-        };
-        Box::new(futures::done(Ok(ret)))
-    }
-
-    fn get_editor(
-        &self,
-        username: String,
-        _context: &Context,
-    ) -> Box<Future<Item = GetEditorResponse, Error = ApiError> + Send> {
-        let conn = self.db_pool.get().expect("db_pool error");
-        // No transaction for GET
-        let ret = match self.get_editor_handler(&username, &conn) {
-            Ok(entity) => GetEditorResponse::FoundEditor(entity),
-            Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => {
-                GetEditorResponse::NotFound(ErrorResponse {
-                    message: format!("No such editor: {}", username.clone()),
-                })
-            }
-            Err(e) => {
-                // TODO: dig in to error type here
-                error!("{}", e);
-                GetEditorResponse::GenericError(ErrorResponse {
-                    message: e.to_string(),
-                })
-            }
-        };
-        Box::new(futures::done(Ok(ret)))
-    }
-
     fn get_changelog(
         &self,
         limit: Option<i64>,
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 58147139..57cc535c 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -22,10 +22,10 @@ extern crate regex;
 extern crate lazy_static;
 extern crate sha1;

+pub mod api_entity_crud;
 pub mod api_helpers;
 pub mod api_server;
 pub mod api_wrappers;
-pub mod api_entity_crud;
 pub mod database_models;
 pub mod database_schema;
--
cgit v1.2.3


From 5647fd1c8c043a1e36af39a03b6495bc75b576dc Mon Sep 17 00:00:00 2001
From: Bryan Newbold
Date: Mon, 10 Sep 2018 19:32:46 -0700
Subject: update TODO

---
 rust/TODO | 10 ++--------
 1 file changed, 2 insertions(+), 8 deletions(-)

(limited to 'rust')

diff --git a/rust/TODO b/rust/TODO
index ad4b1241..ac378961 100644
--- a/rust/TODO
+++ b/rust/TODO
@@ -1,16 +1,10 @@
-finish refactor:
-- database_entity_crud -> api_entity_crud
-x merge autoaccept branch in with http-verbs branch
-- direct CRUD calls from api_wrappers (except, maybe, batch?)
-  => generally, standardize "edit" actions
-- FatCatId and edit context between wrappers and handlers
-- review editgroup accept code
-
 verbs:
 - enforce "previous_rev" required in updates
+- review editgroup accept code (?)

 - fatcat_api -> fatcat_api_schema (or spec? models? types?)
+- generally, standardize "edit" actions
 - fatcat -> fatcat-api-server
 - editgroup param to update
   => also for creation? for consistency

--
cgit v1.2.3
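
For readers skimming the patches above: the `wrap_fcid_handler!` macro added to `api_wrappers.rs` is what lets the `get_editor`, `get_editor_changelog`, `get_release_files`, `get_work_releases`, and `get_creator_releases` wrappers shrink to one-line invocations. Below is a rough hand-expansion of the macro for the `get_editor` case. It is an illustrative sketch only, not code from the patches, and it assumes the generated `GetEditorResponse` type exposes the `Found`/`NotFound`/`BadRequest` variants that the macro relies on.

    // Approximate expansion of:
    //     wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse);
    fn get_editor(
        &self,
        id: String,
        _context: &Context,
    ) -> Box<Future<Item = GetEditorResponse, Error = ApiError> + Send> {
        let conn = self.db_pool.get().expect("db_pool error");
        // No transaction for GET; the fcid string is parsed inside the closure, so a
        // malformed identifier propagates via `?` and lands in the BadRequest arm below.
        let ret = match (|| {
            let fcid = FatCatId::from_str(&id)?;
            self.get_editor_handler(fcid, &conn)
        })() {
            Ok(editor) => GetEditorResponse::Found(editor),
            Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
                GetEditorResponse::NotFound(ErrorResponse { message: format!("Not found: {}", id) }),
            Err(Error(ErrorKind::MalformedExternalId(e), _)) =>
                GetEditorResponse::BadRequest(ErrorResponse { message: e.to_string() }),
            Err(e) => {
                error!("{}", e);
                GetEditorResponse::BadRequest(ErrorResponse { message: e.to_string() })
            }
        };
        Box::new(futures::done(Ok(ret)))
    }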