From ba7d6a842cb4d61357b588fb2d3ec552c654ae64 Mon Sep 17 00:00:00 2001 From: Bryan Newbold Date: Tue, 8 Jan 2019 23:18:32 -0800 Subject: huge refactor of rust modules/files Taking advantage of new Rust 2018 crate/module path changes, and re-organizing things. Somewhat optimistic this could help with partial rebuild speed also. --- rust/src/api_entity_crud.rs | 1992 ------------------------------- rust/src/api_helpers.rs | 683 ----------- rust/src/api_server.rs | 586 --------- rust/src/api_wrappers.rs | 1293 -------------------- rust/src/auth.rs | 31 +- rust/src/bin/fatcat-auth.rs | 44 +- rust/src/bin/fatcat-export.rs | 20 +- rust/src/bin/fatcatd.rs | 40 +- rust/src/database_models.rs | 4 +- rust/src/editing.rs | 140 +++ rust/src/endpoint_handlers.rs | 582 +++++++++ rust/src/endpoints.rs | 1298 ++++++++++++++++++++ rust/src/entity_crud.rs | 2162 ++++++++++++++++++++++++++++++++++ rust/src/errors.rs | 55 + rust/src/identifiers.rs | 376 ++++++ rust/src/lib.rs | 203 +--- rust/src/server.rs | 81 ++ rust/tests/helpers.rs | 21 +- rust/tests/test_api_server_client.rs | 5 - rust/tests/test_api_server_http.rs | 10 +- rust/tests/test_auth.rs | 17 +- rust/tests/test_fcid.rs | 2 +- rust/tests/test_old_python_tests.rs | 5 - 23 files changed, 4793 insertions(+), 4857 deletions(-) delete mode 100644 rust/src/api_entity_crud.rs delete mode 100644 rust/src/api_helpers.rs delete mode 100644 rust/src/api_server.rs delete mode 100644 rust/src/api_wrappers.rs create mode 100644 rust/src/editing.rs create mode 100644 rust/src/endpoint_handlers.rs create mode 100644 rust/src/endpoints.rs create mode 100644 rust/src/entity_crud.rs create mode 100644 rust/src/errors.rs create mode 100644 rust/src/identifiers.rs create mode 100644 rust/src/server.rs diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs deleted file mode 100644 index 44b421f9..00000000 --- a/rust/src/api_entity_crud.rs +++ /dev/null @@ -1,1992 +0,0 @@ -use crate::api_helpers::*; -use crate::api_server::get_release_files; -use chrono; -use crate::database_models::*; -use crate::database_schema::*; -use diesel::prelude::*; -use diesel::{self, insert_into}; -use crate::errors::*; -use fatcat_api_spec::models::*; -use sha1::Sha1; -use std::marker::Sized; -use std::str::FromStr; -use uuid::Uuid; - -/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, - * instead of macros. - * - * Notably: - * - * db_get - * db_get_rev - * db_create - * db_create_batch - * db_update - * db_delete - * db_get_history - * db_get_edit - * db_delete_edit - * db_get_redirects - * db_accept_edits - * - * For now, these will probably be macros, until we can level up our trait/generics foo. - */ - -// Associated Type, not parametric -pub trait EntityCrud -where - Self: Sized, -{ - // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a - // specific table? 
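    // (Sketch, not from the original file: each concrete entity binds these associated
    // types to its own Diesel row structs, as the per-entity impls further down do, e.g.
    //
    //     impl EntityCrud for ContainerEntity {
    //         type EditRow = ContainerEditRow;
    //         type EditNewRow = ContainerEditNewRow;
    //         type IdentRow = ContainerIdentRow;
    //         type IdentNewRow = ContainerIdentNewRow;
    //         type RevRow = ContainerRevRow;
    //         generic_db_get!(container_ident, container_rev);
    //         // ... plus the other generic_db_* macros and entity-specific methods
    //     }
    //
    // and the generic_db_* macros below expand into method bodies against those types.)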
- type EditRow; // EntityEditRow - type EditNewRow; - type IdentRow; // EntityIdentRow - type IdentNewRow; - type RevRow; - - // Generic Methods - fn from_deleted_row(ident_row: Self::IdentRow) -> Result; - fn db_get(conn: &DbConn, ident: FatCatId, hide: HideFlags) -> Result; - fn db_get_rev(conn: &DbConn, rev_id: Uuid, hide: HideFlags) -> Result; - fn db_expand(&mut self, conn: &DbConn, expand: ExpandFlags) -> Result<()>; - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result; - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result>; - fn db_update( - &self, - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result; - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result>; - fn db_get_edit(conn: &DbConn, edit_id: Uuid) -> Result; - fn db_delete_edit(conn: &DbConn, edit_id: Uuid) -> Result<()>; - fn db_get_redirects(conn: &DbConn, ident: FatCatId) -> Result>; - fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result; - - // Entity-specific Methods - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - hide: HideFlags, - ) -> Result; - fn db_insert_rev(&self, conn: &DbConn) -> Result; - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result>; -} - -macro_rules! generic_db_get { - ($ident_table:ident, $rev_table:ident) => { - fn db_get(conn: &DbConn, ident: FatCatId, hide: HideFlags) -> Result { - let res: Option<(Self::IdentRow, Self::RevRow)> = $ident_table::table - .find(ident.to_uuid()) - .inner_join($rev_table::table) - .first(conn) - .optional()?; - - match res { - Some((ident, rev)) => { - Self::db_from_row(conn, rev, Some(ident), hide) - }, - None => { - // return a stub (deleted) entity if it's just deleted state - let ident_row: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if ident_row.rev_id.is_none() { - Self::from_deleted_row(ident_row) - } else { - bail!("unexpected condition: entity ident/rev join failed, yet row isn't in deleted state") - } - }, - } - } - }; -} - -macro_rules! generic_db_get_rev { - ($rev_table:ident) => { - fn db_get_rev(conn: &DbConn, rev_id: Uuid, hide: HideFlags) -> Result { - let rev = $rev_table::table.find(rev_id).first(conn)?; - - Self::db_from_row(conn, rev, None, hide) - } - }; -} - -macro_rules! generic_db_expand { - () => { - fn db_expand(&mut self, _conn: &DbConn, _expand: ExpandFlags) -> Result<()> { - Ok(()) - } - }; -} - -macro_rules! generic_db_create { - // TODO: this path should call generic_db_create_batch - ($ident_table: ident, $edit_table: ident) => { - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - if self.redirect.is_some() { - return Err(ErrorKind::OtherBadRequest( - "can't create an entity that redirects from the start".to_string()).into()); - } - let rev_id = self.db_insert_rev(conn)?; - let ident: Uuid = insert_into($ident_table::table) - .values($ident_table::rev_id.eq(&rev_id)) - .returning($ident_table::id) - .get_result(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::ident_id.eq(&ident), - )) - .get_result(conn)?; - Ok(edit) - } - } -} - -macro_rules! 
generic_db_create_batch { - ($ident_table:ident, $edit_table:ident) => { - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - if models.iter().any(|m| m.redirect.is_some()) { - return Err(ErrorKind::OtherBadRequest( - "can't create an entity that redirects from the start".to_string(), - ) - .into()); - } - let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into($ident_table::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(rev_id.clone()), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning($ident_table::id) - .get_results(conn)?; - let edits: Vec = insert_into($edit_table::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - }; -} - -macro_rules! generic_db_update { - ($ident_table: ident, $edit_table: ident) => { - fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - let no_redirect: Option = None; - // TODO: is this actually true? or should we allow updates in the same editgroup? - if current.is_live != true { - return Err(ErrorKind::InvalidEntityStateTransform( - "can't update an entity that doesn't exist yet".to_string()).into()); - } - // Don't set prev_rev if current status is redirect - let prev_rev = match current.redirect_id { - Some(_) => None, - None => current.rev_id, - }; - - if self.state.is_none() { - - if Some(ident.to_string()) == self.redirect { - return Err(ErrorKind::OtherBadRequest( - "tried to redirect entity to itself".to_string()).into()); - } - // special case: redirect to another entity - if let Some(ref redirect_ident) = self.redirect { - let redirect_ident = FatCatId::from_str(&redirect_ident)?.to_uuid(); - if Some(redirect_ident) == current.redirect_id { - return Err(ErrorKind::OtherBadRequest( - "redundantly redirecting entity to it's current target currently isn't supported".to_string()).into()); - } - // TODO: if we get a diesel not-found here, should be a special error response? 
- let target: Self::IdentRow = $ident_table::table.find(redirect_ident).first(conn)?; - if target.is_live != true { - // there is no race condition on this check because WIP -> is_live=true is - // a one-way operation - // XXX: - return Err(ErrorKind::OtherBadRequest( - "attempted to redirect to a WIP entity".to_string()).into()); - } - // Note: there is a condition where the target is already a redirect, but we - // don't handle that here because the state of the redirect could change before - // we accept this editgroup - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(&ident.to_uuid()), - $edit_table::rev_id.eq(target.rev_id), - $edit_table::redirect_id.eq(redirect_ident), - $edit_table::prev_rev.eq(prev_rev), - $edit_table::extra_json.eq(&self.edit_extra), - )) - .get_result(conn)?; - return Ok(edit) - } - // special case: revert to point to an existing revision - if let Some(ref rev_id) = self.revision { - let rev_id = Uuid::from_str(&rev_id)?; - if Some(rev_id) == current.rev_id { - return Err(ErrorKind::OtherBadRequest( - "reverted entity to it's current state; this isn't currently supported".to_string()).into()); - } - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(&ident.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::redirect_id.eq(no_redirect), - $edit_table::prev_rev.eq(prev_rev), - $edit_table::extra_json.eq(&self.edit_extra), - )) - .get_result(conn)?; - return Ok(edit) - } - } - - // regular insert/update - let rev_id = self.db_insert_rev(conn)?; - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(&ident.to_uuid()), - $edit_table::rev_id.eq(&rev_id), - $edit_table::redirect_id.eq(no_redirect), - $edit_table::prev_rev.eq(prev_rev), - $edit_table::extra_json.eq(&self.edit_extra), - )) - .get_result(conn)?; - Ok(edit) - } - } -} - -macro_rules! generic_db_delete { - ($ident_table:ident, $edit_table:ident) => { - fn db_delete( - conn: &DbConn, - edit_context: &EditContext, - ident: FatCatId, - ) -> Result { - let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; - if current.is_live != true { - return Err(ErrorKind::InvalidEntityStateTransform( - "can't update an entity that doesn't exist yet; delete edit object instead" - .to_string(), - ) - .into()); - } - if current.state()? == EntityState::Deleted { - return Err(ErrorKind::InvalidEntityStateTransform( - "entity was already deleted".to_string(), - ) - .into()); - } - let edit: Self::EditRow = insert_into($edit_table::table) - .values(( - $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), - $edit_table::ident_id.eq(ident.to_uuid()), - $edit_table::rev_id.eq(None::), - $edit_table::redirect_id.eq(None::), - $edit_table::prev_rev.eq(current.rev_id), - $edit_table::extra_json.eq(&edit_context.extra_json), - )) - .get_result(conn)?; - - Ok(edit) - } - }; -} - -macro_rules! 
generic_db_get_history { - ($edit_table:ident) => { - fn db_get_history( - conn: &DbConn, - ident: FatCatId, - limit: Option, - ) -> Result> { - let limit = limit.unwrap_or(50); // TODO: make a static - - let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table - .inner_join(changelog::table) - .inner_join($edit_table::table) - .filter($edit_table::ident_id.eq(ident.to_uuid())) - .order(changelog::id.desc()) - .limit(limit) - .get_results(conn)?; - - let history: Result> = rows - .into_iter() - .map(|(eg_row, cl_row, e_row)| { - Ok(EntityHistoryEntry { - edit: e_row.into_model()?, - editgroup: eg_row.into_model_partial(), - changelog_entry: cl_row.into_model(), - }) - }) - .collect(); - history - } - }; -} - -macro_rules! generic_db_get_edit { - ($edit_table:ident) => { - fn db_get_edit(conn: &DbConn, edit_id: Uuid) -> Result { - Ok($edit_table::table.find(edit_id).first(conn)?) - } - }; -} - -macro_rules! generic_db_delete_edit { - ($edit_table:ident) => { - /// This method assumes the connection is already in a transaction - fn db_delete_edit(conn: &DbConn, edit_id: Uuid) -> Result<()> { - // ensure that edit hasn't been accepted - let accepted_rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table - .inner_join(changelog::table) - .inner_join($edit_table::table) - .filter($edit_table::id.eq(edit_id)) - .limit(1) - .get_results(conn)?; - if accepted_rows.len() != 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted( - "attempted to delete an already accepted edit".to_string(), - ) - .into()); - } - diesel::delete($edit_table::table.filter($edit_table::id.eq(edit_id))).execute(conn)?; - Ok(()) - } - }; -} - -macro_rules! generic_db_get_redirects { - ($ident_table:ident) => { - fn db_get_redirects(conn: &DbConn, ident: FatCatId) -> Result> { - let res: Vec = $ident_table::table - .select($ident_table::id) - .filter($ident_table::redirect_id.eq(ident.to_uuid())) - .get_results(conn)?; - Ok(res.iter().map(|u| FatCatId::from_uuid(u)).collect()) - } - }; -} - -/* -// This would be the clean and efficient way, but see: -// https://github.com/diesel-rs/diesel/issues/1478 -// - diesel::update(container_ident::table) - .inner_join(container_edit::table.on( - container_ident::id.eq(container_edit::ident_id) - )) - .filter(container_edit::editgroup_id.eq(editgroup_id)) - .values(( - container_ident::is_live.eq(true), - container_ident::rev_id.eq(container_edit::rev_id), - container_ident::redirect_id.eq(container_edit::redirect_id), - )) - .execute()?; - -// Was previously: - - for entity in &["container", "creator", "file", "work", "release"] { - diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = entity - )).bind::(editgroup_id) - .execute(conn)?; -*/ - -// UPDATE FROM version: single query for many rows -// Works with Postgres, not Cockroach -#[allow(unused_macros)] -macro_rules! generic_db_accept_edits_batch { - ($entity_name_str:expr, $ident_table:ident, $edit_table:ident) => { - fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { - // NOTE: the checks and redirects can be skipped for accepts that are all inserts - // (which I guess we only know for batch inserts with auto-accept?) 
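            // (Sketch of the two invariants checked below, not from the original file,
            // using hypothetical entities A, B, C, X:
            //
            //   forward:  an edit may not redirect A -> B when B is itself already a
            //             redirect (B -> C), which would create a redirect chain;
            //   backward: an edit may not turn A into a redirect while some other ident
            //             X already redirects to A (X -> A), for the same reason.
            //
            // Both checks are joins between the edit and ident tables, counting rows
            // where redirect_id is set on both sides.)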
- - // assert that we aren't redirecting to anything which is a redirect already - let forward_recursive_redirects: i64 = $edit_table::table - .inner_join( - $ident_table::table - .on($edit_table::redirect_id.eq($ident_table::id.nullable())), - ) - .filter($edit_table::redirect_id.is_not_null()) - .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) - .filter($ident_table::redirect_id.is_not_null()) - .count() - .get_result(conn)?; - if forward_recursive_redirects != 0 { - // TODO: revert transaction? - return Err(ErrorKind::OtherBadRequest( - "one or more (forward) recurisve redirects".to_string(), - ) - .into()); - } - - // assert that we aren't redirecting while something already redirects to us - let backward_recursive_redirects: i64 = $ident_table::table - .inner_join( - $edit_table::table - .on($ident_table::redirect_id.eq($edit_table::ident_id.nullable())), - ) - .filter($ident_table::redirect_id.is_not_null()) - .filter($edit_table::editgroup_id.eq(editgroup_id.to_uuid())) - .filter($edit_table::redirect_id.is_not_null()) - .count() - .get_result(conn)?; - if backward_recursive_redirects != 0 { - // TODO: revert transaction? - return Err(ErrorKind::OtherBadRequest( - "one or more (backward) recurisve redirects".to_string(), - ) - .into()); - } - - let count = diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - is_live = true, - rev_id = {entity}_edit.rev_id, - redirect_id = {entity}_edit.redirect_id - FROM {entity}_edit - WHERE - {entity}_ident.id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str - )) - .bind::(editgroup_id.to_uuid()) - .execute(conn)?; - - // update any/all redirects for updated entities - let _redir_count = diesel::sql_query(format!( - " - UPDATE {entity}_ident - SET - rev_id = {entity}_edit.rev_id - FROM {entity}_edit - WHERE - {entity}_ident.redirect_id = {entity}_edit.ident_id - AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str - )) - .bind::(editgroup_id.to_uuid()) - .execute(conn)?; - Ok(count as u64) - } - }; -} - -// UPDATE ROW version: single query per row -// CockroachDB version (slow, single query per row) -#[allow(unused_macros)] -macro_rules! generic_db_accept_edits_each { - ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { - // 1. select edit rows (in sql) - let edit_rows: Vec = $edit_table::table - .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) - .get_results(conn)?; - // 2. create ident rows (in rust) - let ident_rows: Vec = edit_rows - .iter() - .map(|edit| Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - }) - .collect(); - /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ - // 3. update every row individually - let count = ident_rows.len() as u64; - for row in ident_rows { - diesel::update(&row).set(&row).execute(conn)?; - } - Ok(count) - } - }; -} - -macro_rules! 
generic_db_insert_rev { - () => { - fn db_insert_rev(&self, conn: &DbConn) -> Result { - Self::db_insert_revs(conn, &[self]).map(|id_list| id_list[0]) - } - } -} - -impl EntityCrud for ContainerEntity { - type EditRow = ContainerEditRow; - type EditNewRow = ContainerEditNewRow; - type IdentRow = ContainerIdentRow; - type IdentNewRow = ContainerIdentNewRow; - type RevRow = ContainerRevRow; - - generic_db_get!(container_ident, container_rev); - generic_db_get_rev!(container_rev); - generic_db_expand!(); - generic_db_create!(container_ident, container_edit); - generic_db_create_batch!(container_ident, container_edit); - generic_db_update!(container_ident, container_edit); - generic_db_delete!(container_ident, container_edit); - generic_db_get_history!(container_edit); - generic_db_get_edit!(container_edit); - generic_db_delete_edit!(container_edit); - generic_db_get_redirects!(container_ident); - generic_db_accept_edits_batch!("container", container_ident, container_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(ContainerEntity { - issnl: None, - wikidata_qid: None, - publisher: None, - name: None, - abbrev: None, - coden: None, - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(ContainerEntity { - issnl: rev_row.issnl, - wikidata_qid: rev_row.wikidata_qid, - publisher: rev_row.publisher, - name: Some(rev_row.name), - abbrev: rev_row.abbrev, - coden: rev_row.coden, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - if let Some(ref extid) = entity.issnl { - check_issn(extid)?; - } - } - - if models.iter().any(|m| m.name.is_none()) { - return Err(ErrorKind::OtherBadRequest( - "name is required for all Container entities".to_string(), - ) - .into()); - } - - let rev_ids: Vec = insert_into(container_rev::table) - .values( - models - .iter() - .map(|model| ContainerRevNewRow { - name: model.name.clone().unwrap(), // unwrap checked above - publisher: model.publisher.clone(), - issnl: model.issnl.clone(), - wikidata_qid: model.wikidata_qid.clone(), - abbrev: model.abbrev.clone(), - coden: model.coden.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(container_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for CreatorEntity { - type EditRow = CreatorEditRow; - type EditNewRow = CreatorEditNewRow; - type IdentRow = CreatorIdentRow; - type IdentNewRow = CreatorIdentNewRow; - type RevRow = CreatorRevRow; - - 
generic_db_get!(creator_ident, creator_rev); - generic_db_get_rev!(creator_rev); - generic_db_expand!(); - generic_db_create!(creator_ident, creator_edit); - generic_db_create_batch!(creator_ident, creator_edit); - generic_db_update!(creator_ident, creator_edit); - generic_db_delete!(creator_ident, creator_edit); - generic_db_get_history!(creator_edit); - generic_db_get_edit!(creator_edit); - generic_db_delete_edit!(creator_edit); - generic_db_get_redirects!(creator_ident); - generic_db_accept_edits_batch!("creator", creator_ident, creator_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(CreatorEntity { - extra: None, - edit_extra: None, - display_name: None, - given_name: None, - surname: None, - orcid: None, - wikidata_qid: None, - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - }) - } - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - Ok(CreatorEntity { - display_name: Some(rev_row.display_name), - given_name: rev_row.given_name, - surname: rev_row.surname, - orcid: rev_row.orcid, - wikidata_qid: rev_row.wikidata_qid, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.orcid { - check_orcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - } - - if models.iter().any(|m| m.display_name.is_none()) { - return Err(ErrorKind::OtherBadRequest( - "display_name is required for all Creator entities".to_string(), - ) - .into()); - } - - let rev_ids: Vec = insert_into(creator_rev::table) - .values( - models - .iter() - .map(|model| CreatorRevNewRow { - display_name: model.display_name.clone().unwrap(), // unwrapped checked above - given_name: model.given_name.clone(), - surname: model.surname.clone(), - orcid: model.orcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(creator_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} - -impl EntityCrud for FileEntity { - type EditRow = FileEditRow; - type EditNewRow = FileEditNewRow; - type IdentRow = FileIdentRow; - type IdentNewRow = FileIdentNewRow; - type RevRow = FileRevRow; - - generic_db_get!(file_ident, file_rev); - generic_db_get_rev!(file_rev); - generic_db_expand!(); - generic_db_create!(file_ident, file_edit); - generic_db_create_batch!(file_ident, file_edit); - generic_db_update!(file_ident, file_edit); - generic_db_delete!(file_ident, file_edit); - generic_db_get_history!(file_edit); - generic_db_get_edit!(file_edit); - generic_db_delete_edit!(file_edit); - generic_db_get_redirects!(file_ident); - generic_db_accept_edits_batch!("file", file_ident, 
file_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(FileEntity { - sha1: None, - sha256: None, - md5: None, - size: None, - urls: None, - mimetype: None, - release_ids: None, - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let urls: Vec = file_rev_url::table - .filter(file_rev_url::file_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileRevUrlRow| FileEntityUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - let release_ids: Vec = file_rev_release::table - .filter(file_rev_release::file_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) - .collect(); - - Ok(FileEntity { - sha1: rev_row.sha1, - sha256: rev_row.sha256, - md5: rev_row.md5, - size: rev_row.size_bytes.map(|v| v as i64), - urls: Some(urls), - mimetype: rev_row.mimetype, - release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify hash syntax - for entity in models { - if let Some(ref hash) = entity.md5 { - check_md5(hash)?; - } - if let Some(ref hash) = entity.sha1 { - check_sha1(hash)?; - } - if let Some(ref hash) = entity.sha256 { - check_sha256(hash)?; - } - } - - let rev_ids: Vec = insert_into(file_rev::table) - .values( - models - .iter() - .map(|model| FileRevNewRow { - size_bytes: model.size, - sha1: model.sha1.clone(), - sha256: model.sha256.clone(), - md5: model.md5.clone(), - mimetype: model.mimetype.clone(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(file_rev::id) - .get_results(conn)?; - - let mut file_rev_release_rows: Vec = vec![]; - let mut file_url_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.release_ids { - None => (), - Some(release_list) => { - let these_release_rows: Result> = release_list - .iter() - .map(|r| { - Ok(FileRevReleaseRow { - file_rev: *rev_id, - target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), - }) - }) - .collect(); - file_rev_release_rows.extend(these_release_rows?); - } - }; - - match &model.urls { - None => (), - Some(url_list) => { - let these_url_rows: Vec = url_list - .into_iter() - .map(|u| FileRevUrlNewRow { - file_rev: *rev_id, - rel: u.rel.clone(), - url: u.url.clone(), - }) - .collect(); - file_url_rows.extend(these_url_rows); - } - }; - } - - if !file_rev_release_rows.is_empty() { - insert_into(file_rev_release::table) - .values(file_rev_release_rows) - .execute(conn)?; - } - - if !file_url_rows.is_empty() { - 
insert_into(file_rev_url::table) - .values(file_url_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for FilesetEntity { - type EditRow = FilesetEditRow; - type EditNewRow = FilesetEditNewRow; - type IdentRow = FilesetIdentRow; - type IdentNewRow = FilesetIdentNewRow; - type RevRow = FilesetRevRow; - - generic_db_get!(fileset_ident, fileset_rev); - generic_db_get_rev!(fileset_rev); - generic_db_expand!(); - generic_db_create!(fileset_ident, fileset_edit); - generic_db_create_batch!(fileset_ident, fileset_edit); - generic_db_update!(fileset_ident, fileset_edit); - generic_db_delete!(fileset_ident, fileset_edit); - generic_db_get_history!(fileset_edit); - generic_db_get_edit!(fileset_edit); - generic_db_delete_edit!(fileset_edit); - generic_db_get_redirects!(fileset_ident); - generic_db_accept_edits_batch!("fileset", fileset_ident, fileset_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(FilesetEntity { - manifest: None, - urls: None, - release_ids: None, - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let manifest: Vec = fileset_rev_file::table - .filter(fileset_rev_file::fileset_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FilesetRevFileRow| FilesetEntityManifest { - path: r.path_name, - size: r.size_bytes, - md5: r.md5, - sha1: r.sha1, - sha256: r.sha256, - extra: r.extra_json, - }) - .collect(); - - let urls: Vec = fileset_rev_url::table - .filter(fileset_rev_url::fileset_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: FilesetRevUrlRow| FileEntityUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - let release_ids: Vec = fileset_rev_release::table - .filter(fileset_rev_release::fileset_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map(|r: FilesetRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) - .collect(); - - Ok(FilesetEntity { - manifest: Some(manifest), - urls: Some(urls), - release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify hash syntax - for entity in models { - if let Some(ref manifest) = entity.manifest { - for file in manifest { - if let Some(ref hash) = file.md5 { - check_md5(hash)?; - } - if let Some(ref hash) = file.sha1 { - check_sha1(hash)?; - } - if let Some(ref hash) = file.sha256 { - check_sha256(hash)?; - } - } - } - } - - let rev_ids: Vec = insert_into(fileset_rev::table) - .values( - models - .iter() - .map(|model| FilesetRevNewRow { - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(fileset_rev::id) - .get_results(conn)?; - - let mut fileset_file_rows: Vec = vec![]; - let mut fileset_url_rows: Vec = vec![]; - let mut fileset_release_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.manifest { - None => (), - Some(file_list) => { - let these_file_rows: Vec = file_list - .into_iter() - .map(|f| FilesetRevFileNewRow { - fileset_rev: *rev_id, - path_name: f.path.clone(), - size_bytes: f.size, - md5: f.md5.clone(), - sha1: f.sha1.clone(), - sha256: f.sha256.clone(), - extra_json: f.extra.clone(), - }) - .collect(); - fileset_file_rows.extend(these_file_rows); - } - }; - - match &model.urls { - None => (), - Some(url_list) => { - let these_url_rows: Vec = url_list - .into_iter() - .map(|u| FilesetRevUrlNewRow { - fileset_rev: *rev_id, - rel: u.rel.clone(), - url: u.url.clone(), - }) - .collect(); - fileset_url_rows.extend(these_url_rows); - } - }; - - match &model.release_ids { - None => (), - Some(release_list) => { - let these_release_rows: Result> = release_list - .iter() - .map(|r| { - Ok(FilesetRevReleaseRow { - fileset_rev: *rev_id, - target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), - }) - }) - .collect(); - fileset_release_rows.extend(these_release_rows?); - } - }; - } - - if !fileset_file_rows.is_empty() { - insert_into(fileset_rev_file::table) - .values(fileset_file_rows) - .execute(conn)?; - } - - if !fileset_url_rows.is_empty() { - insert_into(fileset_rev_url::table) - .values(fileset_url_rows) - .execute(conn)?; - } - - if !fileset_release_rows.is_empty() { - insert_into(fileset_rev_release::table) - .values(fileset_release_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for WebcaptureEntity { - type EditRow = WebcaptureEditRow; - type EditNewRow = WebcaptureEditNewRow; - type IdentRow = WebcaptureIdentRow; - type IdentNewRow = WebcaptureIdentNewRow; - type RevRow = WebcaptureRevRow; - - generic_db_get!(webcapture_ident, webcapture_rev); - generic_db_get_rev!(webcapture_rev); - generic_db_expand!(); - generic_db_create!(webcapture_ident, webcapture_edit); - generic_db_create_batch!(webcapture_ident, webcapture_edit); - generic_db_update!(webcapture_ident, webcapture_edit); - generic_db_delete!(webcapture_ident, webcapture_edit); - generic_db_get_history!(webcapture_edit); - generic_db_get_edit!(webcapture_edit); - generic_db_delete_edit!(webcapture_edit); - generic_db_get_redirects!(webcapture_ident); - generic_db_accept_edits_batch!("webcapture", webcapture_ident, webcapture_edit); - 
generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(WebcaptureEntity { - cdx: None, - archive_urls: None, - original_url: None, - timestamp: None, - release_ids: None, - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let cdx: Vec = webcapture_rev_cdx::table - .filter(webcapture_rev_cdx::webcapture_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|c: WebcaptureRevCdxRow| WebcaptureEntityCdx { - surt: c.surt, - timestamp: c.timestamp, - url: c.url, - mimetype: c.mimetype, - status_code: c.status_code, - sha1: c.sha1, - sha256: c.sha256, - }) - .collect(); - - let archive_urls: Vec = webcapture_rev_url::table - .filter(webcapture_rev_url::webcapture_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: WebcaptureRevUrlRow| WebcaptureEntityArchiveUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - let release_ids: Vec = webcapture_rev_release::table - .filter(webcapture_rev_release::webcapture_rev.eq(rev_row.id)) - .get_results(conn)? - .into_iter() - .map(|r: WebcaptureRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) - .collect(); - - Ok(WebcaptureEntity { - cdx: Some(cdx), - archive_urls: Some(archive_urls), - original_url: Some(rev_row.original_url), - timestamp: Some(chrono::DateTime::from_utc(rev_row.timestamp, chrono::Utc)), - release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify hash syntax, and presence of required fields - for entity in models { - if let Some(ref cdx) = entity.cdx { - for row in cdx { - check_sha1(&row.sha1)?; - if let Some(ref hash) = row.sha256 { - check_sha256(hash)?; - } - } - } - if entity.timestamp.is_none() || entity.original_url.is_none() { - return Err(ErrorKind::OtherBadRequest( - "timestamp and original_url are required for webcapture entities".to_string(), - ) - .into()); - } - } - - let rev_ids: Vec = insert_into(webcapture_rev::table) - .values( - models - .iter() - .map(|model| WebcaptureRevNewRow { - // these unwraps safe because of check above - original_url: model.original_url.clone().unwrap(), - timestamp: model.timestamp.unwrap().naive_utc(), - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(webcapture_rev::id) - .get_results(conn)?; - - let mut webcapture_cdx_rows: Vec = vec![]; - let mut webcapture_url_rows: Vec = vec![]; - let mut webcapture_release_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.cdx { - None => (), - Some(cdx_list) => { - let these_cdx_rows: Vec = cdx_list - .into_iter() - 
.map(|c| WebcaptureRevCdxNewRow { - webcapture_rev: *rev_id, - surt: c.surt.clone(), - timestamp: c.timestamp.clone(), - url: c.url.clone(), - mimetype: c.mimetype.clone(), - status_code: c.status_code, - sha1: c.sha1.clone(), - sha256: c.sha256.clone(), - }) - .collect(); - webcapture_cdx_rows.extend(these_cdx_rows); - } - }; - - match &model.archive_urls { - None => (), - Some(url_list) => { - let these_url_rows: Vec = url_list - .into_iter() - .map(|u| WebcaptureRevUrlNewRow { - webcapture_rev: *rev_id, - rel: u.rel.clone(), - url: u.url.clone(), - }) - .collect(); - webcapture_url_rows.extend(these_url_rows); - } - }; - - match &model.release_ids { - None => (), - Some(release_list) => { - let these_release_rows: Result> = release_list - .iter() - .map(|r| { - Ok(WebcaptureRevReleaseRow { - webcapture_rev: *rev_id, - target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), - }) - }) - .collect(); - webcapture_release_rows.extend(these_release_rows?); - } - }; - } - - if !webcapture_cdx_rows.is_empty() { - insert_into(webcapture_rev_cdx::table) - .values(webcapture_cdx_rows) - .execute(conn)?; - } - - if !webcapture_url_rows.is_empty() { - insert_into(webcapture_rev_url::table) - .values(webcapture_url_rows) - .execute(conn)?; - } - - if !webcapture_release_rows.is_empty() { - insert_into(webcapture_rev_release::table) - .values(webcapture_release_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for ReleaseEntity { - type EditRow = ReleaseEditRow; - type EditNewRow = ReleaseEditNewRow; - type IdentRow = ReleaseIdentRow; - type IdentNewRow = ReleaseIdentNewRow; - type RevRow = ReleaseRevRow; - - generic_db_get!(release_ident, release_rev); - generic_db_get_rev!(release_rev); - generic_db_update!(release_ident, release_edit); - generic_db_delete!(release_ident, release_edit); - generic_db_get_history!(release_edit); - generic_db_get_edit!(release_edit); - generic_db_delete_edit!(release_edit); - generic_db_get_redirects!(release_ident); - generic_db_accept_edits_batch!("release", release_ident, release_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(ReleaseEntity { - title: None, - release_type: None, - release_status: None, - release_date: None, - release_year: None, - doi: None, - pmid: None, - pmcid: None, - isbn13: None, - core_id: None, - wikidata_qid: None, - volume: None, - issue: None, - pages: None, - files: None, - filesets: None, - webcaptures: None, - container: None, - container_id: None, - publisher: None, - language: None, - work_id: None, - refs: None, - contribs: None, - abstracts: None, - - state: Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_expand(&mut self, conn: &DbConn, expand: ExpandFlags) -> Result<()> { - // Don't expand deleted entities - if self.state == Some("deleted".to_string()) { - return Ok(()); - } - // TODO: should clarify behavior here. Would hit this path, eg, expanding files on a - // release revision (not ident). Should we fail (Bad Request), or silently just not include - // any files? 
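        // (Note, not from the original file: as written, the code below takes the second
        // option; the `self.ident.is_some()` guard silently skips file expansion for bare
        // revisions rather than failing, and for redirects it expands against the redirect
        // target identifier.)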
- if expand.files && self.ident.is_some() { - let ident = match &self.ident { - None => bail!("Can't expand files on a non-concrete entity"), // redundant with above is_some() - Some(ident) => match &self.redirect { - // If we're a redirect, then expand for the *target* identifier, not *our* - // identifier. Tricky! - None => FatCatId::from_str(&ident)?, - Some(redir) => FatCatId::from_str(&redir)?, - }, - }; - self.files = Some(get_release_files(ident, HideFlags::none(), conn)?); - } - if expand.container { - if let Some(ref cid) = self.container_id { - self.container = Some(ContainerEntity::db_get( - conn, - FatCatId::from_str(&cid)?, - HideFlags::none(), - )?); - } - } - if expand.creators { - if let Some(ref mut contribs) = self.contribs { - for contrib in contribs { - if let Some(ref creator_id) = contrib.creator_id { - contrib.creator = Some(CreatorEntity::db_get( - conn, - FatCatId::from_str(creator_id)?, - HideFlags::none(), - )?); - } - } - } - } - Ok(()) - } - - fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { - if self.redirect.is_some() { - return Err(ErrorKind::OtherBadRequest( - "can't create an entity that redirects from the start".to_string(), - ) - .into()); - } - let mut edits = Self::db_create_batch(conn, edit_context, &[self])?; - // probably a more elegant way to destroy the vec and take first element - Ok(edits.pop().unwrap()) - } - - fn db_create_batch( - conn: &DbConn, - edit_context: &EditContext, - models: &[&Self], - ) -> Result> { - // This isn't the generic implementation because we need to create Work entities for each - // of the release entities passed (at least in the common case) - if models.iter().any(|m| m.redirect.is_some()) { - return Err(ErrorKind::OtherBadRequest( - "can't create an entity that redirects from the start".to_string(), - ) - .into()); - } - - // Generate the set of new work entities to insert (usually one for each release, but some - // releases might be pointed to a work already) - let mut new_work_models: Vec<&WorkEntity> = vec![]; - for entity in models { - if entity.work_id.is_none() { - new_work_models.push(&WorkEntity { - ident: None, - revision: None, - redirect: None, - state: None, - extra: None, - edit_extra: None, - }); - }; - } - - // create the works, then pluck the list of idents from the result - let new_work_edits = - WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; - let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); - - // Copy all the release models, and ensure that each has work_id set, using the new work - // idents. There should be one new work ident for each release missing one. - let models_with_work_ids: Vec = models - .iter() - .map(|model| { - let mut model = (*model).clone(); - if model.work_id.is_none() { - model.work_id = - Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) - } - model - }) - .collect(); - let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); - let models = model_refs.as_slice(); - - // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
- let rev_ids: Vec = Self::db_insert_revs(conn, models)?; - let ident_ids: Vec = insert_into(release_ident::table) - .values( - rev_ids - .iter() - .map(|rev_id| Self::IdentNewRow { - rev_id: Some(*rev_id), - is_live: edit_context.autoaccept, - redirect_id: None, - }) - .collect::>(), - ) - .returning(release_ident::id) - .get_results(conn)?; - let edits: Vec = insert_into(release_edit::table) - .values( - rev_ids - .into_iter() - .zip(ident_ids.into_iter()) - .map(|(rev_id, ident_id)| Self::EditNewRow { - editgroup_id: edit_context.editgroup_id.to_uuid(), - rev_id: Some(rev_id), - ident_id: ident_id, - redirect_id: None, - prev_rev: None, - extra_json: edit_context.extra_json.clone(), - }) - .collect::>(), - ) - .get_results(conn)?; - Ok(edits) - } - - fn db_from_row( - conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - let refs: Option> = match hide.refs { - true => None, - false => Some( - release_ref::table - .filter(release_ref::release_rev.eq(rev_row.id)) - .order(release_ref::index_val.asc()) - .get_results(conn)? - .into_iter() - .map(|r: ReleaseRefRow| ReleaseRef { - index: r.index_val.map(|v| v as i64), - key: r.key, - extra: r.extra_json, - container_name: r.container_name, - year: r.year.map(|v| v as i64), - title: r.title, - locator: r.locator, - target_release_id: r - .target_release_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - }) - .collect(), - ), - }; - - let contribs: Option> = match hide.contribs { - true => None, - false => Some( - release_contrib::table - .filter(release_contrib::release_rev.eq(rev_row.id)) - .order(( - release_contrib::role.asc(), - release_contrib::index_val.asc(), - )) - .get_results(conn)? - .into_iter() - .map(|c: ReleaseContribRow| ReleaseContrib { - index: c.index_val.map(|v| v as i64), - raw_name: c.raw_name, - role: c.role, - extra: c.extra_json, - creator_id: c - .creator_ident_id - .map(|v| FatCatId::from_uuid(&v).to_string()), - creator: None, - }) - .collect(), - ), - }; - - let abstracts: Option> = match hide.abstracts { - true => None, - false => Some( - release_rev_abstract::table - .inner_join(abstracts::table) - .filter(release_rev_abstract::release_rev.eq(rev_row.id)) - .get_results(conn)? 
- .into_iter() - .map( - |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { - sha1: Some(r.0.abstract_sha1), - mimetype: r.0.mimetype, - lang: r.0.lang, - content: Some(r.1.content), - }, - ) - .collect(), - ), - }; - - Ok(ReleaseEntity { - title: Some(rev_row.title), - release_type: rev_row.release_type, - release_status: rev_row.release_status, - release_date: rev_row.release_date, - release_year: rev_row.release_year, - doi: rev_row.doi, - pmid: rev_row.pmid, - pmcid: rev_row.pmcid, - isbn13: rev_row.isbn13, - core_id: rev_row.core_id, - wikidata_qid: rev_row.wikidata_qid, - volume: rev_row.volume, - issue: rev_row.issue, - pages: rev_row.pages, - files: None, - filesets: None, - webcaptures: None, - container: None, - container_id: rev_row - .container_ident_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - publisher: rev_row.publisher, - language: rev_row.language, - work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), - refs: refs, - contribs: contribs, - abstracts: abstracts, - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - // first verify external identifier syntax - for entity in models { - if let Some(ref extid) = entity.doi { - check_doi(extid)?; - } - if let Some(ref extid) = entity.pmid { - check_pmid(extid)?; - } - if let Some(ref extid) = entity.pmcid { - check_pmcid(extid)?; - } - if let Some(ref extid) = entity.wikidata_qid { - check_wikidata_qid(extid)?; - } - if let Some(ref release_type) = entity.release_type { - check_release_type(release_type)?; - } - if let Some(ref contribs) = entity.contribs { - for contrib in contribs { - if let Some(ref role) = contrib.role { - check_contrib_role(role)?; - } - } - } - } - - if models.iter().any(|m| m.title.is_none()) { - return Err(ErrorKind::OtherBadRequest( - "title is required for all Release entities".to_string(), - ) - .into()); - } - - let rev_ids: Vec = insert_into(release_rev::table) - .values( - models - .iter() - .map(|model| { - Ok(ReleaseRevNewRow { - title: model.title.clone().unwrap(), // titles checked above - release_type: model.release_type.clone(), - release_status: model.release_status.clone(), - release_date: model.release_date, - release_year: model.release_year, - doi: model.doi.clone(), - pmid: model.pmid.clone(), - pmcid: model.pmcid.clone(), - wikidata_qid: model.wikidata_qid.clone(), - isbn13: model.isbn13.clone(), - core_id: model.core_id.clone(), - volume: model.volume.clone(), - issue: model.issue.clone(), - pages: model.pages.clone(), - work_ident_id: match model.work_id.clone() { - None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), - Some(s) => FatCatId::from_str(&s)?.to_uuid(), - }, - container_ident_id: match model.container_id.clone() { - None => None, - Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), - }, - publisher: model.publisher.clone(), - language: model.language.clone(), - extra_json: model.extra.clone() - }) - }) - .collect::>>()?, - ) - .returning(release_rev::id) - .get_results(conn)?; - - let mut release_ref_rows: Vec = vec![]; - let mut release_contrib_rows: Vec = vec![]; - let mut abstract_rows: Vec = vec![]; - let mut release_abstract_rows: Vec = vec![]; - - for (model, rev_id) in models.iter().zip(rev_ids.iter()) { - match &model.refs { - None => (), - Some(ref_list) => { - let these_ref_rows: 
Vec = ref_list - .iter() - .map(|r| { - Ok(ReleaseRefNewRow { - release_rev: rev_id.clone(), - target_release_ident_id: match r.target_release_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - index_val: r.index.map(|v| v as i32), - key: r.key.clone(), - container_name: r.container_name.clone(), - year: r.year.map(|v| v as i32), - title: r.title.clone(), - locator: r.locator.clone(), - extra_json: r.extra.clone(), - }) - }) - .collect::>>()?; - release_ref_rows.extend(these_ref_rows); - } - }; - - match &model.contribs { - None => (), - Some(contrib_list) => { - let these_contrib_rows: Vec = contrib_list - .iter() - .map(|c| { - Ok(ReleaseContribNewRow { - release_rev: rev_id.clone(), - creator_ident_id: match c.creator_id.clone() { - None => None, - Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), - }, - raw_name: c.raw_name.clone(), - index_val: c.index.map(|v| v as i32), - role: c.role.clone(), - extra_json: c.extra.clone(), - }) - }) - .collect::>>()?; - release_contrib_rows.extend(these_contrib_rows); - } - }; - - if let Some(abstract_list) = &model.abstracts { - // For rows that specify content, we need to insert the abstract if it doesn't exist - // already - let new_abstracts: Vec = abstract_list - .iter() - .filter(|ea| ea.content.is_some()) - .map(|c| AbstractsRow { - sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), - content: c.content.clone().unwrap(), - }) - .collect(); - abstract_rows.extend(new_abstracts); - let new_release_abstract_rows: Vec = abstract_list - .into_iter() - .map(|c| { - Ok(ReleaseRevAbstractNewRow { - release_rev: *rev_id, - abstract_sha1: match c.content { - Some(ref content) => Sha1::from(content).hexdigest(), - None => match c.sha1.clone() { - Some(v) => v, - None => bail!("either abstract_sha1 or content is required"), - }, - }, - lang: c.lang.clone(), - mimetype: c.mimetype.clone(), - }) - }) - .collect::>>()?; - release_abstract_rows.extend(new_release_abstract_rows); - } - } - - if !release_ref_rows.is_empty() { - insert_into(release_ref::table) - .values(release_ref_rows) - .execute(conn)?; - } - - if !release_contrib_rows.is_empty() { - insert_into(release_contrib::table) - .values(release_contrib_rows) - .execute(conn)?; - } - - if !abstract_rows.is_empty() { - // Sort of an "upsert"; only inserts new abstract rows if they don't already exist - insert_into(abstracts::table) - .values(&abstract_rows) - .on_conflict(abstracts::sha1) - .do_nothing() - .execute(conn)?; - insert_into(release_rev_abstract::table) - .values(release_abstract_rows) - .execute(conn)?; - } - - Ok(rev_ids) - } -} - -impl EntityCrud for WorkEntity { - type EditRow = WorkEditRow; - type EditNewRow = WorkEditNewRow; - type IdentRow = WorkIdentRow; - type IdentNewRow = WorkIdentNewRow; - type RevRow = WorkRevRow; - - generic_db_get!(work_ident, work_rev); - generic_db_get_rev!(work_rev); - generic_db_expand!(); - generic_db_create!(work_ident, work_edit); - generic_db_create_batch!(work_ident, work_edit); - generic_db_update!(work_ident, work_edit); - generic_db_delete!(work_ident, work_edit); - generic_db_get_history!(work_edit); - generic_db_get_edit!(work_edit); - generic_db_delete_edit!(work_edit); - generic_db_get_redirects!(work_ident); - generic_db_accept_edits_batch!("work", work_ident, work_edit); - generic_db_insert_rev!(); - - fn from_deleted_row(ident_row: Self::IdentRow) -> Result { - if ident_row.rev_id.is_some() { - bail!("called from_deleted_row with a non-deleted-state row") - } - - Ok(WorkEntity { - state: 
Some(ident_row.state().unwrap().shortname()), - ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), - revision: ident_row.rev_id.map(|u| u.to_string()), - redirect: ident_row - .redirect_id - .map(|u| FatCatId::from_uuid(&u).to_string()), - extra: None, - edit_extra: None, - }) - } - - fn db_from_row( - _conn: &DbConn, - rev_row: Self::RevRow, - ident_row: Option, - _hide: HideFlags, - ) -> Result { - let (state, ident_id, redirect_id) = match ident_row { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(FatCatId::from_uuid(&i.id).to_string()), - i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), - ), - None => (None, None, None), - }; - - Ok(WorkEntity { - state: state, - ident: ident_id, - revision: Some(rev_row.id.to_string()), - redirect: redirect_id, - extra: rev_row.extra_json, - edit_extra: None, - }) - } - - fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { - let rev_ids: Vec = insert_into(work_rev::table) - .values( - models - .iter() - .map(|model| WorkRevNewRow { - extra_json: model.extra.clone(), - }) - .collect::>(), - ) - .returning(work_rev::id) - .get_results(conn)?; - Ok(rev_ids) - } -} diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs deleted file mode 100644 index 55085403..00000000 --- a/rust/src/api_helpers.rs +++ /dev/null @@ -1,683 +0,0 @@ -use crate::api_entity_crud::EntityCrud; -use data_encoding::BASE32_NOPAD; -use crate::database_models::*; -use crate::database_schema::*; -use diesel; -use diesel::prelude::*; -use crate::errors::*; -use fatcat_api_spec::models::*; -use regex::Regex; -use serde_json; -use std::str::FromStr; -use uuid::Uuid; - -pub type DbConn = - diesel::r2d2::PooledConnection>; - -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option, - pub autoaccept: bool, -} - -impl EditContext { - /// This function should always be run within a transaction - pub fn check(&self, conn: &DbConn) -> Result<()> { - let count: i64 = changelog::table - .filter(changelog::editgroup_id.eq(&self.editgroup_id.to_uuid())) - .count() - .get_result(conn)?; - if count > 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted(self.editgroup_id.to_string()).into()); - } - return Ok(()); - } -} - -#[derive(Clone, Copy, PartialEq)] -pub struct ExpandFlags { - pub files: bool, - pub filesets: bool, - pub webcaptures: bool, - pub container: bool, - pub releases: bool, - pub creators: bool, -} - -impl FromStr for ExpandFlags { - type Err = Error; - fn from_str(param: &str) -> Result { - let list: Vec<&str> = param.split_terminator(",").collect(); - Ok(ExpandFlags::from_str_list(&list)) - } -} - -impl ExpandFlags { - pub fn from_str_list(list: &[&str]) -> ExpandFlags { - ExpandFlags { - files: list.contains(&"files"), - filesets: list.contains(&"filesets"), - webcaptures: list.contains(&"webcaptures"), - container: list.contains(&"container"), - releases: list.contains(&"releases"), - creators: list.contains(&"creators"), - } - } - pub fn none() -> ExpandFlags { - ExpandFlags { - files: false, - filesets: false, - webcaptures: false, - container: false, - releases: false, - creators: false, - } - } -} - -#[test] -fn test_expand_flags() { - assert!(ExpandFlags::from_str_list(&vec![]).files == false); - assert!(ExpandFlags::from_str_list(&vec!["files"]).files == true); - assert!(ExpandFlags::from_str_list(&vec!["file"]).files == false); - let all = ExpandFlags::from_str_list(&vec![ - "files", - "filesets", - "webcaptures", - "container", - "other_thing", - "releases", - 
"creators", - ]); - assert!( - all == ExpandFlags { - files: true, - filesets: true, - webcaptures: true, - container: true, - releases: true, - creators: true - } - ); - assert!(ExpandFlags::from_str("").unwrap().files == false); - assert!(ExpandFlags::from_str("files").unwrap().files == true); - assert!(ExpandFlags::from_str("something,,files").unwrap().files == true); - assert!(ExpandFlags::from_str("file").unwrap().files == false); - let all = - ExpandFlags::from_str("files,container,other_thing,releases,creators,filesets,webcaptures") - .unwrap(); - assert!( - all == ExpandFlags { - files: true, - filesets: true, - webcaptures: true, - container: true, - releases: true, - creators: true - } - ); -} - -#[derive(Clone, Copy, PartialEq)] -pub struct HideFlags { - // release - pub abstracts: bool, - pub refs: bool, - pub contribs: bool, - // fileset - pub manifest: bool, - // webcapture - pub cdx: bool, -} - -impl FromStr for HideFlags { - type Err = Error; - fn from_str(param: &str) -> Result { - let list: Vec<&str> = param.split_terminator(",").collect(); - Ok(HideFlags::from_str_list(&list)) - } -} - -impl HideFlags { - pub fn from_str_list(list: &[&str]) -> HideFlags { - HideFlags { - abstracts: list.contains(&"abstracts"), - refs: list.contains(&"refs"), - contribs: list.contains(&"contribs"), - manifest: list.contains(&"contribs"), - cdx: list.contains(&"contribs"), - } - } - pub fn none() -> HideFlags { - HideFlags { - abstracts: false, - refs: false, - contribs: false, - manifest: false, - cdx: false, - } - } -} - -#[test] -fn test_hide_flags() { - assert!(HideFlags::from_str_list(&vec![]).abstracts == false); - assert!(HideFlags::from_str_list(&vec!["abstracts"]).abstracts == true); - assert!(HideFlags::from_str_list(&vec!["abstract"]).abstracts == false); - let all = HideFlags::from_str_list(&vec![ - "abstracts", - "refs", - "other_thing", - "contribs", - "manifest", - "cdx", - ]); - assert!( - all == HideFlags { - abstracts: true, - refs: true, - contribs: true, - manifest: true, - cdx: true, - } - ); - assert!(HideFlags::from_str("").unwrap().abstracts == false); - assert!(HideFlags::from_str("abstracts").unwrap().abstracts == true); - assert!( - HideFlags::from_str("something,,abstracts") - .unwrap() - .abstracts - == true - ); - assert!(HideFlags::from_str("file").unwrap().abstracts == false); - let all = HideFlags::from_str("abstracts,cdx,refs,manifest,other_thing,contribs").unwrap(); - assert!( - all == HideFlags { - abstracts: true, - refs: true, - contribs: true, - manifest: true, - cdx: true, - } - ); -} - -pub fn make_edit_context( - conn: &DbConn, - editor_id: FatCatId, - editgroup_id: Option, - autoaccept: bool, -) -> Result { - let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { - (Some(eg), _) => eg, - // If autoaccept and no editgroup_id passed, always create a new one for this transaction - (None, true) => { - let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) - .values((editgroup::editor_id.eq(editor_id.to_uuid()),)) - .get_result(conn)?; - FatCatId::from_uuid(&eg_row.id) - } - (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id.to_uuid(), conn)?), - }; - Ok(EditContext { - editor_id: editor_id, - editgroup_id: editgroup_id, - extra_json: None, - autoaccept: autoaccept, - }) -} - -pub fn create_editor( - conn: &DbConn, - username: String, - is_admin: bool, - is_bot: bool, -) -> Result { - check_username(&username)?; - let ed: EditorRow = diesel::insert_into(editor::table) - .values(( - editor::username.eq(username), 
- editor::is_admin.eq(is_admin), - editor::is_bot.eq(is_bot), - )) - .get_result(conn)?; - Ok(ed) -} - -pub fn update_editor_username( - conn: &DbConn, - editor_id: FatCatId, - username: String, -) -> Result { - check_username(&username)?; - diesel::update(editor::table.find(editor_id.to_uuid())) - .set(editor::username.eq(username)) - .execute(conn)?; - let editor: EditorRow = editor::table.find(editor_id.to_uuid()).get_result(conn)?; - Ok(editor) -} - -/// This function should always be run within a transaction -pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { - // check for current active - let ed_row: EditorRow = editor::table.find(editor_id).first(conn)?; - if let Some(current) = ed_row.active_editgroup_id { - return Ok(current); - } - - // need to insert and update - let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) - .values((editgroup::editor_id.eq(ed_row.id),)) - .get_result(conn)?; - diesel::update(editor::table.find(ed_row.id)) - .set(editor::active_editgroup_id.eq(eg_row.id)) - .execute(conn)?; - Ok(eg_row.id) -} - -/// This function should always be run within a transaction -pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result { - // check that we haven't accepted already (in changelog) - // NB: could leave this to a UNIQUE constraint - // TODO: redundant with check_edit_context - let count: i64 = changelog::table - .filter(changelog::editgroup_id.eq(editgroup_id.to_uuid())) - .count() - .get_result(conn)?; - if count > 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted(editgroup_id.to_string()).into()); - } - - // copy edit columns to ident table - ContainerEntity::db_accept_edits(conn, editgroup_id)?; - CreatorEntity::db_accept_edits(conn, editgroup_id)?; - FileEntity::db_accept_edits(conn, editgroup_id)?; - FilesetEntity::db_accept_edits(conn, editgroup_id)?; - WebcaptureEntity::db_accept_edits(conn, editgroup_id)?; - ReleaseEntity::db_accept_edits(conn, editgroup_id)?; - WorkEntity::db_accept_edits(conn, editgroup_id)?; - - // append log/changelog row - let entry: ChangelogRow = diesel::insert_into(changelog::table) - .values((changelog::editgroup_id.eq(editgroup_id.to_uuid()),)) - .get_result(conn)?; - - // update any editor's active editgroup - let no_active: Option = None; - diesel::update(editor::table) - .filter(editor::active_editgroup_id.eq(editgroup_id.to_uuid())) - .set(editor::active_editgroup_id.eq(no_active)) - .execute(conn)?; - Ok(entry) -} - -#[derive(Clone, Copy, PartialEq, Debug)] -pub struct FatCatId(Uuid); - -impl ToString for FatCatId { - fn to_string(&self) -> String { - uuid2fcid(&self.to_uuid()) - } -} - -impl FromStr for FatCatId { - type Err = Error; - fn from_str(s: &str) -> Result { - fcid2uuid(s).map(|u| FatCatId(u)) - } -} - -impl FatCatId { - pub fn to_uuid(&self) -> Uuid { - self.0 - } - // TODO: just make it u: Uuid and clone (not by ref) - pub fn from_uuid(u: &Uuid) -> FatCatId { - FatCatId(*u) - } -} - -/// Convert fatcat IDs (base32 strings) to UUID -pub fn fcid2uuid(fcid: &str) -> Result { - if fcid.len() != 26 { - return Err(ErrorKind::InvalidFatcatId(fcid.to_string()).into()); - } - let mut raw = vec![0; 16]; - BASE32_NOPAD - .decode_mut(fcid.to_uppercase().as_bytes(), &mut raw) - .map_err(|_dp| ErrorKind::InvalidFatcatId(fcid.to_string()))?; - // unwrap() is safe here, because we know raw is always 16 bytes - Ok(Uuid::from_bytes(&raw).unwrap()) -} - -/// Convert UUID to fatcat ID string (base32 encoded) -pub fn uuid2fcid(id: &Uuid) -> String { - let raw = id.as_bytes(); - 
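// Hypothetical call site (names are illustrative) for the "always run within a
// transaction" note on accept_editgroup(): wrapping it in a diesel transaction makes
// the changelog insert, the per-entity ident updates, and the active-editgroup reset
// land or fail together.

fn accept_in_transaction(conn: &DbConn, editgroup_id: FatCatId) -> Result<ChangelogRow> {
    conn.transaction(|| accept_editgroup(editgroup_id, conn))
}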
BASE32_NOPAD.encode(raw).to_lowercase() -} - -pub fn check_username(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^[A-Za-z][A-Za-z0-9._-]{2,24}$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid username: '{}' (expected, eg, 'AcidBurn')", - raw - )) - .into()) - } -} - -#[test] -fn test_check_username() { - assert!(check_username("bnewbold").is_ok()); - assert!(check_username("BNEWBOLD").is_ok()); - assert!(check_username("admin").is_ok()); - assert!(check_username("friend-bot").is_ok()); - assert!(check_username("dog").is_ok()); - assert!(check_username("g_____").is_ok()); - assert!(check_username("bnewbold2-archive").is_ok()); - assert!(check_username("bnewbold2-internetarchive").is_ok()); - - assert!(check_username("").is_err()); - assert!(check_username("_").is_err()); - assert!(check_username("gg").is_err()); - assert!(check_username("adminadminadminadminadminadminadmin").is_err()); - assert!(check_username("bryan newbold").is_err()); - assert!(check_username("01234567-3456-6780").is_err()); - assert!(check_username(".admin").is_err()); - assert!(check_username("-bot").is_err()); -} - -pub fn check_pmcid(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^PMC\d+$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid PubMed Central ID (PMCID): '{}' (expected, eg, 'PMC12345')", - raw - )) - .into()) - } -} - -pub fn check_pmid(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^\d+$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid PubMed ID (PMID): '{}' (expected, eg, '1234')", - raw - )) - .into()) - } -} - -pub fn check_wikidata_qid(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^Q\d+$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid Wikidata QID: '{}' (expected, eg, 'Q1234')", - raw - )) - .into()) - } -} - -pub fn check_doi(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^10.\d{3,6}/.+$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid DOI: '{}' (expected, eg, '10.1234/aksjdfh')", - raw - )) - .into()) - } -} - -pub fn check_issn(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^\d{4}-\d{3}[0-9X]$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid ISSN: '{}' (expected, eg, '1234-5678')", - raw - )) - .into()) - } -} - -pub fn check_orcid(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^\d{4}-\d{4}-\d{4}-\d{3}[\dX]$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedExternalId(format!( - "not a valid ORCID: '{}' (expected, eg, '0123-4567-3456-6789')", - raw - )) - .into()) - } -} - -#[test] -fn test_check_orcid() { - assert!(check_orcid("0123-4567-3456-6789").is_ok()); - assert!(check_orcid("0123-4567-3456-678X").is_ok()); - assert!(check_orcid("01234567-3456-6780").is_err()); - assert!(check_orcid("0x23-4567-3456-6780").is_err()); -} - -pub fn check_md5(raw: &str) -> Result<()> { - lazy_static! 
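// A small round-trip sketch for the base32 helpers above, in the style of the
// existing tests: 26 lower-case base32 characters map to the 16 UUID bytes and back.

#[test]
fn test_fcid_roundtrip_sketch() {
    // 26 'a' characters decode to the all-zero (nil) UUID
    let fcid: String = std::iter::repeat('a').take(26).collect();
    let uuid = fcid2uuid(&fcid).unwrap();
    assert_eq!(uuid2fcid(&uuid), fcid);
    // anything that isn't exactly 26 base32 characters is rejected
    assert!(fcid2uuid("not-an-fcid").is_err());
}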
{ - static ref RE: Regex = Regex::new(r"^[a-f0-9]{32}$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedChecksum(format!( - "not a valid MD5: '{}' (expected lower-case hex, eg, '1b39813549077b2347c0f370c3864b40')", - raw - )) - .into()) - } -} - -#[test] -fn test_check_md5() { - assert!(check_md5("1b39813549077b2347c0f370c3864b40").is_ok()); - assert!(check_md5("1g39813549077b2347c0f370c3864b40").is_err()); - assert!(check_md5("1B39813549077B2347C0F370c3864b40").is_err()); - assert!(check_md5("1b39813549077b2347c0f370c3864b4").is_err()); - assert!(check_md5("1b39813549077b2347c0f370c3864b411").is_err()); -} - -pub fn check_sha1(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^[a-f0-9]{40}$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedChecksum(format!( - "not a valid SHA-1: '{}' (expected lower-case hex, eg, 'e9dd75237c94b209dc3ccd52722de6931a310ba3')", - raw - )) - .into()) - } -} - -#[test] -fn test_check_sha1() { - assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba3").is_ok()); - assert!(check_sha1("g9dd75237c94b209dc3ccd52722de6931a310ba3").is_err()); - assert!(check_sha1("e9DD75237C94B209DC3CCD52722de6931a310ba3").is_err()); - assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba").is_err()); - assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba33").is_err()); -} - -pub fn check_sha256(raw: &str) -> Result<()> { - lazy_static! { - static ref RE: Regex = Regex::new(r"^[a-f0-9]{64}$").unwrap(); - } - if RE.is_match(raw) { - Ok(()) - } else { - Err(ErrorKind::MalformedChecksum(format!( - "not a valid SHA-256: '{}' (expected lower-case hex, eg, 'cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452')", - raw - )) - .into()) - } -} - -#[test] -fn test_check_sha256() { - assert!( - check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_ok() - ); - assert!( - check_sha256("gb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_err() - ); - assert!( - check_sha256("UB1C378F464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_err() - ); - assert!( - check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e45").is_err() - ); - assert!( - check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e4522").is_err() - ); -} - -pub fn check_release_type(raw: &str) -> Result<()> { - let valid_types = vec![ - // Citation Style Language official types - "article", - "article-magazine", - "article-newspaper", - "article-journal", - "bill", - "book", - "broadcast", - "chapter", - "dataset", - "entry", - "entry-dictionary", - "entry-encyclopedia", - "figure", - "graphic", - "interview", - "legislation", - "legal_case", - "manuscript", - "map", - "motion_picture", - "musical_score", - "pamphlet", - "paper-conference", - "patent", - "post", - "post-weblog", - "personal_communication", - "report", - "review", - "review-book", - "song", - "speech", - "thesis", - "treaty", - "webpage", - // fatcat-specific extensions - "peer_review", - "software", - "standard", - ]; - for good in valid_types { - if raw == good { - return Ok(()); - } - } - Err(ErrorKind::NotInControlledVocabulary(format!( - "not a valid release_type: '{}' (expected a CSL type, eg, 'article-journal', 'book')", - raw - )) - .into()) -} - -#[test] -fn test_check_release_type() { - assert!(check_release_type("book").is_ok()); - assert!(check_release_type("article-journal").is_ok()); - 
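// A small sketch connecting the checksum validators to the `sha1` crate already used
// in this crate: a freshly computed digest is exactly the 40-character lower-case hex
// form that check_sha1() accepts.

#[test]
fn test_check_sha1_accepts_computed_digest() {
    let digest = sha1::Sha1::from("hello world").hexdigest();
    assert_eq!(digest.len(), 40);
    assert!(check_sha1(&digest).is_ok());
}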
assert!(check_release_type("standard").is_ok()); - assert!(check_release_type("journal-article").is_err()); - assert!(check_release_type("BOOK").is_err()); - assert!(check_release_type("book ").is_err()); -} - -pub fn check_contrib_role(raw: &str) -> Result<()> { - let valid_types = vec![ - // Citation Style Language official role types - "author", - "collection-editor", - "composer", - "container-author", - "director", - "editor", - "editorial-director", - "editortranslator", - "illustrator", - "interviewer", - "original-author", - "recipient", - "reviewed-author", - "translator", - // common extension (for conference proceeding chair) - //"chair", - ]; - for good in valid_types { - if raw == good { - return Ok(()); - } - } - Err(ErrorKind::NotInControlledVocabulary(format!( - "not a valid contrib.role: '{}' (expected a CSL type, eg, 'author', 'editor')", - raw - )) - .into()) -} - -#[test] -fn test_check_contrib_role() { - assert!(check_contrib_role("author").is_ok()); - assert!(check_contrib_role("editor").is_ok()); - assert!(check_contrib_role("chair").is_err()); - assert!(check_contrib_role("EDITOR").is_err()); - assert!(check_contrib_role("editor ").is_err()); -} - -// TODO: make the above checks "more correct" -// TODO: check ISBN-13 diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs deleted file mode 100644 index 0377f970..00000000 --- a/rust/src/api_server.rs +++ /dev/null @@ -1,586 +0,0 @@ -//! API endpoint handlers - -use crate::api_entity_crud::EntityCrud; -use crate::api_helpers::*; -use crate::auth::*; -use chrono; -use crate::database_models::*; -use crate::database_schema::*; -use diesel::prelude::*; -use diesel::{self, insert_into}; -use crate::errors::*; -use fatcat_api_spec::models; -use fatcat_api_spec::models::*; -use std::str::FromStr; -use crate::ConnectionPool; - -macro_rules! 
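// One possible shape for the ISBN-13 check flagged as a TODO above (hypothetical; the
// function name and error wording are only for illustration). It strips hyphens,
// requires exactly 13 digits, and verifies the alternating 1/3-weighted check digit.

pub fn check_isbn13(raw: &str) -> Result<()> {
    let digits: Vec<u32> = raw
        .chars()
        .filter(|c| *c != '-')
        .map(|c| c.to_digit(10))
        .collect::<Option<Vec<u32>>>()
        .unwrap_or_default();
    if digits.len() != 13 {
        return Err(ErrorKind::MalformedExternalId(format!(
            "not a valid ISBN-13: '{}' (expected 13 digits, eg, '978-3-16-148410-0')",
            raw
        ))
        .into());
    }
    // ISBN-13 check digit: digits weighted 1,3,1,3,... must sum to a multiple of 10
    let sum: u32 = digits
        .iter()
        .enumerate()
        .map(|(i, d)| if i % 2 == 0 { *d } else { 3 * d })
        .sum();
    if sum % 10 == 0 {
        Ok(())
    } else {
        Err(ErrorKind::MalformedExternalId(format!("invalid ISBN-13 check digit: '{}'", raw)).into())
    }
}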
entity_batch_handler { - ($post_batch_handler:ident, $model:ident) => { - pub fn $post_batch_handler( - &self, - entity_list: &[models::$model], - autoaccept: bool, - editor_id: FatCatId, - editgroup_id: Option, - conn: &DbConn, - ) -> Result> { - - let edit_context = make_edit_context(conn, editor_id, editgroup_id, autoaccept)?; - edit_context.check(&conn)?; - let model_list: Vec<&models::$model> = entity_list.iter().map(|e| e).collect(); - let edits = $model::db_create_batch(conn, &edit_context, model_list.as_slice())?; - - if autoaccept { - let _clr: ChangelogRow = diesel::insert_into(changelog::table) - .values((changelog::editgroup_id.eq(edit_context.editgroup_id.to_uuid()),)) - .get_result(conn)?; - } - edits.into_iter().map(|e| e.into_model()).collect() - } - } -} - -#[derive(Clone)] -pub struct Server { - pub db_pool: ConnectionPool, - pub auth_confectionary: AuthConfectionary, -} - -pub fn get_release_files( - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, -) -> Result> { - let rows: Vec<(FileRevRow, FileIdentRow, FileRevReleaseRow)> = file_rev::table - .inner_join(file_ident::table) - .inner_join(file_rev_release::table) - .filter(file_rev_release::target_release_ident_id.eq(&ident.to_uuid())) - .filter(file_ident::is_live.eq(true)) - .filter(file_ident::redirect_id.is_null()) - .load(conn)?; - - rows.into_iter() - .map(|(rev, ident, _)| FileEntity::db_from_row(conn, rev, Some(ident), hide_flags)) - .collect() -} - -pub fn get_release_filesets( - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, -) -> Result> { - let rows: Vec<(FilesetRevRow, FilesetIdentRow, FilesetRevReleaseRow)> = fileset_rev::table - .inner_join(fileset_ident::table) - .inner_join(fileset_rev_release::table) - .filter(fileset_rev_release::target_release_ident_id.eq(&ident.to_uuid())) - .filter(fileset_ident::is_live.eq(true)) - .filter(fileset_ident::redirect_id.is_null()) - .load(conn)?; - - rows.into_iter() - .map(|(rev, ident, _)| FilesetEntity::db_from_row(conn, rev, Some(ident), hide_flags)) - .collect() -} - -pub fn get_release_webcaptures( - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, -) -> Result> { - let rows: Vec<( - WebcaptureRevRow, - WebcaptureIdentRow, - WebcaptureRevReleaseRow, - )> = webcapture_rev::table - .inner_join(webcapture_ident::table) - .inner_join(webcapture_rev_release::table) - .filter(webcapture_rev_release::target_release_ident_id.eq(&ident.to_uuid())) - .filter(webcapture_ident::is_live.eq(true)) - .filter(webcapture_ident::redirect_id.is_null()) - .load(conn)?; - - rows.into_iter() - .map(|(rev, ident, _)| WebcaptureEntity::db_from_row(conn, rev, Some(ident), hide_flags)) - .collect() -} - -impl Server { - pub fn lookup_container_handler( - &self, - issnl: &Option, - wikidata_qid: &Option, - expand_flags: ExpandFlags, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result { - let (ident, rev): (ContainerIdentRow, ContainerRevRow) = match (issnl, wikidata_qid) { - (Some(issnl), None) => { - check_issn(issnl)?; - container_ident::table - .inner_join(container_rev::table) - .filter(container_rev::issnl.eq(&issnl)) - .filter(container_ident::is_live.eq(true)) - .filter(container_ident::redirect_id.is_null()) - .first(conn)? - } - (None, Some(wikidata_qid)) => { - check_wikidata_qid(wikidata_qid)?; - container_ident::table - .inner_join(container_rev::table) - .filter(container_rev::wikidata_qid.eq(&wikidata_qid)) - .filter(container_ident::is_live.eq(true)) - .filter(container_ident::redirect_id.is_null()) - .first(conn)? 
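// The three get_release_* helpers above share one query shape: join the entity's rev
// table to its ident table and to the corresponding rev_release mapping table, keep
// only rows whose mapping points at the requested release and whose ident is live and
// not a redirect, then convert each (rev, ident) pair with db_from_row().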
- } - _ => { - return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); - } - }; - - let mut entity = ContainerEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - } - - pub fn lookup_creator_handler( - &self, - orcid: &Option, - wikidata_qid: &Option, - expand_flags: ExpandFlags, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result { - let (ident, rev): (CreatorIdentRow, CreatorRevRow) = match (orcid, wikidata_qid) { - (Some(orcid), None) => { - check_orcid(orcid)?; - creator_ident::table - .inner_join(creator_rev::table) - .filter(creator_rev::orcid.eq(orcid)) - .filter(creator_ident::is_live.eq(true)) - .filter(creator_ident::redirect_id.is_null()) - .first(conn)? - } - (None, Some(wikidata_qid)) => { - check_wikidata_qid(wikidata_qid)?; - creator_ident::table - .inner_join(creator_rev::table) - .filter(creator_rev::wikidata_qid.eq(wikidata_qid)) - .filter(creator_ident::is_live.eq(true)) - .filter(creator_ident::redirect_id.is_null()) - .first(conn)? - } - _ => { - return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); - } - }; - - let mut entity = CreatorEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - } - - pub fn get_creator_releases_handler( - &self, - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result> { - // TODO: some kind of unique or group-by? - let rows: Vec<(ReleaseRevRow, ReleaseIdentRow, ReleaseContribRow)> = release_rev::table - .inner_join(release_ident::table) - .inner_join(release_contrib::table) - .filter(release_contrib::creator_ident_id.eq(&ident.to_uuid())) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .load(conn)?; - - // TODO: from_rows, not from_row? - rows.into_iter() - .map(|(rev, ident, _)| ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)) - .collect() - } - - pub fn lookup_file_handler( - &self, - md5: &Option, - sha1: &Option, - sha256: &Option, - expand_flags: ExpandFlags, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result { - let (ident, rev): (FileIdentRow, FileRevRow) = match (md5, sha1, sha256) { - (Some(md5), None, None) => { - check_md5(md5)?; - file_ident::table - .inner_join(file_rev::table) - .filter(file_rev::md5.eq(md5)) - .filter(file_ident::is_live.eq(true)) - .filter(file_ident::redirect_id.is_null()) - .first(conn)? - } - (None, Some(sha1), None) => { - check_sha1(sha1)?; - file_ident::table - .inner_join(file_rev::table) - .filter(file_rev::sha1.eq(sha1)) - .filter(file_ident::is_live.eq(true)) - .filter(file_ident::redirect_id.is_null()) - .first(conn)? - } - (None, None, Some(sha256)) => { - check_sha256(sha256)?; - file_ident::table - .inner_join(file_rev::table) - .filter(file_rev::sha256.eq(sha256)) - .filter(file_ident::is_live.eq(true)) - .filter(file_ident::redirect_id.is_null()) - .first(conn)? 
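// Hypothetical caller (the function name and example ORCID are illustrative) for the
// lookup handler above; exactly one external identifier may be supplied, otherwise
// the final match arm returns MissingOrMultipleExternalId.

fn lookup_by_orcid_sketch(server: &Server, conn: &DbConn) -> Result<CreatorEntity> {
    server.lookup_creator_handler(
        &Some("0000-0002-1825-0097".to_string()), // example ORCID
        &None,                                    // no wikidata_qid
        ExpandFlags::none(),
        HideFlags::none(),
        conn,
    )
}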
- } - _ => { - return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); - } - }; - - let mut entity = FileEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - } - - pub fn lookup_release_handler( - &self, - doi: &Option, - wikidata_qid: &Option, - isbn13: &Option, - pmid: &Option, - pmcid: &Option, - core_id: &Option, - expand_flags: ExpandFlags, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result { - let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = - match (doi, wikidata_qid, isbn13, pmid, pmcid, core_id) { - (Some(doi), None, None, None, None, None) => { - check_doi(doi)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::doi.eq(doi)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? - } - (None, Some(wikidata_qid), None, None, None, None) => { - check_wikidata_qid(wikidata_qid)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::wikidata_qid.eq(wikidata_qid)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? - } - (None, None, Some(isbn13), None, None, None) => { - // TODO: check_isbn13(isbn13)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::isbn13.eq(isbn13)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? - } - (None, None, None, Some(pmid), None, None) => { - check_pmid(pmid)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::pmid.eq(pmid)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? - } - (None, None, None, None, Some(pmcid), None) => { - check_pmcid(pmcid)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::pmcid.eq(pmcid)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? - } - (None, None, None, None, None, Some(core_id)) => { - // TODO: check_core_id(core_id)?; - release_ident::table - .inner_join(release_rev::table) - .filter(release_rev::core_id.eq(core_id)) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .first(conn)? 
- } - _ => { - return Err( - ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into(), - ); - } - }; - - let mut entity = ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - } - - pub fn get_release_files_handler( - &self, - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result> { - get_release_files(ident, hide_flags, conn) - } - - pub fn get_release_filesets_handler( - &self, - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result> { - get_release_filesets(ident, hide_flags, conn) - } - - pub fn get_release_webcaptures_handler( - &self, - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result> { - get_release_webcaptures(ident, hide_flags, conn) - } - - pub fn get_work_releases_handler( - &self, - ident: FatCatId, - hide_flags: HideFlags, - conn: &DbConn, - ) -> Result> { - let rows: Vec<(ReleaseRevRow, ReleaseIdentRow)> = release_rev::table - .inner_join(release_ident::table) - .filter(release_rev::work_ident_id.eq(&ident.to_uuid())) - .filter(release_ident::is_live.eq(true)) - .filter(release_ident::redirect_id.is_null()) - .load(conn)?; - - rows.into_iter() - .map(|(rev, ident)| ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)) - .collect() - } - - pub fn accept_editgroup_handler(&self, editgroup_id: FatCatId, conn: &DbConn) -> Result<()> { - accept_editgroup(editgroup_id, conn)?; - Ok(()) - } - - pub fn create_editgroup_handler( - &self, - entity: models::Editgroup, - conn: &DbConn, - ) -> Result { - let row: EditgroupRow = insert_into(editgroup::table) - .values(( - editgroup::editor_id.eq(FatCatId::from_str(&entity.editor_id.unwrap())?.to_uuid()), - editgroup::description.eq(entity.description), - editgroup::extra_json.eq(entity.extra), - )) - .get_result(conn)?; - - Ok(Editgroup { - editgroup_id: Some(uuid2fcid(&row.id)), - editor_id: Some(uuid2fcid(&row.editor_id)), - description: row.description, - edits: None, - extra: row.extra_json, - }) - } - - pub fn get_editgroup_handler( - &self, - editgroup_id: FatCatId, - conn: &DbConn, - ) -> Result { - let row: EditgroupRow = editgroup::table.find(editgroup_id.to_uuid()).first(conn)?; - - let edits = EditgroupEdits { - containers: Some( - container_edit::table - .filter(container_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: ContainerEditRow| e.into_model().unwrap()) - .collect(), - ), - creators: Some( - creator_edit::table - .filter(creator_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: CreatorEditRow| e.into_model().unwrap()) - .collect(), - ), - files: Some( - file_edit::table - .filter(file_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: FileEditRow| e.into_model().unwrap()) - .collect(), - ), - filesets: Some( - fileset_edit::table - .filter(fileset_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: FilesetEditRow| e.into_model().unwrap()) - .collect(), - ), - webcaptures: Some( - webcapture_edit::table - .filter(webcapture_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: WebcaptureEditRow| e.into_model().unwrap()) - .collect(), - ), - releases: Some( - release_edit::table - .filter(release_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? 
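// Hypothetical caller of create_editgroup_handler() above (names and description are
// illustrative); note the handler unwraps editor_id, so the model must have it set.

fn open_editgroup_sketch(
    server: &Server,
    editor_id: FatCatId,
    conn: &DbConn,
) -> Result<Editgroup> {
    server.create_editgroup_handler(
        models::Editgroup {
            editgroup_id: None,
            editor_id: Some(editor_id.to_string()),
            description: Some("example batch of edits".to_string()),
            edits: None,
            extra: None,
        },
        conn,
    )
}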
- .into_iter() - .map(|e: ReleaseEditRow| e.into_model().unwrap()) - .collect(), - ), - works: Some( - work_edit::table - .filter(work_edit::editgroup_id.eq(editgroup_id.to_uuid())) - .get_results(conn)? - .into_iter() - .map(|e: WorkEditRow| e.into_model().unwrap()) - .collect(), - ), - }; - - let eg = Editgroup { - editgroup_id: Some(uuid2fcid(&row.id)), - editor_id: Some(uuid2fcid(&row.editor_id)), - description: row.description, - edits: Some(edits), - extra: row.extra_json, - }; - Ok(eg) - } - - pub fn get_editor_handler(&self, editor_id: FatCatId, conn: &DbConn) -> Result { - let row: EditorRow = editor::table.find(editor_id.to_uuid()).first(conn)?; - Ok(row.into_model()) - } - - pub fn get_editor_changelog_handler( - &self, - editor_id: FatCatId, - conn: &DbConn, - ) -> Result> { - // TODO: single query - let editor: EditorRow = editor::table.find(editor_id.to_uuid()).first(conn)?; - let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table - .inner_join(editgroup::table) - .filter(editgroup::editor_id.eq(editor.id)) - .load(conn)?; - - let entries = changes - .into_iter() - .map(|(cl_row, eg_row)| ChangelogEntry { - index: cl_row.id, - editgroup: Some(eg_row.into_model_partial()), - editgroup_id: uuid2fcid(&cl_row.editgroup_id), - timestamp: chrono::DateTime::from_utc(cl_row.timestamp, chrono::Utc), - }) - .collect(); - Ok(entries) - } - - pub fn get_changelog_handler( - &self, - limit: Option, - conn: &DbConn, - ) -> Result> { - let limit = limit.unwrap_or(50); - - let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table - .inner_join(editgroup::table) - .order(changelog::id.desc()) - .limit(limit) - .load(conn)?; - - let entries = changes - .into_iter() - .map(|(cl_row, eg_row)| ChangelogEntry { - index: cl_row.id, - editgroup: Some(eg_row.into_model_partial()), - editgroup_id: uuid2fcid(&cl_row.editgroup_id), - timestamp: chrono::DateTime::from_utc(cl_row.timestamp, chrono::Utc), - }) - .collect(); - Ok(entries) - } - - pub fn get_changelog_entry_handler(&self, index: i64, conn: &DbConn) -> Result { - let cl_row: ChangelogRow = changelog::table.find(index).first(conn)?; - let editgroup = - self.get_editgroup_handler(FatCatId::from_uuid(&cl_row.editgroup_id), conn)?; - - let mut entry = cl_row.into_model(); - entry.editgroup = Some(editgroup); - Ok(entry) - } - - /// This helper either finds an Editor model by OIDC parameters (eg, remote domain and - /// identifier), or creates one and inserts the appropriate auth rows. The semantics are - /// basically an "upsert" of signup/account-creation. - /// Returns an editor model and boolean flag indicating whether a new editor was created or - /// not. - /// If this function creates an editor, it sets the username to - /// "{preferred_username}-{provider}"; the intent is for this to be temporary but unique. Might - /// look like "bnewbold-github", or might look like "895139824-github". This is a hack to make - /// check/creation idempotent. 
- pub fn auth_oidc_handler(&self, params: AuthOidc, conn: &DbConn) -> Result<(Editor, bool)> { - let existing: Vec<(EditorRow, AuthOidcRow)> = editor::table - .inner_join(auth_oidc::table) - .filter(auth_oidc::oidc_sub.eq(params.sub.clone())) - .filter(auth_oidc::oidc_iss.eq(params.iss.clone())) - .load(conn)?; - - let (editor_row, created): (EditorRow, bool) = match existing.first() { - Some((editor, _)) => (editor.clone(), false), - None => { - let username = format!("{}-{}", params.preferred_username, params.provider); - let editor = create_editor(conn, username, false, false)?; - // create an auth login row so the user can log back in - diesel::insert_into(auth_oidc::table) - .values(( - auth_oidc::editor_id.eq(editor.id), - auth_oidc::provider.eq(params.provider), - auth_oidc::oidc_iss.eq(params.iss), - auth_oidc::oidc_sub.eq(params.sub), - )) - .execute(conn)?; - (editor, true) - } - }; - - Ok((editor_row.into_model(), created)) - } - - entity_batch_handler!(create_container_batch_handler, ContainerEntity); - entity_batch_handler!(create_creator_batch_handler, CreatorEntity); - entity_batch_handler!(create_file_batch_handler, FileEntity); - entity_batch_handler!(create_fileset_batch_handler, FilesetEntity); - entity_batch_handler!(create_webcapture_batch_handler, WebcaptureEntity); - entity_batch_handler!(create_release_batch_handler, ReleaseEntity); - entity_batch_handler!(create_work_batch_handler, WorkEntity); -} diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs deleted file mode 100644 index 69bdd88e..00000000 --- a/rust/src/api_wrappers.rs +++ /dev/null @@ -1,1293 +0,0 @@ -//! API endpoint handlers - -use crate::api_entity_crud::EntityCrud; -use crate::api_helpers::*; -use crate::api_server::Server; -use crate::auth::*; -use crate::database_models::EntityEditRow; -use diesel::Connection; -use crate::errors::*; -use fatcat_api_spec::models; -use fatcat_api_spec::models::*; -use fatcat_api_spec::*; -use futures::{self, Future}; -use std::str::FromStr; -use uuid::Uuid; - -/// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the -/// codegen fatcat-api-spec code wants) that call through to actual helpers (which have simple -/// Result<> return types) -macro_rules! wrap_entity_handlers { - // Would much rather just have entity ident, then generate the other fields from that, but Rust - // stable doesn't have a mechanism to "concat" or generate new identifiers in macros, at least - // in the context of defining new functions. 
- // The only stable approach I know of would be: https://github.com/dtolnay/mashup - ($get_fn:ident, $get_resp:ident, $post_fn:ident, $post_resp:ident, $post_batch_fn:ident, - $post_batch_handler:ident, $post_batch_resp:ident, $update_fn:ident, $update_resp:ident, - $delete_fn:ident, $delete_resp:ident, $get_history_fn:ident, $get_history_resp:ident, - $get_edit_fn:ident, $get_edit_resp:ident, $delete_edit_fn:ident, $delete_edit_resp:ident, - $get_rev_fn:ident, $get_rev_resp:ident, $get_redirects_fn:ident, $get_redirects_resp:ident, - $model:ident) => { - - fn $get_fn( - &self, - ident: String, - expand: Option, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match (|| { - let entity_id = FatCatId::from_str(&ident)?; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m)?, - }; - match expand { - None => $model::db_get(&conn, entity_id, hide_flags), - Some(param) => { - let expand_flags = ExpandFlags::from_str(¶m)?; - let mut entity = $model::db_get(&conn, entity_id, hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - }, - } - })() { - Ok(entity) => - $get_resp::FoundEntity(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), - Err(Error(ErrorKind::Uuid(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $get_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $post_fn( - &self, - entity: models::$model, - editgroup_id: Option, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_fn)))?; - auth_context.require_role(FatcatRole::Editor)?; - let editgroup_id = if let Some(s) = editgroup_id { - let eg_id = FatCatId::from_str(&s)?; - auth_context.require_editgroup(&conn, eg_id)?; - Some(eg_id) - } else { None }; - let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?; - edit_context.check(&conn)?; - entity.db_create(&conn, &edit_context)?.into_model() - }) { - Ok(edit) => - $post_resp::CreatedEntity(edit), - Err(Error(ErrorKind::Diesel(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::Uuid(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $post_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MalformedChecksum(e), _)) => - 
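// For reference: the `expand` and `hide` strings parsed above are plain comma-separated
// lists of the flag names defined in ExpandFlags/HideFlags, e.g. a request like
// `?expand=files,container&hide=abstracts,refs` enables exactly those flags, and
// unrecognized names are silently ignored by from_str_list().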
$post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? - $post_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => - $post_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $post_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $post_batch_fn( - &self, - entity_list: &Vec, - autoaccept: Option, - editgroup_id: Option, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_batch_fn)))?; - auth_context.require_role(FatcatRole::Editor)?; - let editgroup_id = if let Some(s) = editgroup_id { - let eg_id = FatCatId::from_str(&s)?; - auth_context.require_editgroup(&conn, eg_id)?; - Some(eg_id) - } else { None }; - self.$post_batch_handler(entity_list, autoaccept.unwrap_or(false), auth_context.editor_id, editgroup_id, &conn) - }) { - Ok(edit) => - $post_batch_resp::CreatedEntities(edit), - Err(Error(ErrorKind::Diesel(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::Uuid(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MalformedChecksum(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? 
- $post_batch_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => - $post_batch_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $post_batch_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $update_fn( - &self, - ident: String, - entity: models::$model, - editgroup_id: Option, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($update_fn)))?; - auth_context.require_role(FatcatRole::Editor)?; - let entity_id = FatCatId::from_str(&ident)?; - let editgroup_id = if let Some(s) = editgroup_id { - let eg_id = FatCatId::from_str(&s)?; - auth_context.require_editgroup(&conn, eg_id)?; - Some(eg_id) - } else { None }; - let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?; - edit_context.check(&conn)?; - entity.db_update(&conn, &edit_context, entity_id)?.into_model() - }) { - Ok(edit) => - $update_resp::UpdatedEntity(edit), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $update_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), - Err(Error(ErrorKind::Diesel(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::Uuid(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $update_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MalformedChecksum(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidEntityStateTransform(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? 
- $update_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => - $update_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $update_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $delete_fn( - &self, - ident: String, - editgroup_id: Option, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($delete_fn)))?; - auth_context.require_role(FatcatRole::Editor)?; - let entity_id = FatCatId::from_str(&ident)?; - let editgroup_id: Option = match editgroup_id { - Some(s) => { - let editgroup_id = FatCatId::from_str(&s)?; - auth_context.require_editgroup(&conn, editgroup_id)?; - Some(editgroup_id) - }, - None => None, - }; - let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?; - edit_context.check(&conn)?; - $model::db_delete(&conn, &edit_context, entity_id)?.into_model() - }) { - Ok(edit) => - $delete_resp::DeletedEntity(edit), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $delete_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), - Err(Error(ErrorKind::Diesel(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::Uuid(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidEntityStateTransform(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? - $delete_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => - $delete_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $delete_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $get_history_fn( - &self, - ident: String, - limit: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET? 
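// Summarizing the error mapping these wrappers repeat: Diesel NotFound becomes the
// endpoint's NotFound response; Uuid, InvalidFatcatId, MalformedExternalId,
// MalformedChecksum, NotInControlledVocabulary, EditgroupAlreadyAccepted,
// InvalidEntityStateTransform, and OtherBadRequest all become BadRequest;
// InvalidCredentials and InsufficientPrivileges become Forbidden; anything else is
// logged and returned as GenericError.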
- let ret = match (|| { - let entity_id = FatCatId::from_str(&ident)?; - $model::db_get_history(&conn, entity_id, limit) - })() { - Ok(history) => - $get_history_resp::FoundEntityHistory(history), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_history_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), - Err(Error(ErrorKind::Uuid(e), _)) => - $get_history_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $get_history_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_history_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_history_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $get_rev_fn( - &self, - rev_id: String, - expand: Option, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET? - let ret = match (|| { - let rev_id = Uuid::from_str(&rev_id)?; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m)?, - }; - match expand { - None => $model::db_get_rev(&conn, rev_id, hide_flags), - Some(param) => { - let expand_flags = ExpandFlags::from_str(¶m)?; - let mut entity = $model::db_get_rev(&conn, rev_id, hide_flags)?; - entity.db_expand(&conn, expand_flags)?; - Ok(entity) - }, - } - })() { - Ok(entity) => - $get_rev_resp::FoundEntityRevision(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_rev_resp::NotFound(ErrorResponse { message: format!("No such entity revision {}: {}", stringify!($model), rev_id) }), - Err(Error(ErrorKind::Uuid(e), _)) => - $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_rev_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $get_edit_fn( - &self, - edit_id: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET? 
- let ret = match (|| { - let edit_id = Uuid::from_str(&edit_id)?; - $model::db_get_edit(&conn, edit_id)?.into_model() - })() { - Ok(edit) => - $get_edit_resp::FoundEdit(edit), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_edit_resp::NotFound(ErrorResponse { message: format!("No such {} entity edit: {}", stringify!($model), edit_id) }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_edit_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $delete_edit_fn( - &self, - edit_id: String, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let edit_id = Uuid::from_str(&edit_id)?; - let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($delete_edit_fn)))?; - auth_context.require_role(FatcatRole::Editor)?; - let edit = $model::db_get_edit(&conn, edit_id)?; - auth_context.require_editgroup(&conn, FatCatId::from_uuid(&edit.editgroup_id))?; - $model::db_delete_edit(&conn, edit_id) - }) { - Ok(()) => - $delete_edit_resp::DeletedEdit(Success { message: format!("Successfully deleted work-in-progress {} edit: {}", stringify!($model), edit_id) } ), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $delete_edit_resp::NotFound(ErrorResponse { message: format!("No such {} edit: {}", stringify!($model), edit_id) }), - Err(Error(ErrorKind::Diesel(e), _)) => - $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => - $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? - $delete_edit_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => - $delete_edit_resp::Forbidden(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $delete_edit_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - fn $get_redirects_fn( - &self, - ident: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET? 
- let ret = match (|| { - let entity_id = FatCatId::from_str(&ident)?; - let redirects: Vec = $model::db_get_redirects(&conn, entity_id)?; - Ok(redirects.into_iter().map(|fcid| fcid.to_string()).collect()) - })() { - Ok(redirects) => - $get_redirects_resp::FoundEntityRedirects(redirects), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_redirects_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), - Err(Error(ErrorKind::Uuid(e), _)) => - $get_redirects_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => - $get_redirects_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_redirects_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_redirects_resp::GenericError(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - - } -} - -macro_rules! wrap_lookup_handler { - ($get_fn:ident, $get_handler:ident, $get_resp:ident, $idname:ident) => { - fn $get_fn( - &self, - $idname: Option, - wikidata_qid: Option, - expand: Option, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let expand_flags = match expand { - None => ExpandFlags::none(), - Some(param) => ExpandFlags::from_str(¶m).unwrap(), - }; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m).unwrap(), - }; - // No transaction for GET - let ret = match self.$get_handler(&$idname, &wikidata_qid, expand_flags, hide_flags, &conn) { - Ok(entity) => - $get_resp::FoundEntity(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_resp::NotFound(ErrorResponse { message: format!("Not found: {:?} / {:?}", $idname, wikidata_qid) }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MalformedChecksum(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { - $get_resp::BadRequest(ErrorResponse { message: e.to_string(), }) }, - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - } -} - -macro_rules! 
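// Note on the `.unwrap()` calls on ExpandFlags::from_str / HideFlags::from_str in the
// lookup wrappers above: both parsers are effectively infallible (they only test list
// membership and always return Ok), so the unwraps cannot panic; unrecognized flag
// names are simply ignored.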
wrap_fcid_handler { - ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { - fn $get_fn( - &self, - id: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match (|| { - let fcid = FatCatId::from_str(&id)?; - self.$get_handler(fcid, &conn) - })() { - Ok(entity) => - $get_resp::Found(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - } -} - -macro_rules! wrap_fcid_hide_handler { - ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { - fn $get_fn( - &self, - id: String, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match (|| { - let fcid = FatCatId::from_str(&id)?; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m)?, - }; - self.$get_handler(fcid, hide_flags, &conn) - })() { - Ok(entity) => - $get_resp::Found(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => - $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), - Err(Error(ErrorKind::MalformedExternalId(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(Error(ErrorKind::OtherBadRequest(e), _)) => - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), - Err(e) => { - error!("{}", e); - $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) - }, - }; - Box::new(futures::done(Ok(ret))) - } - } -} - -impl Api for Server { - wrap_entity_handlers!( - get_container, - GetContainerResponse, - create_container, - CreateContainerResponse, - create_container_batch, - create_container_batch_handler, - CreateContainerBatchResponse, - update_container, - UpdateContainerResponse, - delete_container, - DeleteContainerResponse, - get_container_history, - GetContainerHistoryResponse, - get_container_edit, - GetContainerEditResponse, - delete_container_edit, - DeleteContainerEditResponse, - get_container_revision, - GetContainerRevisionResponse, - get_container_redirects, - GetContainerRedirectsResponse, - ContainerEntity - ); - - wrap_entity_handlers!( - get_creator, - GetCreatorResponse, - create_creator, - CreateCreatorResponse, - create_creator_batch, - create_creator_batch_handler, - CreateCreatorBatchResponse, - update_creator, - UpdateCreatorResponse, - delete_creator, - DeleteCreatorResponse, - get_creator_history, - GetCreatorHistoryResponse, - get_creator_edit, - GetCreatorEditResponse, - delete_creator_edit, - DeleteCreatorEditResponse, - get_creator_revision, - GetCreatorRevisionResponse, - get_creator_redirects, - GetCreatorRedirectsResponse, - CreatorEntity - ); - wrap_entity_handlers!( - get_file, - GetFileResponse, - create_file, - 
CreateFileResponse, - create_file_batch, - create_file_batch_handler, - CreateFileBatchResponse, - update_file, - UpdateFileResponse, - delete_file, - DeleteFileResponse, - get_file_history, - GetFileHistoryResponse, - get_file_edit, - GetFileEditResponse, - delete_file_edit, - DeleteFileEditResponse, - get_file_revision, - GetFileRevisionResponse, - get_file_redirects, - GetFileRedirectsResponse, - FileEntity - ); - wrap_entity_handlers!( - get_fileset, - GetFilesetResponse, - create_fileset, - CreateFilesetResponse, - create_fileset_batch, - create_fileset_batch_handler, - CreateFilesetBatchResponse, - update_fileset, - UpdateFilesetResponse, - delete_fileset, - DeleteFilesetResponse, - get_fileset_history, - GetFilesetHistoryResponse, - get_fileset_edit, - GetFilesetEditResponse, - delete_fileset_edit, - DeleteFilesetEditResponse, - get_fileset_revision, - GetFilesetRevisionResponse, - get_fileset_redirects, - GetFilesetRedirectsResponse, - FilesetEntity - ); - wrap_entity_handlers!( - get_webcapture, - GetWebcaptureResponse, - create_webcapture, - CreateWebcaptureResponse, - create_webcapture_batch, - create_webcapture_batch_handler, - CreateWebcaptureBatchResponse, - update_webcapture, - UpdateWebcaptureResponse, - delete_webcapture, - DeleteWebcaptureResponse, - get_webcapture_history, - GetWebcaptureHistoryResponse, - get_webcapture_edit, - GetWebcaptureEditResponse, - delete_webcapture_edit, - DeleteWebcaptureEditResponse, - get_webcapture_revision, - GetWebcaptureRevisionResponse, - get_webcapture_redirects, - GetWebcaptureRedirectsResponse, - WebcaptureEntity - ); - wrap_entity_handlers!( - get_release, - GetReleaseResponse, - create_release, - CreateReleaseResponse, - create_release_batch, - create_release_batch_handler, - CreateReleaseBatchResponse, - update_release, - UpdateReleaseResponse, - delete_release, - DeleteReleaseResponse, - get_release_history, - GetReleaseHistoryResponse, - get_release_edit, - GetReleaseEditResponse, - delete_release_edit, - DeleteReleaseEditResponse, - get_release_revision, - GetReleaseRevisionResponse, - get_release_redirects, - GetReleaseRedirectsResponse, - ReleaseEntity - ); - wrap_entity_handlers!( - get_work, - GetWorkResponse, - create_work, - CreateWorkResponse, - create_work_batch, - create_work_batch_handler, - CreateWorkBatchResponse, - update_work, - UpdateWorkResponse, - delete_work, - DeleteWorkResponse, - get_work_history, - GetWorkHistoryResponse, - get_work_edit, - GetWorkEditResponse, - delete_work_edit, - DeleteWorkEditResponse, - get_work_revision, - GetWorkRevisionResponse, - get_work_redirects, - GetWorkRedirectsResponse, - WorkEntity - ); - - wrap_lookup_handler!( - lookup_container, - lookup_container_handler, - LookupContainerResponse, - issnl - ); - wrap_lookup_handler!( - lookup_creator, - lookup_creator_handler, - LookupCreatorResponse, - orcid - ); - - wrap_fcid_hide_handler!( - get_release_files, - get_release_files_handler, - GetReleaseFilesResponse - ); - wrap_fcid_hide_handler!( - get_release_filesets, - get_release_filesets_handler, - GetReleaseFilesetsResponse - ); - wrap_fcid_hide_handler!( - get_release_webcaptures, - get_release_webcaptures_handler, - GetReleaseWebcapturesResponse - ); - wrap_fcid_hide_handler!( - get_work_releases, - get_work_releases_handler, - GetWorkReleasesResponse - ); - wrap_fcid_hide_handler!( - get_creator_releases, - get_creator_releases_handler, - GetCreatorReleasesResponse - ); - wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse); - wrap_fcid_handler!( - 
get_editor_changelog, - get_editor_changelog_handler, - GetEditorChangelogResponse - ); - - fn lookup_file( - &self, - md5: Option, - sha1: Option, - sha256: Option, - expand: Option, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let expand_flags = match expand { - None => ExpandFlags::none(), - Some(param) => ExpandFlags::from_str(¶m).unwrap(), - }; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m).unwrap(), - }; - // No transaction for GET - let ret = - match self.lookup_file_handler(&md5, &sha1, &sha256, expand_flags, hide_flags, &conn) { - Ok(entity) => LookupFileResponse::FoundEntity(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - LookupFileResponse::NotFound(ErrorResponse { - message: format!("Not found: {:?} / {:?} / {:?}", md5, sha1, sha256), - }) - } - Err(Error(ErrorKind::MalformedExternalId(e), _)) => { - LookupFileResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::MalformedChecksum(e), _)) => { - LookupFileResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { - LookupFileResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => { - error!("{}", e); - LookupFileResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn lookup_release( - &self, - doi: Option, - wikidata_qid: Option, - isbn13: Option, - pmid: Option, - pmcid: Option, - core_id: Option, - expand: Option, - hide: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let expand_flags = match expand { - None => ExpandFlags::none(), - Some(param) => ExpandFlags::from_str(¶m).unwrap(), - }; - let hide_flags = match hide { - None => HideFlags::none(), - Some(param) => HideFlags::from_str(¶m).unwrap(), - }; - // No transaction for GET - let ret = match self.lookup_release_handler( - &doi, - &wikidata_qid, - &isbn13, - &pmid, - &pmcid, - &core_id, - expand_flags, - hide_flags, - &conn, - ) { - Ok(entity) => LookupReleaseResponse::FoundEntity(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - LookupReleaseResponse::NotFound(ErrorResponse { - message: format!( - "Not found: {:?} / {:?} / {:?} / {:?} / {:?} / {:?}", - doi, wikidata_qid, isbn13, pmid, pmcid, core_id - ), - }) - } - Err(Error(ErrorKind::MalformedExternalId(e), _)) => { - LookupReleaseResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { - LookupReleaseResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => { - error!("{}", e); - LookupReleaseResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - /// For now, only implements updating username - fn update_editor( - &self, - editor_id: String, - editor: models::Editor, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - if Some(editor_id.clone()) != editor.editor_id { - return Err( - ErrorKind::OtherBadRequest("editor_id doesn't match".to_string()).into(), - ); - } - let auth_context = self.auth_confectionary.require_auth( - &conn, - &context.auth_data, - Some("update_editor"), - )?; - let 
editor_id = FatCatId::from_str(&editor_id)?; - // DANGER! these permissions are for username updates only! - if editor_id == auth_context.editor_id { - // self edit of username allowed - auth_context.require_role(FatcatRole::Editor)?; - } else { - // admin can update any username - auth_context.require_role(FatcatRole::Admin)?; - }; - update_editor_username(&conn, editor_id, editor.username).map(|e| e.into_model()) - }) { - Ok(editor) => UpdateEditorResponse::UpdatedEditor(editor), - Err(Error(ErrorKind::Diesel(e), _)) => { - UpdateEditorResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::Uuid(e), _)) => UpdateEditorResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => { - UpdateEditorResponse::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string(), - }) - } - Err(Error(ErrorKind::MalformedExternalId(e), _)) => { - UpdateEditorResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? - { - UpdateEditorResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { - UpdateEditorResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::OtherBadRequest(e), _)) => { - UpdateEditorResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => { - error!("{}", e); - UpdateEditorResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn accept_editgroup( - &self, - editgroup_id: String, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let editgroup_id = FatCatId::from_str(&editgroup_id)?; - let auth_context = self.auth_confectionary.require_auth( - &conn, - &context.auth_data, - Some("accept_editgroup"), - )?; - auth_context.require_role(FatcatRole::Admin)?; - // NOTE: this is currently redundant, but zero-cost - auth_context.require_editgroup(&conn, editgroup_id)?; - self.accept_editgroup_handler(editgroup_id, &conn) - }) { - Ok(()) => AcceptEditgroupResponse::MergedSuccessfully(Success { - message: "horray!".to_string(), - }), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - AcceptEditgroupResponse::NotFound(ErrorResponse { - message: format!("No such editgroup: {}", editgroup_id), - }) - } - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => { - AcceptEditgroupResponse::BadRequest(ErrorResponse { - message: ErrorKind::EditgroupAlreadyAccepted(e).to_string(), - }) - } - Err(Error(ErrorKind::InvalidCredentials(e), _)) => { - AcceptEditgroupResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { - AcceptEditgroupResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => AcceptEditgroupResponse::GenericError(ErrorResponse { - message: e.to_string(), - }), - }; - Box::new(futures::done(Ok(ret))) - } - - fn get_editgroup( - &self, - editgroup_id: String, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let editgroup_id = FatCatId::from_str(&editgroup_id)?; - self.get_editgroup_handler(editgroup_id, &conn) - }) { - Ok(entity) => GetEditgroupResponse::Found(entity), 
- Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetEditgroupResponse::NotFound(ErrorResponse { - message: format!("No such editgroup: {}", editgroup_id), - }) - } - Err(e) => - // TODO: dig in to error type here - { - GetEditgroupResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn create_editgroup( - &self, - entity: models::Editgroup, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth( - &conn, - &context.auth_data, - Some("create_editgroup"), - )?; - auth_context.require_role(FatcatRole::Editor)?; - let mut entity = entity.clone(); - match entity.editor_id.clone() { - Some(editor_id) => { - if !auth_context.has_role(FatcatRole::Admin) { - if editor_id != auth_context.editor_id.to_string() { - bail!("not authorized to create editgroups in others' names"); - } - } - } - None => { - entity.editor_id = Some(auth_context.editor_id.to_string()); - } - }; - self.create_editgroup_handler(entity, &conn) - }) { - Ok(eg) => CreateEditgroupResponse::SuccessfullyCreated(eg), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => { - CreateEditgroupResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { - CreateEditgroupResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => - // TODO: dig in to error type here - { - CreateEditgroupResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn get_changelog( - &self, - limit: Option, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.get_changelog_handler(limit, &conn) { - Ok(changelog) => GetChangelogResponse::Success(changelog), - Err(e) => { - error!("{}", e); - GetChangelogResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn get_changelog_entry( - &self, - id: i64, - _context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.get_changelog_entry_handler(id, &conn) { - Ok(entry) => GetChangelogEntryResponse::FoundChangelogEntry(entry), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetChangelogEntryResponse::NotFound(ErrorResponse { - message: format!("No such changelog entry: {}", id), - }) - } - Err(e) => { - error!("{}", e); - GetChangelogEntryResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn auth_oidc( - &self, - params: models::AuthOidc, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth( - &conn, - &context.auth_data, - Some("auth_oidc"), - )?; - auth_context.require_role(FatcatRole::Superuser)?; - let (editor, created) = self.auth_oidc_handler(params, &conn)?; - // create an auth token with 31 day duration - let token = self.auth_confectionary.create_token( - FatCatId::from_str(&editor.editor_id.clone().unwrap())?, - Some(chrono::Duration::days(31)), - )?; - let result = AuthOidcResult { editor, token }; - Ok((result, created)) - }) { - 
Ok((result, true)) => AuthOidcResponse::Created(result), - Ok((result, false)) => AuthOidcResponse::Found(result), - Err(Error(ErrorKind::Diesel(e), _)) => AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }), - Err(Error(ErrorKind::Uuid(e), _)) => AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }), - Err(Error(ErrorKind::InvalidFatcatId(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(e).to_string(), - }) - } - Err(Error(ErrorKind::MalformedExternalId(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::MalformedChecksum(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? - { - AuthOidcResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { - AuthOidcResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::OtherBadRequest(e), _)) => { - AuthOidcResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => { - error!("{}", e); - AuthOidcResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn auth_check( - &self, - role: Option, - context: &Context, - ) -> Box + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| { - let auth_context = self.auth_confectionary.require_auth( - &conn, - &context.auth_data, - Some("auth_check"), - )?; - if let Some(role) = role { - let role = match role.to_lowercase().as_ref() { - "superuser" => FatcatRole::Superuser, - "admin" => FatcatRole::Admin, - "editor" => FatcatRole::Editor, - "bot" => FatcatRole::Bot, - "human" => FatcatRole::Human, - "public" => FatcatRole::Public, - _ => bail!("unknown auth role: {}", role), - }; - auth_context.require_role(role)?; - }; - Ok(()) - }) { - Ok(()) => AuthCheckResponse::Success(Success { - message: "auth check successful!".to_string(), - }), - Err(Error(ErrorKind::Diesel(e), _)) => AuthCheckResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }), - Err(Error(ErrorKind::Uuid(e), _)) => AuthCheckResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }), - Err(Error(ErrorKind::InvalidCredentials(e), _)) => - // TODO: why can't I NotAuthorized here? 
- { - AuthCheckResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { - AuthCheckResponse::Forbidden(ErrorResponse { - message: e.to_string(), - }) - } - Err(Error(ErrorKind::OtherBadRequest(e), _)) => { - AuthCheckResponse::BadRequest(ErrorResponse { - message: e.to_string(), - }) - } - Err(e) => { - error!("{}", e); - AuthCheckResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } -} diff --git a/rust/src/auth.rs b/rust/src/auth.rs index da038b6b..255da8dd 100644 --- a/rust/src/auth.rs +++ b/rust/src/auth.rs @@ -5,14 +5,16 @@ use macaroon::{Format, Macaroon, Verifier}; use std::fmt; use swagger::auth::{AuthData, Authorization, Scopes}; -use crate::api_helpers::*; -use chrono::prelude::*; use crate::database_models::*; use crate::database_schema::*; +use crate::errors::*; +use crate::identifiers::*; +use crate::server::*; +use chrono::prelude::*; use diesel; use diesel::prelude::*; -use crate::errors::*; use std::collections::HashMap; +use std::env; use std::str::FromStr; // 32 bytes max (!) @@ -468,3 +470,26 @@ pub fn print_editors(conn: &DbConn) -> Result<()> { } Ok(()) } + +pub fn env_confectionary() -> Result { + let auth_location = env::var("AUTH_LOCATION").expect("AUTH_LOCATION must be set"); + let auth_key = env::var("AUTH_SECRET_KEY").expect("AUTH_SECRET_KEY must be set"); + let auth_key_ident = env::var("AUTH_KEY_IDENT").expect("AUTH_KEY_IDENT must be set"); + info!("Loaded primary auth key: {}", auth_key_ident); + let mut confectionary = AuthConfectionary::new(auth_location, auth_key_ident, auth_key)?; + match env::var("AUTH_ALT_KEYS") { + Ok(var) => { + for pair in var.split(",") { + let pair: Vec<&str> = pair.split(":").collect(); + if pair.len() != 2 { + println!("{:#?}", pair); + bail!("couldn't parse keypair from AUTH_ALT_KEYS (expected 'ident:key' pairs separated by commas)"); + } + info!("Loading alt auth key: {}", pair[0]); + confectionary.add_keypair(pair[0].to_string(), pair[1].to_string())?; + } + } + Err(_) => (), + } + Ok(confectionary) +} diff --git a/rust/src/bin/fatcat-auth.rs b/rust/src/bin/fatcat-auth.rs index addd2b66..7e2a7c39 100644 --- a/rust/src/bin/fatcat-auth.rs +++ b/rust/src/bin/fatcat-auth.rs @@ -1,32 +1,16 @@ //! JSON Export Helper -//#[macro_use] -extern crate clap; -extern crate diesel; -extern crate dotenv; -#[macro_use] -extern crate error_chain; -extern crate fatcat; -//#[macro_use] -extern crate env_logger; -extern crate log; -extern crate serde_json; -extern crate uuid; - use clap::{App, SubCommand}; -use diesel::prelude::*; -use fatcat::api_helpers::FatCatId; +use fatcat::auth; +use fatcat::editing; use fatcat::errors::*; +use fatcat::identifiers::FatCatId; +use fatcat::server::*; +use std::process; use std::str::FromStr; -//use uuid::Uuid; - -//use error_chain::ChainedError; -//use std::io::{Stdout,StdoutLock}; -//use std::io::prelude::*; -//use std::io::{BufReader, BufWriter}; -fn run() -> Result<()> { +fn main() -> Result<()> { let m = App::new("fatcat-auth") .version(env!("CARGO_PKG_VERSION")) .author("Bryan Newbold ") @@ -84,16 +68,14 @@ fn run() -> Result<()> { } // Then the ones that do - let db_conn = fatcat::database_worker_pool()? 
- .get() - .expect("database pool"); - let confectionary = fatcat::env_confectionary()?; + let db_conn = database_worker_pool()?.get().expect("database pool"); + let confectionary = auth::env_confectionary()?; match m.subcommand() { ("list-editors", Some(_subm)) => { fatcat::auth::print_editors(&db_conn)?; } ("create-editor", Some(subm)) => { - let editor = fatcat::api_helpers::create_editor( + let editor = editing::create_editor( &db_conn, subm.value_of("username").unwrap().to_string(), subm.is_present("admin"), @@ -104,10 +86,6 @@ fn run() -> Result<()> { } ("create-token", Some(subm)) => { let editor_id = FatCatId::from_str(subm.value_of("editor-id").unwrap())?; - // check that editor exists - let _ed: fatcat::database_models::EditorRow = fatcat::database_schema::editor::table - .find(&editor_id.to_uuid()) - .get_result(&db_conn)?; println!("{}", confectionary.create_token(editor_id, None)?); } ("inspect-token", Some(subm)) => { @@ -125,10 +103,8 @@ fn run() -> Result<()> { _ => { println!("Missing or unimplemented command!"); println!("{}", m.usage()); - ::std::process::exit(-1); + process::exit(-1); } } Ok(()) } - -quick_main!(run); diff --git a/rust/src/bin/fatcat-export.rs b/rust/src/bin/fatcat-export.rs index e1b930fc..889d7dff 100644 --- a/rust/src/bin/fatcat-export.rs +++ b/rust/src/bin/fatcat-export.rs @@ -2,25 +2,17 @@ #[macro_use] extern crate clap; -extern crate diesel; -extern crate dotenv; #[macro_use] extern crate error_chain; -extern crate fatcat; -extern crate fatcat_api_spec; #[macro_use] extern crate log; -extern crate crossbeam_channel; -extern crate env_logger; -extern crate num_cpus; -extern crate serde_json; -extern crate uuid; use clap::{App, Arg}; -use fatcat::api_entity_crud::*; -use fatcat::api_helpers::*; +use fatcat::entity_crud::*; use fatcat::errors::*; +use fatcat::identifiers::*; +use fatcat::server::*; use fatcat_api_spec::models::*; use std::str::FromStr; use uuid::Uuid; @@ -167,7 +159,7 @@ pub fn do_export( entity_type: ExportEntityType, redirects: bool, ) -> Result<()> { - let db_pool = fatcat::database_worker_pool()?; + let db_pool = database_worker_pool()?; let buf_input = BufReader::new(std::io::stdin()); let (row_sender, row_receiver) = channel::bounded(CHANNEL_BUFFER_LEN); let (output_sender, output_receiver) = channel::bounded(CHANNEL_BUFFER_LEN); @@ -232,7 +224,7 @@ pub fn do_export( Ok(()) } -fn run() -> Result<()> { +fn main() -> Result<()> { let m = App::new("fatcat-export") .version(env!("CARGO_PKG_VERSION")) .author("Bryan Newbold ") @@ -273,5 +265,3 @@ fn run() -> Result<()> { m.is_present("include_redirects"), ) } - -quick_main!(run); diff --git a/rust/src/bin/fatcatd.rs b/rust/src/bin/fatcatd.rs index 682f5038..34652105 100644 --- a/rust/src/bin/fatcatd.rs +++ b/rust/src/bin/fatcatd.rs @@ -1,29 +1,35 @@ #![allow(missing_docs)] -extern crate chrono; -extern crate clap; -extern crate diesel; -//extern crate dotenv; -extern crate error_chain; -extern crate fatcat; -extern crate fatcat_api_spec; -extern crate futures; -extern crate iron; -extern crate iron_slog; #[macro_use] extern crate slog; -extern crate slog_async; -extern crate slog_term; +#[macro_use] +extern crate hyper; use clap::{App, Arg}; +use fatcat::errors::*; +use fatcat::server::*; +use iron::middleware::AfterMiddleware; use iron::modifiers::RedirectRaw; use iron::{status, Chain, Iron, IronResult, Request, Response}; use iron_slog::{DefaultLogFormatter, LoggerMiddleware}; use slog::{Drain, Logger}; +// HTTP header middleware +header! 
{ (XClacksOverhead, "X-Clacks-Overhead") => [String] } + +pub struct XClacksOverheadMiddleware; + +impl AfterMiddleware for XClacksOverheadMiddleware { + fn after(&self, _req: &mut Request, mut res: Response) -> iron::IronResult { + res.headers + .set(XClacksOverhead("GNU aaronsw, jpb".to_owned())); + Ok(res) + } +} + /// Create custom server, wire it to the autogenerated router, /// and pass it to the web server. -fn main() { +fn main() -> Result<()> { let matches = App::new("server") .arg( Arg::with_name("https") @@ -38,7 +44,7 @@ fn main() { let logger = Logger::root(drain, o!()); let formatter = DefaultLogFormatter; - let server = fatcat::server().unwrap(); + let server = create_server()?; info!( logger, "using primary auth key: {}", server.auth_confectionary.identifier, @@ -59,7 +65,6 @@ fn main() { router.get("/v0/openapi2.yml", yaml_handler, "openapi2-spec-yaml"); fn root_handler(_: &mut Request) -> IronResult { - //Ok(Response::with((status::Found, Redirect(Url::parse("/swagger-ui").unwrap())))) Ok(Response::with(( status::Found, RedirectRaw("/swagger-ui".to_string()), @@ -92,7 +97,7 @@ fn main() { chain.link_before(fatcat_api_spec::server::ExtractAuthData); chain.link_before(fatcat::auth::MacaroonAuthMiddleware::new()); - chain.link_after(fatcat::XClacksOverheadMiddleware); + chain.link_after(XClacksOverheadMiddleware); if matches.is_present("https") { unimplemented!() @@ -100,6 +105,7 @@ fn main() { // Using HTTP Iron::new(chain) .http(host_port) - .expect("Failed to start HTTP server"); + .expect("failed to start HTTP server"); } + Ok(()) } diff --git a/rust/src/database_models.rs b/rust/src/database_models.rs index ad9aaf29..4575aeaf 100644 --- a/rust/src/database_models.rs +++ b/rust/src/database_models.rs @@ -1,9 +1,9 @@ #![allow(proc_macro_derive_resolution_fallback)] -use crate::api_helpers::uuid2fcid; -use chrono; use crate::database_schema::*; use crate::errors::*; +use crate::identifiers::uuid2fcid; +use chrono; use fatcat_api_spec::models::{ChangelogEntry, Editgroup, Editor, EntityEdit}; use serde_json; use uuid::Uuid; diff --git a/rust/src/editing.rs b/rust/src/editing.rs new file mode 100644 index 00000000..e3777e24 --- /dev/null +++ b/rust/src/editing.rs @@ -0,0 +1,140 @@ +use crate::database_models::*; +use crate::database_schema::*; +use crate::entity_crud::EntityCrud; +use crate::errors::*; +use crate::identifiers::*; +use crate::server::*; +use diesel; +use diesel::prelude::*; +use fatcat_api_spec::models::*; +use uuid::Uuid; + +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option, + pub autoaccept: bool, +} + +impl EditContext { + /// This function should always be run within a transaction + pub fn check(&self, conn: &DbConn) -> Result<()> { + let count: i64 = changelog::table + .filter(changelog::editgroup_id.eq(&self.editgroup_id.to_uuid())) + .count() + .get_result(conn)?; + if count > 0 { + return Err(ErrorKind::EditgroupAlreadyAccepted(self.editgroup_id.to_string()).into()); + } + return Ok(()); + } +} + +pub fn make_edit_context( + conn: &DbConn, + editor_id: FatCatId, + editgroup_id: Option, + autoaccept: bool, +) -> Result { + let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { + (Some(eg), _) => eg, + // If autoaccept and no editgroup_id passed, always create a new one for this transaction + (None, true) => { + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(editor_id.to_uuid()),)) + .get_result(conn)?; + FatCatId::from_uuid(&eg_row.id) 
+ } + (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id.to_uuid(), conn)?), + }; + Ok(EditContext { + editor_id: editor_id, + editgroup_id: editgroup_id, + extra_json: None, + autoaccept: autoaccept, + }) +} + +pub fn create_editor( + conn: &DbConn, + username: String, + is_admin: bool, + is_bot: bool, +) -> Result { + check_username(&username)?; + let ed: EditorRow = diesel::insert_into(editor::table) + .values(( + editor::username.eq(username), + editor::is_admin.eq(is_admin), + editor::is_bot.eq(is_bot), + )) + .get_result(conn)?; + Ok(ed) +} + +pub fn update_editor_username( + conn: &DbConn, + editor_id: FatCatId, + username: String, +) -> Result { + check_username(&username)?; + diesel::update(editor::table.find(editor_id.to_uuid())) + .set(editor::username.eq(username)) + .execute(conn)?; + let editor: EditorRow = editor::table.find(editor_id.to_uuid()).get_result(conn)?; + Ok(editor) +} + +/// This function should always be run within a transaction +pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result { + // check for current active + let ed_row: EditorRow = editor::table.find(editor_id).first(conn)?; + if let Some(current) = ed_row.active_editgroup_id { + return Ok(current); + } + + // need to insert and update + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(ed_row.id),)) + .get_result(conn)?; + diesel::update(editor::table.find(ed_row.id)) + .set(editor::active_editgroup_id.eq(eg_row.id)) + .execute(conn)?; + Ok(eg_row.id) +} + +/// This function should always be run within a transaction +pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result { + // check that we haven't accepted already (in changelog) + // NB: could leave this to a UNIQUE constraint + // TODO: redundant with check_edit_context + let count: i64 = changelog::table + .filter(changelog::editgroup_id.eq(editgroup_id.to_uuid())) + .count() + .get_result(conn)?; + if count > 0 { + return Err(ErrorKind::EditgroupAlreadyAccepted(editgroup_id.to_string()).into()); + } + + // copy edit columns to ident table + ContainerEntity::db_accept_edits(conn, editgroup_id)?; + CreatorEntity::db_accept_edits(conn, editgroup_id)?; + FileEntity::db_accept_edits(conn, editgroup_id)?; + FilesetEntity::db_accept_edits(conn, editgroup_id)?; + WebcaptureEntity::db_accept_edits(conn, editgroup_id)?; + ReleaseEntity::db_accept_edits(conn, editgroup_id)?; + WorkEntity::db_accept_edits(conn, editgroup_id)?; + + // append log/changelog row + let entry: ChangelogRow = diesel::insert_into(changelog::table) + .values((changelog::editgroup_id.eq(editgroup_id.to_uuid()),)) + .get_result(conn)?; + + // update any editor's active editgroup + let no_active: Option = None; + diesel::update(editor::table) + .filter(editor::active_editgroup_id.eq(editgroup_id.to_uuid())) + .set(editor::active_editgroup_id.eq(no_active)) + .execute(conn)?; + Ok(entry) +} diff --git a/rust/src/endpoint_handlers.rs b/rust/src/endpoint_handlers.rs new file mode 100644 index 00000000..d2576d53 --- /dev/null +++ b/rust/src/endpoint_handlers.rs @@ -0,0 +1,582 @@ +//! API endpoint handlers +//! +//! This module contains actual implementations of endpoints with rust-style type signatures. 
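+//!
+//! As a rough sketch of the calling convention (the helper name `release_file_count` is
+//! made up for illustration, and it assumes `Server`, `DbConn`, and `Result` are exported
+//! from the `server` and `errors` modules as used elsewhere in this refactor), the
+//! `endpoints` wrapper layer calls these handlers with plain `Result` signatures and maps
+//! any errors onto HTTP response types itself:
+//!
+//!     use crate::entity_crud::HideFlags;
+//!     use crate::errors::Result;
+//!     use crate::identifiers::FatCatId;
+//!     use crate::server::{DbConn, Server};
+//!     use std::str::FromStr;
+//!
+//!     fn release_file_count(server: &Server, conn: &DbConn, ident: &str) -> Result<usize> {
+//!         let ident = FatCatId::from_str(ident)?;
+//!         let files = server.get_release_files_handler(ident, HideFlags::none(), conn)?;
+//!         Ok(files.len())
+//!     }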
+ +use crate::database_models::*; +use crate::database_schema::*; +use crate::editing::*; +use crate::entity_crud::{EntityCrud, ExpandFlags, HideFlags}; +use crate::errors::*; +use crate::identifiers::*; +use crate::server::*; +use chrono; +use diesel::prelude::*; +use diesel::{self, insert_into}; +use fatcat_api_spec::models; +use fatcat_api_spec::models::*; +use std::str::FromStr; + +macro_rules! entity_batch_handler { + ($post_batch_handler:ident, $model:ident) => { + pub fn $post_batch_handler( + &self, + entity_list: &[models::$model], + autoaccept: bool, + editor_id: FatCatId, + editgroup_id: Option, + conn: &DbConn, + ) -> Result> { + + let edit_context = make_edit_context(conn, editor_id, editgroup_id, autoaccept)?; + edit_context.check(&conn)?; + let model_list: Vec<&models::$model> = entity_list.iter().map(|e| e).collect(); + let edits = $model::db_create_batch(conn, &edit_context, model_list.as_slice())?; + + if autoaccept { + let _clr: ChangelogRow = diesel::insert_into(changelog::table) + .values((changelog::editgroup_id.eq(edit_context.editgroup_id.to_uuid()),)) + .get_result(conn)?; + } + edits.into_iter().map(|e| e.into_model()).collect() + } + } +} + +pub fn get_release_files( + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, +) -> Result> { + let rows: Vec<(FileRevRow, FileIdentRow, FileRevReleaseRow)> = file_rev::table + .inner_join(file_ident::table) + .inner_join(file_rev_release::table) + .filter(file_rev_release::target_release_ident_id.eq(&ident.to_uuid())) + .filter(file_ident::is_live.eq(true)) + .filter(file_ident::redirect_id.is_null()) + .load(conn)?; + + rows.into_iter() + .map(|(rev, ident, _)| FileEntity::db_from_row(conn, rev, Some(ident), hide_flags)) + .collect() +} + +pub fn get_release_filesets( + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, +) -> Result> { + let rows: Vec<(FilesetRevRow, FilesetIdentRow, FilesetRevReleaseRow)> = fileset_rev::table + .inner_join(fileset_ident::table) + .inner_join(fileset_rev_release::table) + .filter(fileset_rev_release::target_release_ident_id.eq(&ident.to_uuid())) + .filter(fileset_ident::is_live.eq(true)) + .filter(fileset_ident::redirect_id.is_null()) + .load(conn)?; + + rows.into_iter() + .map(|(rev, ident, _)| FilesetEntity::db_from_row(conn, rev, Some(ident), hide_flags)) + .collect() +} + +pub fn get_release_webcaptures( + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, +) -> Result> { + let rows: Vec<( + WebcaptureRevRow, + WebcaptureIdentRow, + WebcaptureRevReleaseRow, + )> = webcapture_rev::table + .inner_join(webcapture_ident::table) + .inner_join(webcapture_rev_release::table) + .filter(webcapture_rev_release::target_release_ident_id.eq(&ident.to_uuid())) + .filter(webcapture_ident::is_live.eq(true)) + .filter(webcapture_ident::redirect_id.is_null()) + .load(conn)?; + + rows.into_iter() + .map(|(rev, ident, _)| WebcaptureEntity::db_from_row(conn, rev, Some(ident), hide_flags)) + .collect() +} + +impl Server { + pub fn lookup_container_handler( + &self, + issnl: &Option, + wikidata_qid: &Option, + expand_flags: ExpandFlags, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result { + let (ident, rev): (ContainerIdentRow, ContainerRevRow) = match (issnl, wikidata_qid) { + (Some(issnl), None) => { + check_issn(issnl)?; + container_ident::table + .inner_join(container_rev::table) + .filter(container_rev::issnl.eq(&issnl)) + .filter(container_ident::is_live.eq(true)) + .filter(container_ident::redirect_id.is_null()) + .first(conn)? 
+ } + (None, Some(wikidata_qid)) => { + check_wikidata_qid(wikidata_qid)?; + container_ident::table + .inner_join(container_rev::table) + .filter(container_rev::wikidata_qid.eq(&wikidata_qid)) + .filter(container_ident::is_live.eq(true)) + .filter(container_ident::redirect_id.is_null()) + .first(conn)? + } + _ => { + return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); + } + }; + + let mut entity = ContainerEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; + entity.db_expand(&conn, expand_flags)?; + Ok(entity) + } + + pub fn lookup_creator_handler( + &self, + orcid: &Option, + wikidata_qid: &Option, + expand_flags: ExpandFlags, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result { + let (ident, rev): (CreatorIdentRow, CreatorRevRow) = match (orcid, wikidata_qid) { + (Some(orcid), None) => { + check_orcid(orcid)?; + creator_ident::table + .inner_join(creator_rev::table) + .filter(creator_rev::orcid.eq(orcid)) + .filter(creator_ident::is_live.eq(true)) + .filter(creator_ident::redirect_id.is_null()) + .first(conn)? + } + (None, Some(wikidata_qid)) => { + check_wikidata_qid(wikidata_qid)?; + creator_ident::table + .inner_join(creator_rev::table) + .filter(creator_rev::wikidata_qid.eq(wikidata_qid)) + .filter(creator_ident::is_live.eq(true)) + .filter(creator_ident::redirect_id.is_null()) + .first(conn)? + } + _ => { + return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); + } + }; + + let mut entity = CreatorEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; + entity.db_expand(&conn, expand_flags)?; + Ok(entity) + } + + pub fn get_creator_releases_handler( + &self, + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result> { + // TODO: some kind of unique or group-by? + let rows: Vec<(ReleaseRevRow, ReleaseIdentRow, ReleaseContribRow)> = release_rev::table + .inner_join(release_ident::table) + .inner_join(release_contrib::table) + .filter(release_contrib::creator_ident_id.eq(&ident.to_uuid())) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .load(conn)?; + + // TODO: from_rows, not from_row? + rows.into_iter() + .map(|(rev, ident, _)| ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)) + .collect() + } + + pub fn lookup_file_handler( + &self, + md5: &Option, + sha1: &Option, + sha256: &Option, + expand_flags: ExpandFlags, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result { + let (ident, rev): (FileIdentRow, FileRevRow) = match (md5, sha1, sha256) { + (Some(md5), None, None) => { + check_md5(md5)?; + file_ident::table + .inner_join(file_rev::table) + .filter(file_rev::md5.eq(md5)) + .filter(file_ident::is_live.eq(true)) + .filter(file_ident::redirect_id.is_null()) + .first(conn)? + } + (None, Some(sha1), None) => { + check_sha1(sha1)?; + file_ident::table + .inner_join(file_rev::table) + .filter(file_rev::sha1.eq(sha1)) + .filter(file_ident::is_live.eq(true)) + .filter(file_ident::redirect_id.is_null()) + .first(conn)? + } + (None, None, Some(sha256)) => { + check_sha256(sha256)?; + file_ident::table + .inner_join(file_rev::table) + .filter(file_rev::sha256.eq(sha256)) + .filter(file_ident::is_live.eq(true)) + .filter(file_ident::redirect_id.is_null()) + .first(conn)? 
+ } + _ => { + return Err(ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into()); + } + }; + + let mut entity = FileEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; + entity.db_expand(&conn, expand_flags)?; + Ok(entity) + } + + pub fn lookup_release_handler( + &self, + doi: &Option, + wikidata_qid: &Option, + isbn13: &Option, + pmid: &Option, + pmcid: &Option, + core_id: &Option, + expand_flags: ExpandFlags, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result { + let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = + match (doi, wikidata_qid, isbn13, pmid, pmcid, core_id) { + (Some(doi), None, None, None, None, None) => { + check_doi(doi)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::doi.eq(doi)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? + } + (None, Some(wikidata_qid), None, None, None, None) => { + check_wikidata_qid(wikidata_qid)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::wikidata_qid.eq(wikidata_qid)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? + } + (None, None, Some(isbn13), None, None, None) => { + // TODO: check_isbn13(isbn13)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::isbn13.eq(isbn13)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? + } + (None, None, None, Some(pmid), None, None) => { + check_pmid(pmid)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::pmid.eq(pmid)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? + } + (None, None, None, None, Some(pmcid), None) => { + check_pmcid(pmcid)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::pmcid.eq(pmcid)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? + } + (None, None, None, None, None, Some(core_id)) => { + // TODO: check_core_id(core_id)?; + release_ident::table + .inner_join(release_rev::table) + .filter(release_rev::core_id.eq(core_id)) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .first(conn)? 
+ } + _ => { + return Err( + ErrorKind::MissingOrMultipleExternalId("in lookup".to_string()).into(), + ); + } + }; + + let mut entity = ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)?; + entity.db_expand(&conn, expand_flags)?; + Ok(entity) + } + + pub fn get_release_files_handler( + &self, + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result> { + get_release_files(ident, hide_flags, conn) + } + + pub fn get_release_filesets_handler( + &self, + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result> { + get_release_filesets(ident, hide_flags, conn) + } + + pub fn get_release_webcaptures_handler( + &self, + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result> { + get_release_webcaptures(ident, hide_flags, conn) + } + + pub fn get_work_releases_handler( + &self, + ident: FatCatId, + hide_flags: HideFlags, + conn: &DbConn, + ) -> Result> { + let rows: Vec<(ReleaseRevRow, ReleaseIdentRow)> = release_rev::table + .inner_join(release_ident::table) + .filter(release_rev::work_ident_id.eq(&ident.to_uuid())) + .filter(release_ident::is_live.eq(true)) + .filter(release_ident::redirect_id.is_null()) + .load(conn)?; + + rows.into_iter() + .map(|(rev, ident)| ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)) + .collect() + } + + pub fn accept_editgroup_handler(&self, editgroup_id: FatCatId, conn: &DbConn) -> Result<()> { + accept_editgroup(editgroup_id, conn)?; + Ok(()) + } + + pub fn create_editgroup_handler( + &self, + entity: models::Editgroup, + conn: &DbConn, + ) -> Result { + let row: EditgroupRow = insert_into(editgroup::table) + .values(( + editgroup::editor_id.eq(FatCatId::from_str(&entity.editor_id.unwrap())?.to_uuid()), + editgroup::description.eq(entity.description), + editgroup::extra_json.eq(entity.extra), + )) + .get_result(conn)?; + + Ok(Editgroup { + editgroup_id: Some(uuid2fcid(&row.id)), + editor_id: Some(uuid2fcid(&row.editor_id)), + description: row.description, + edits: None, + extra: row.extra_json, + }) + } + + pub fn get_editgroup_handler( + &self, + editgroup_id: FatCatId, + conn: &DbConn, + ) -> Result { + let row: EditgroupRow = editgroup::table.find(editgroup_id.to_uuid()).first(conn)?; + + let edits = EditgroupEdits { + containers: Some( + container_edit::table + .filter(container_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: ContainerEditRow| e.into_model().unwrap()) + .collect(), + ), + creators: Some( + creator_edit::table + .filter(creator_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: CreatorEditRow| e.into_model().unwrap()) + .collect(), + ), + files: Some( + file_edit::table + .filter(file_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: FileEditRow| e.into_model().unwrap()) + .collect(), + ), + filesets: Some( + fileset_edit::table + .filter(fileset_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: FilesetEditRow| e.into_model().unwrap()) + .collect(), + ), + webcaptures: Some( + webcapture_edit::table + .filter(webcapture_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: WebcaptureEditRow| e.into_model().unwrap()) + .collect(), + ), + releases: Some( + release_edit::table + .filter(release_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? 
+ .into_iter() + .map(|e: ReleaseEditRow| e.into_model().unwrap()) + .collect(), + ), + works: Some( + work_edit::table + .filter(work_edit::editgroup_id.eq(editgroup_id.to_uuid())) + .get_results(conn)? + .into_iter() + .map(|e: WorkEditRow| e.into_model().unwrap()) + .collect(), + ), + }; + + let eg = Editgroup { + editgroup_id: Some(uuid2fcid(&row.id)), + editor_id: Some(uuid2fcid(&row.editor_id)), + description: row.description, + edits: Some(edits), + extra: row.extra_json, + }; + Ok(eg) + } + + pub fn get_editor_handler(&self, editor_id: FatCatId, conn: &DbConn) -> Result { + let row: EditorRow = editor::table.find(editor_id.to_uuid()).first(conn)?; + Ok(row.into_model()) + } + + pub fn get_editor_changelog_handler( + &self, + editor_id: FatCatId, + conn: &DbConn, + ) -> Result> { + // TODO: single query + let editor: EditorRow = editor::table.find(editor_id.to_uuid()).first(conn)?; + let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table + .inner_join(editgroup::table) + .filter(editgroup::editor_id.eq(editor.id)) + .load(conn)?; + + let entries = changes + .into_iter() + .map(|(cl_row, eg_row)| ChangelogEntry { + index: cl_row.id, + editgroup: Some(eg_row.into_model_partial()), + editgroup_id: uuid2fcid(&cl_row.editgroup_id), + timestamp: chrono::DateTime::from_utc(cl_row.timestamp, chrono::Utc), + }) + .collect(); + Ok(entries) + } + + pub fn get_changelog_handler( + &self, + limit: Option, + conn: &DbConn, + ) -> Result> { + let limit = limit.unwrap_or(50); + + let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table + .inner_join(editgroup::table) + .order(changelog::id.desc()) + .limit(limit) + .load(conn)?; + + let entries = changes + .into_iter() + .map(|(cl_row, eg_row)| ChangelogEntry { + index: cl_row.id, + editgroup: Some(eg_row.into_model_partial()), + editgroup_id: uuid2fcid(&cl_row.editgroup_id), + timestamp: chrono::DateTime::from_utc(cl_row.timestamp, chrono::Utc), + }) + .collect(); + Ok(entries) + } + + pub fn get_changelog_entry_handler(&self, index: i64, conn: &DbConn) -> Result { + let cl_row: ChangelogRow = changelog::table.find(index).first(conn)?; + let editgroup = + self.get_editgroup_handler(FatCatId::from_uuid(&cl_row.editgroup_id), conn)?; + + let mut entry = cl_row.into_model(); + entry.editgroup = Some(editgroup); + Ok(entry) + } + + /// This helper either finds an Editor model by OIDC parameters (eg, remote domain and + /// identifier), or creates one and inserts the appropriate auth rows. The semantics are + /// basically an "upsert" of signup/account-creation. + /// Returns an editor model and boolean flag indicating whether a new editor was created or + /// not. + /// If this function creates an editor, it sets the username to + /// "{preferred_username}-{provider}"; the intent is for this to be temporary but unique. Might + /// look like "bnewbold-github", or might look like "895139824-github". This is a hack to make + /// check/creation idempotent. 
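+    ///
+    /// A rough usage sketch (the concrete values, and the surrounding `server` and `conn`
+    /// bindings, are assumed for illustration and are not part of this change):
+    ///
+    ///     // `params` is the AuthOidc request body; say provider "github",
+    ///     // iss "https://github.com", sub "1234", preferred_username "example-user"
+    ///     let (editor, created) = server.auth_oidc_handler(params, &conn)?;
+    ///     if created {
+    ///         // first login for this (iss, sub) pair: a new editor row was inserted,
+    ///         // with the temporary username "example-user-github"
+    ///     } else {
+    ///         // repeat login: the existing editor row is returned unchanged
+    ///     }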
+ pub fn auth_oidc_handler(&self, params: AuthOidc, conn: &DbConn) -> Result<(Editor, bool)> { + let existing: Vec<(EditorRow, AuthOidcRow)> = editor::table + .inner_join(auth_oidc::table) + .filter(auth_oidc::oidc_sub.eq(params.sub.clone())) + .filter(auth_oidc::oidc_iss.eq(params.iss.clone())) + .load(conn)?; + + let (editor_row, created): (EditorRow, bool) = match existing.first() { + Some((editor, _)) => (editor.clone(), false), + None => { + let username = format!("{}-{}", params.preferred_username, params.provider); + let editor = create_editor(conn, username, false, false)?; + // create an auth login row so the user can log back in + diesel::insert_into(auth_oidc::table) + .values(( + auth_oidc::editor_id.eq(editor.id), + auth_oidc::provider.eq(params.provider), + auth_oidc::oidc_iss.eq(params.iss), + auth_oidc::oidc_sub.eq(params.sub), + )) + .execute(conn)?; + (editor, true) + } + }; + + Ok((editor_row.into_model(), created)) + } + + entity_batch_handler!(create_container_batch_handler, ContainerEntity); + entity_batch_handler!(create_creator_batch_handler, CreatorEntity); + entity_batch_handler!(create_file_batch_handler, FileEntity); + entity_batch_handler!(create_fileset_batch_handler, FilesetEntity); + entity_batch_handler!(create_webcapture_batch_handler, WebcaptureEntity); + entity_batch_handler!(create_release_batch_handler, ReleaseEntity); + entity_batch_handler!(create_work_batch_handler, WorkEntity); +} diff --git a/rust/src/endpoints.rs b/rust/src/endpoints.rs new file mode 100644 index 00000000..91db1027 --- /dev/null +++ b/rust/src/endpoints.rs @@ -0,0 +1,1298 @@ +//! API server endpoint request/response wrappers +//! +//! These mostly deal with type conversion between internal function signatures and API-defined +//! response types (mapping to HTTP statuses. Some contain actual endpoint implementations, but +//! most implementation lives in the server module. + +use crate::auth::*; +use crate::database_models::EntityEditRow; +use crate::editing::*; +use crate::entity_crud::{EntityCrud, ExpandFlags, HideFlags}; +use crate::errors::*; +use crate::identifiers::*; +use crate::server::*; +use diesel::Connection; +use fatcat_api_spec::models; +use fatcat_api_spec::models::*; +use fatcat_api_spec::*; +use futures::{self, Future}; +use std::str::FromStr; +use uuid::Uuid; + +/// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the +/// codegen fatcat-api-spec code wants) that call through to actual helpers (which have simple +/// Result<> return types) +macro_rules! wrap_entity_handlers { + // Would much rather just have entity ident, then generate the other fields from that, but Rust + // stable doesn't have a mechanism to "concat" or generate new identifiers in macros, at least + // in the context of defining new functions. 
+    // The only stable approach I know of would be: https://github.com/dtolnay/mashup
+    ($get_fn:ident, $get_resp:ident, $post_fn:ident, $post_resp:ident, $post_batch_fn:ident,
+     $post_batch_handler:ident, $post_batch_resp:ident, $update_fn:ident, $update_resp:ident,
+     $delete_fn:ident, $delete_resp:ident, $get_history_fn:ident, $get_history_resp:ident,
+     $get_edit_fn:ident, $get_edit_resp:ident, $delete_edit_fn:ident, $delete_edit_resp:ident,
+     $get_rev_fn:ident, $get_rev_resp:ident, $get_redirects_fn:ident, $get_redirects_resp:ident,
+     $model:ident) => {
+
+        fn $get_fn(
+            &self,
+            ident: String,
+            expand: Option,
+            hide: Option,
+            _context: &Context,
+        ) -> Box + Send> {
+            let conn = self.db_pool.get().expect("db_pool error");
+            // No transaction for GET
+            let ret = match (|| {
+                let entity_id = FatCatId::from_str(&ident)?;
+                let hide_flags = match hide {
+                    None => HideFlags::none(),
+                    Some(param) => HideFlags::from_str(&param)?,
+                };
+                match expand {
+                    None => $model::db_get(&conn, entity_id, hide_flags),
+                    Some(param) => {
+                        let expand_flags = ExpandFlags::from_str(&param)?;
+                        let mut entity = $model::db_get(&conn, entity_id, hide_flags)?;
+                        entity.db_expand(&conn, expand_flags)?;
+                        Ok(entity)
+                    },
+                }
+            })() {
+                Ok(entity) =>
+                    $get_resp::FoundEntity(entity),
+                Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
+                    $get_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }),
+                Err(Error(ErrorKind::Uuid(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::InvalidFatcatId(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse {
+                        message: ErrorKind::InvalidFatcatId(e).to_string() }),
+                Err(Error(ErrorKind::MalformedExternalId(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::OtherBadRequest(e), _)) =>
+                    $get_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(e) => {
+                    error!("{}", e);
+                    $get_resp::GenericError(ErrorResponse { message: e.to_string() })
+                },
+            };
+            Box::new(futures::done(Ok(ret)))
+        }
+
+        fn $post_fn(
+            &self,
+            entity: models::$model,
+            editgroup_id: Option,
+            context: &Context,
+        ) -> Box + Send> {
+            let conn = self.db_pool.get().expect("db_pool error");
+            let ret = match conn.transaction(|| {
+                let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_fn)))?;
+                auth_context.require_role(FatcatRole::Editor)?;
+                let editgroup_id = if let Some(s) = editgroup_id {
+                    let eg_id = FatCatId::from_str(&s)?;
+                    auth_context.require_editgroup(&conn, eg_id)?;
+                    Some(eg_id)
+                } else { None };
+                let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?;
+                edit_context.check(&conn)?;
+                entity.db_create(&conn, &edit_context)?.into_model()
+            }) {
+                Ok(edit) =>
+                    $post_resp::CreatedEntity(edit),
+                Err(Error(ErrorKind::Diesel(e), _)) =>
+                    $post_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::Uuid(e), _)) =>
+                    $post_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::InvalidFatcatId(e), _)) =>
+                    $post_resp::BadRequest(ErrorResponse {
+                        message: ErrorKind::InvalidFatcatId(e).to_string() }),
+                Err(Error(ErrorKind::MalformedExternalId(e), _)) =>
+                    $post_resp::BadRequest(ErrorResponse { message: e.to_string() }),
+                Err(Error(ErrorKind::MalformedChecksum(e), _)) =>
$post_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => + $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => + $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? + $post_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => + $post_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $post_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $post_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $post_batch_fn( + &self, + entity_list: &Vec, + autoaccept: Option, + editgroup_id: Option, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_batch_fn)))?; + auth_context.require_role(FatcatRole::Editor)?; + let editgroup_id = if let Some(s) = editgroup_id { + let eg_id = FatCatId::from_str(&s)?; + auth_context.require_editgroup(&conn, eg_id)?; + Some(eg_id) + } else { None }; + self.$post_batch_handler(entity_list, autoaccept.unwrap_or(false), auth_context.editor_id, editgroup_id, &conn) + }) { + Ok(edit) => + $post_batch_resp::CreatedEntities(edit), + Err(Error(ErrorKind::Diesel(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::Uuid(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::MalformedChecksum(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? 
+ $post_batch_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => + $post_batch_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $post_batch_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $post_batch_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $update_fn( + &self, + ident: String, + entity: models::$model, + editgroup_id: Option, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($update_fn)))?; + auth_context.require_role(FatcatRole::Editor)?; + let entity_id = FatCatId::from_str(&ident)?; + let editgroup_id = if let Some(s) = editgroup_id { + let eg_id = FatCatId::from_str(&s)?; + auth_context.require_editgroup(&conn, eg_id)?; + Some(eg_id) + } else { None }; + let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?; + edit_context.check(&conn)?; + entity.db_update(&conn, &edit_context, entity_id)?.into_model() + }) { + Ok(edit) => + $update_resp::UpdatedEntity(edit), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $update_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), + Err(Error(ErrorKind::Diesel(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::Uuid(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $update_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::MalformedChecksum(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidEntityStateTransform(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? 
+ $update_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => + $update_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $update_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $delete_fn( + &self, + ident: String, + editgroup_id: Option, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($delete_fn)))?; + auth_context.require_role(FatcatRole::Editor)?; + let entity_id = FatCatId::from_str(&ident)?; + let editgroup_id: Option = match editgroup_id { + Some(s) => { + let editgroup_id = FatCatId::from_str(&s)?; + auth_context.require_editgroup(&conn, editgroup_id)?; + Some(editgroup_id) + }, + None => None, + }; + let edit_context = make_edit_context(&conn, auth_context.editor_id, editgroup_id, false)?; + edit_context.check(&conn)?; + $model::db_delete(&conn, &edit_context, entity_id)?.into_model() + }) { + Ok(edit) => + $delete_resp::DeletedEntity(edit), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $delete_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), + Err(Error(ErrorKind::Diesel(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::Uuid(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $delete_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidEntityStateTransform(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? + $delete_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => + $delete_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $delete_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $get_history_fn( + &self, + ident: String, + limit: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET? 
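        // (Hedged aside, assuming stock PostgreSQL semantics and not part of the committed
        // patch: a single SELECT statement is already atomic on its own, so an explicit
        // transaction for these read-only handlers would mainly matter when a handler issues
        // several dependent reads, e.g. fetching a revision and then expanding related rows.)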
+ let ret = match (|| { + let entity_id = FatCatId::from_str(&ident)?; + $model::db_get_history(&conn, entity_id, limit) + })() { + Ok(history) => + $get_history_resp::FoundEntityHistory(history), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_history_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), + Err(Error(ErrorKind::Uuid(e), _)) => + $get_history_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $get_history_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_history_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_history_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $get_rev_fn( + &self, + rev_id: String, + expand: Option, + hide: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET? + let ret = match (|| { + let rev_id = Uuid::from_str(&rev_id)?; + let hide_flags = match hide { + None => HideFlags::none(), + Some(param) => HideFlags::from_str(¶m)?, + }; + match expand { + None => $model::db_get_rev(&conn, rev_id, hide_flags), + Some(param) => { + let expand_flags = ExpandFlags::from_str(¶m)?; + let mut entity = $model::db_get_rev(&conn, rev_id, hide_flags)?; + entity.db_expand(&conn, expand_flags)?; + Ok(entity) + }, + } + })() { + Ok(entity) => + $get_rev_resp::FoundEntityRevision(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_rev_resp::NotFound(ErrorResponse { message: format!("No such entity revision {}: {}", stringify!($model), rev_id) }), + Err(Error(ErrorKind::Uuid(e), _)) => + $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_rev_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_rev_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $get_edit_fn( + &self, + edit_id: String, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET? 
+ let ret = match (|| { + let edit_id = Uuid::from_str(&edit_id)?; + $model::db_get_edit(&conn, edit_id)?.into_model() + })() { + Ok(edit) => + $get_edit_resp::FoundEdit(edit), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_edit_resp::NotFound(ErrorResponse { message: format!("No such {} entity edit: {}", stringify!($model), edit_id) }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_edit_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $delete_edit_fn( + &self, + edit_id: String, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let edit_id = Uuid::from_str(&edit_id)?; + let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($delete_edit_fn)))?; + auth_context.require_role(FatcatRole::Editor)?; + let edit = $model::db_get_edit(&conn, edit_id)?; + auth_context.require_editgroup(&conn, FatCatId::from_uuid(&edit.editgroup_id))?; + $model::db_delete_edit(&conn, edit_id) + }) { + Ok(()) => + $delete_edit_resp::DeletedEdit(Success { message: format!("Successfully deleted work-in-progress {} edit: {}", stringify!($model), edit_id) } ), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $delete_edit_resp::NotFound(ErrorResponse { message: format!("No such {} edit: {}", stringify!($model), edit_id) }), + Err(Error(ErrorKind::Diesel(e), _)) => + $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => + $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $delete_edit_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? + $delete_edit_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => + $delete_edit_resp::Forbidden(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $delete_edit_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $get_redirects_fn( + &self, + ident: String, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET? 
+ let ret = match (|| { + let entity_id = FatCatId::from_str(&ident)?; + let redirects: Vec = $model::db_get_redirects(&conn, entity_id)?; + Ok(redirects.into_iter().map(|fcid| fcid.to_string()).collect()) + })() { + Ok(redirects) => + $get_redirects_resp::FoundEntityRedirects(redirects), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_redirects_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), ident) }), + Err(Error(ErrorKind::Uuid(e), _)) => + $get_redirects_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $get_redirects_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_redirects_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_redirects_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + } +} + +macro_rules! wrap_lookup_handler { + ($get_fn:ident, $get_handler:ident, $get_resp:ident, $idname:ident) => { + fn $get_fn( + &self, + $idname: Option, + wikidata_qid: Option, + expand: Option, + hide: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let expand_flags = match expand { + None => ExpandFlags::none(), + Some(param) => ExpandFlags::from_str(¶m).unwrap(), + }; + let hide_flags = match hide { + None => HideFlags::none(), + Some(param) => HideFlags::from_str(¶m).unwrap(), + }; + // No transaction for GET + let ret = match self.$get_handler(&$idname, &wikidata_qid, expand_flags, hide_flags, &conn) { + Ok(entity) => + $get_resp::FoundEntity(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_resp::NotFound(ErrorResponse { message: format!("Not found: {:?} / {:?}", $idname, wikidata_qid) }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::MalformedChecksum(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { + $get_resp::BadRequest(ErrorResponse { message: e.to_string(), }) }, + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + } +} + +macro_rules! 
wrap_fcid_handler { + ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { + fn $get_fn( + &self, + id: String, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match (|| { + let fcid = FatCatId::from_str(&id)?; + self.$get_handler(fcid, &conn) + })() { + Ok(entity) => + $get_resp::Found(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + } +} + +macro_rules! wrap_fcid_hide_handler { + ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { + fn $get_fn( + &self, + id: String, + hide: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match (|| { + let fcid = FatCatId::from_str(&id)?; + let hide_flags = match hide { + None => HideFlags::none(), + Some(param) => HideFlags::from_str(¶m)?, + }; + self.$get_handler(fcid, hide_flags, &conn) + })() { + Ok(entity) => + $get_resp::Found(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::OtherBadRequest(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + } +} + +impl Api for Server { + wrap_entity_handlers!( + get_container, + GetContainerResponse, + create_container, + CreateContainerResponse, + create_container_batch, + create_container_batch_handler, + CreateContainerBatchResponse, + update_container, + UpdateContainerResponse, + delete_container, + DeleteContainerResponse, + get_container_history, + GetContainerHistoryResponse, + get_container_edit, + GetContainerEditResponse, + delete_container_edit, + DeleteContainerEditResponse, + get_container_revision, + GetContainerRevisionResponse, + get_container_redirects, + GetContainerRedirectsResponse, + ContainerEntity + ); + + wrap_entity_handlers!( + get_creator, + GetCreatorResponse, + create_creator, + CreateCreatorResponse, + create_creator_batch, + create_creator_batch_handler, + CreateCreatorBatchResponse, + update_creator, + UpdateCreatorResponse, + delete_creator, + DeleteCreatorResponse, + get_creator_history, + GetCreatorHistoryResponse, + get_creator_edit, + GetCreatorEditResponse, + delete_creator_edit, + DeleteCreatorEditResponse, + get_creator_revision, + GetCreatorRevisionResponse, + get_creator_redirects, + GetCreatorRedirectsResponse, + CreatorEntity + ); + wrap_entity_handlers!( + get_file, + GetFileResponse, + create_file, + 
CreateFileResponse, + create_file_batch, + create_file_batch_handler, + CreateFileBatchResponse, + update_file, + UpdateFileResponse, + delete_file, + DeleteFileResponse, + get_file_history, + GetFileHistoryResponse, + get_file_edit, + GetFileEditResponse, + delete_file_edit, + DeleteFileEditResponse, + get_file_revision, + GetFileRevisionResponse, + get_file_redirects, + GetFileRedirectsResponse, + FileEntity + ); + wrap_entity_handlers!( + get_fileset, + GetFilesetResponse, + create_fileset, + CreateFilesetResponse, + create_fileset_batch, + create_fileset_batch_handler, + CreateFilesetBatchResponse, + update_fileset, + UpdateFilesetResponse, + delete_fileset, + DeleteFilesetResponse, + get_fileset_history, + GetFilesetHistoryResponse, + get_fileset_edit, + GetFilesetEditResponse, + delete_fileset_edit, + DeleteFilesetEditResponse, + get_fileset_revision, + GetFilesetRevisionResponse, + get_fileset_redirects, + GetFilesetRedirectsResponse, + FilesetEntity + ); + wrap_entity_handlers!( + get_webcapture, + GetWebcaptureResponse, + create_webcapture, + CreateWebcaptureResponse, + create_webcapture_batch, + create_webcapture_batch_handler, + CreateWebcaptureBatchResponse, + update_webcapture, + UpdateWebcaptureResponse, + delete_webcapture, + DeleteWebcaptureResponse, + get_webcapture_history, + GetWebcaptureHistoryResponse, + get_webcapture_edit, + GetWebcaptureEditResponse, + delete_webcapture_edit, + DeleteWebcaptureEditResponse, + get_webcapture_revision, + GetWebcaptureRevisionResponse, + get_webcapture_redirects, + GetWebcaptureRedirectsResponse, + WebcaptureEntity + ); + wrap_entity_handlers!( + get_release, + GetReleaseResponse, + create_release, + CreateReleaseResponse, + create_release_batch, + create_release_batch_handler, + CreateReleaseBatchResponse, + update_release, + UpdateReleaseResponse, + delete_release, + DeleteReleaseResponse, + get_release_history, + GetReleaseHistoryResponse, + get_release_edit, + GetReleaseEditResponse, + delete_release_edit, + DeleteReleaseEditResponse, + get_release_revision, + GetReleaseRevisionResponse, + get_release_redirects, + GetReleaseRedirectsResponse, + ReleaseEntity + ); + wrap_entity_handlers!( + get_work, + GetWorkResponse, + create_work, + CreateWorkResponse, + create_work_batch, + create_work_batch_handler, + CreateWorkBatchResponse, + update_work, + UpdateWorkResponse, + delete_work, + DeleteWorkResponse, + get_work_history, + GetWorkHistoryResponse, + get_work_edit, + GetWorkEditResponse, + delete_work_edit, + DeleteWorkEditResponse, + get_work_revision, + GetWorkRevisionResponse, + get_work_redirects, + GetWorkRedirectsResponse, + WorkEntity + ); + + wrap_lookup_handler!( + lookup_container, + lookup_container_handler, + LookupContainerResponse, + issnl + ); + wrap_lookup_handler!( + lookup_creator, + lookup_creator_handler, + LookupCreatorResponse, + orcid + ); + + wrap_fcid_hide_handler!( + get_release_files, + get_release_files_handler, + GetReleaseFilesResponse + ); + wrap_fcid_hide_handler!( + get_release_filesets, + get_release_filesets_handler, + GetReleaseFilesetsResponse + ); + wrap_fcid_hide_handler!( + get_release_webcaptures, + get_release_webcaptures_handler, + GetReleaseWebcapturesResponse + ); + wrap_fcid_hide_handler!( + get_work_releases, + get_work_releases_handler, + GetWorkReleasesResponse + ); + wrap_fcid_hide_handler!( + get_creator_releases, + get_creator_releases_handler, + GetCreatorReleasesResponse + ); + wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse); + wrap_fcid_handler!( + 
get_editor_changelog, + get_editor_changelog_handler, + GetEditorChangelogResponse + ); + + fn lookup_file( + &self, + md5: Option, + sha1: Option, + sha256: Option, + expand: Option, + hide: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let expand_flags = match expand { + None => ExpandFlags::none(), + Some(param) => ExpandFlags::from_str(¶m).unwrap(), + }; + let hide_flags = match hide { + None => HideFlags::none(), + Some(param) => HideFlags::from_str(¶m).unwrap(), + }; + // No transaction for GET + let ret = + match self.lookup_file_handler(&md5, &sha1, &sha256, expand_flags, hide_flags, &conn) { + Ok(entity) => LookupFileResponse::FoundEntity(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { + LookupFileResponse::NotFound(ErrorResponse { + message: format!("Not found: {:?} / {:?} / {:?}", md5, sha1, sha256), + }) + } + Err(Error(ErrorKind::MalformedExternalId(e), _)) => { + LookupFileResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::MalformedChecksum(e), _)) => { + LookupFileResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { + LookupFileResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => { + error!("{}", e); + LookupFileResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn lookup_release( + &self, + doi: Option, + wikidata_qid: Option, + isbn13: Option, + pmid: Option, + pmcid: Option, + core_id: Option, + expand: Option, + hide: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let expand_flags = match expand { + None => ExpandFlags::none(), + Some(param) => ExpandFlags::from_str(¶m).unwrap(), + }; + let hide_flags = match hide { + None => HideFlags::none(), + Some(param) => HideFlags::from_str(¶m).unwrap(), + }; + // No transaction for GET + let ret = match self.lookup_release_handler( + &doi, + &wikidata_qid, + &isbn13, + &pmid, + &pmcid, + &core_id, + expand_flags, + hide_flags, + &conn, + ) { + Ok(entity) => LookupReleaseResponse::FoundEntity(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { + LookupReleaseResponse::NotFound(ErrorResponse { + message: format!( + "Not found: {:?} / {:?} / {:?} / {:?} / {:?} / {:?}", + doi, wikidata_qid, isbn13, pmid, pmcid, core_id + ), + }) + } + Err(Error(ErrorKind::MalformedExternalId(e), _)) => { + LookupReleaseResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::MissingOrMultipleExternalId(e), _)) => { + LookupReleaseResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => { + error!("{}", e); + LookupReleaseResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + /// For now, only implements updating username + fn update_editor( + &self, + editor_id: String, + editor: models::Editor, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + if Some(editor_id.clone()) != editor.editor_id { + return Err( + ErrorKind::OtherBadRequest("editor_id doesn't match".to_string()).into(), + ); + } + let auth_context = self.auth_confectionary.require_auth( + &conn, + &context.auth_data, + Some("update_editor"), + )?; + let 
editor_id = FatCatId::from_str(&editor_id)?; + // DANGER! these permissions are for username updates only! + if editor_id == auth_context.editor_id { + // self edit of username allowed + auth_context.require_role(FatcatRole::Editor)?; + } else { + // admin can update any username + auth_context.require_role(FatcatRole::Admin)?; + }; + update_editor_username(&conn, editor_id, editor.username).map(|e| e.into_model()) + }) { + Ok(editor) => UpdateEditorResponse::UpdatedEditor(editor), + Err(Error(ErrorKind::Diesel(e), _)) => { + UpdateEditorResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::Uuid(e), _)) => UpdateEditorResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => { + UpdateEditorResponse::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string(), + }) + } + Err(Error(ErrorKind::MalformedExternalId(e), _)) => { + UpdateEditorResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? + { + UpdateEditorResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { + UpdateEditorResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::OtherBadRequest(e), _)) => { + UpdateEditorResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => { + error!("{}", e); + UpdateEditorResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn accept_editgroup( + &self, + editgroup_id: String, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let editgroup_id = FatCatId::from_str(&editgroup_id)?; + let auth_context = self.auth_confectionary.require_auth( + &conn, + &context.auth_data, + Some("accept_editgroup"), + )?; + auth_context.require_role(FatcatRole::Admin)?; + // NOTE: this is currently redundant, but zero-cost + auth_context.require_editgroup(&conn, editgroup_id)?; + self.accept_editgroup_handler(editgroup_id, &conn) + }) { + Ok(()) => AcceptEditgroupResponse::MergedSuccessfully(Success { + message: "horray!".to_string(), + }), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { + AcceptEditgroupResponse::NotFound(ErrorResponse { + message: format!("No such editgroup: {}", editgroup_id), + }) + } + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => { + AcceptEditgroupResponse::BadRequest(ErrorResponse { + message: ErrorKind::EditgroupAlreadyAccepted(e).to_string(), + }) + } + Err(Error(ErrorKind::InvalidCredentials(e), _)) => { + AcceptEditgroupResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { + AcceptEditgroupResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => AcceptEditgroupResponse::GenericError(ErrorResponse { + message: e.to_string(), + }), + }; + Box::new(futures::done(Ok(ret))) + } + + fn get_editgroup( + &self, + editgroup_id: String, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let editgroup_id = FatCatId::from_str(&editgroup_id)?; + self.get_editgroup_handler(editgroup_id, &conn) + }) { + Ok(entity) => GetEditgroupResponse::Found(entity), 
+ Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { + GetEditgroupResponse::NotFound(ErrorResponse { + message: format!("No such editgroup: {}", editgroup_id), + }) + } + Err(e) => + // TODO: dig in to error type here + { + GetEditgroupResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn create_editgroup( + &self, + entity: models::Editgroup, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth( + &conn, + &context.auth_data, + Some("create_editgroup"), + )?; + auth_context.require_role(FatcatRole::Editor)?; + let mut entity = entity.clone(); + match entity.editor_id.clone() { + Some(editor_id) => { + if !auth_context.has_role(FatcatRole::Admin) { + if editor_id != auth_context.editor_id.to_string() { + bail!("not authorized to create editgroups in others' names"); + } + } + } + None => { + entity.editor_id = Some(auth_context.editor_id.to_string()); + } + }; + self.create_editgroup_handler(entity, &conn) + }) { + Ok(eg) => CreateEditgroupResponse::SuccessfullyCreated(eg), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => { + CreateEditgroupResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { + CreateEditgroupResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => + // TODO: dig in to error type here + { + CreateEditgroupResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn get_changelog( + &self, + limit: Option, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match self.get_changelog_handler(limit, &conn) { + Ok(changelog) => GetChangelogResponse::Success(changelog), + Err(e) => { + error!("{}", e); + GetChangelogResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn get_changelog_entry( + &self, + id: i64, + _context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match self.get_changelog_entry_handler(id, &conn) { + Ok(entry) => GetChangelogEntryResponse::FoundChangelogEntry(entry), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { + GetChangelogEntryResponse::NotFound(ErrorResponse { + message: format!("No such changelog entry: {}", id), + }) + } + Err(e) => { + error!("{}", e); + GetChangelogEntryResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn auth_oidc( + &self, + params: models::AuthOidc, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth( + &conn, + &context.auth_data, + Some("auth_oidc"), + )?; + auth_context.require_role(FatcatRole::Superuser)?; + let (editor, created) = self.auth_oidc_handler(params, &conn)?; + // create an auth token with 31 day duration + let token = self.auth_confectionary.create_token( + FatCatId::from_str(&editor.editor_id.clone().unwrap())?, + Some(chrono::Duration::days(31)), + )?; + let result = AuthOidcResult { editor, token }; + Ok((result, created)) + }) { + 
Ok((result, true)) => AuthOidcResponse::Created(result), + Ok((result, false)) => AuthOidcResponse::Found(result), + Err(Error(ErrorKind::Diesel(e), _)) => AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }), + Err(Error(ErrorKind::Uuid(e), _)) => AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string(), + }) + } + Err(Error(ErrorKind::MalformedExternalId(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::MalformedChecksum(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::NotInControlledVocabulary(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? + { + AuthOidcResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { + AuthOidcResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::OtherBadRequest(e), _)) => { + AuthOidcResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => { + error!("{}", e); + AuthOidcResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } + + fn auth_check( + &self, + role: Option, + context: &Context, + ) -> Box + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| { + let auth_context = self.auth_confectionary.require_auth( + &conn, + &context.auth_data, + Some("auth_check"), + )?; + if let Some(role) = role { + let role = match role.to_lowercase().as_ref() { + "superuser" => FatcatRole::Superuser, + "admin" => FatcatRole::Admin, + "editor" => FatcatRole::Editor, + "bot" => FatcatRole::Bot, + "human" => FatcatRole::Human, + "public" => FatcatRole::Public, + _ => bail!("unknown auth role: {}", role), + }; + auth_context.require_role(role)?; + }; + Ok(()) + }) { + Ok(()) => AuthCheckResponse::Success(Success { + message: "auth check successful!".to_string(), + }), + Err(Error(ErrorKind::Diesel(e), _)) => AuthCheckResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }), + Err(Error(ErrorKind::Uuid(e), _)) => AuthCheckResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }), + Err(Error(ErrorKind::InvalidCredentials(e), _)) => + // TODO: why can't I NotAuthorized here? 
+ { + AuthCheckResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::InsufficientPrivileges(e), _)) => { + AuthCheckResponse::Forbidden(ErrorResponse { + message: e.to_string(), + }) + } + Err(Error(ErrorKind::OtherBadRequest(e), _)) => { + AuthCheckResponse::BadRequest(ErrorResponse { + message: e.to_string(), + }) + } + Err(e) => { + error!("{}", e); + AuthCheckResponse::GenericError(ErrorResponse { + message: e.to_string(), + }) + } + }; + Box::new(futures::done(Ok(ret))) + } +} diff --git a/rust/src/entity_crud.rs b/rust/src/entity_crud.rs new file mode 100644 index 00000000..d5c8081b --- /dev/null +++ b/rust/src/entity_crud.rs @@ -0,0 +1,2162 @@ +use crate::database_models::*; +use crate::database_schema::*; +use crate::editing::*; +use crate::endpoint_handlers::get_release_files; +use crate::errors::*; +use crate::identifiers::*; +use crate::server::*; +use chrono; +use diesel::prelude::*; +use diesel::{self, insert_into}; +use fatcat_api_spec::models::*; +use sha1::Sha1; +use std::marker::Sized; +use std::str::FromStr; +use uuid::Uuid; + +/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, + * instead of macros. + * + * Notably: + * + * db_get + * db_get_rev + * db_create + * db_create_batch + * db_update + * db_delete + * db_get_history + * db_get_edit + * db_delete_edit + * db_get_redirects + * db_accept_edits + * + * For now, these will probably be macros, until we can level up our trait/generics foo. + */ + +// Associated Type, not parametric +pub trait EntityCrud +where + Self: Sized, +{ + // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a + // specific table? + type EditRow; // EntityEditRow + type EditNewRow; + type IdentRow; // EntityIdentRow + type IdentNewRow; + type RevRow; + + // Generic Methods + fn from_deleted_row(ident_row: Self::IdentRow) -> Result; + fn db_get(conn: &DbConn, ident: FatCatId, hide: HideFlags) -> Result; + fn db_get_rev(conn: &DbConn, rev_id: Uuid, hide: HideFlags) -> Result; + fn db_expand(&mut self, conn: &DbConn, expand: ExpandFlags) -> Result<()>; + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result; + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result>; + fn db_update( + &self, + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result; + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result>; + fn db_get_edit(conn: &DbConn, edit_id: Uuid) -> Result; + fn db_delete_edit(conn: &DbConn, edit_id: Uuid) -> Result<()>; + fn db_get_redirects(conn: &DbConn, ident: FatCatId) -> Result>; + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result; + + // Entity-specific Methods + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + hide: HideFlags, + ) -> Result; + fn db_insert_rev(&self, conn: &DbConn) -> Result; + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result>; +} + +#[derive(Clone, Copy, PartialEq)] +pub struct ExpandFlags { + pub files: bool, + pub filesets: bool, + pub webcaptures: bool, + pub container: bool, + pub releases: bool, + pub creators: bool, +} + +impl FromStr for ExpandFlags { + type Err = Error; + fn from_str(param: &str) -> Result { + let list: Vec<&str> = param.split_terminator(",").collect(); + Ok(ExpandFlags::from_str_list(&list)) + } +} + 
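As context for the goal stated above of moving the non-entity-specific bits from macros into generic trait code, here is a minimal caller-side sketch built directly on the EntityCrud trait as declared. It is an illustration only, not part of the patch: the helper name is hypothetical, and it assumes the crate's existing DbConn, FatCatId, HideFlags, and Result types.

    // Hypothetical generic read helper; works for any type implementing EntityCrud
    // (ContainerEntity, CreatorEntity, FileEntity, ...), instead of a per-entity macro.
    fn db_get_entity<T: EntityCrud>(conn: &DbConn, ident: FatCatId) -> Result<T> {
        // HideFlags::none() requests the full entity; callers could instead parse a
        // query parameter via HideFlags::from_str(...).
        T::db_get(conn, ident, HideFlags::none())
    }

A hypothetical call site would then read: let container: ContainerEntity = db_get_entity(&conn, ident)?;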
+impl ExpandFlags { + pub fn from_str_list(list: &[&str]) -> ExpandFlags { + ExpandFlags { + files: list.contains(&"files"), + filesets: list.contains(&"filesets"), + webcaptures: list.contains(&"webcaptures"), + container: list.contains(&"container"), + releases: list.contains(&"releases"), + creators: list.contains(&"creators"), + } + } + pub fn none() -> ExpandFlags { + ExpandFlags { + files: false, + filesets: false, + webcaptures: false, + container: false, + releases: false, + creators: false, + } + } +} + +#[test] +fn test_expand_flags() { + assert!(ExpandFlags::from_str_list(&vec![]).files == false); + assert!(ExpandFlags::from_str_list(&vec!["files"]).files == true); + assert!(ExpandFlags::from_str_list(&vec!["file"]).files == false); + let all = ExpandFlags::from_str_list(&vec![ + "files", + "filesets", + "webcaptures", + "container", + "other_thing", + "releases", + "creators", + ]); + assert!( + all == ExpandFlags { + files: true, + filesets: true, + webcaptures: true, + container: true, + releases: true, + creators: true + } + ); + assert!(ExpandFlags::from_str("").unwrap().files == false); + assert!(ExpandFlags::from_str("files").unwrap().files == true); + assert!(ExpandFlags::from_str("something,,files").unwrap().files == true); + assert!(ExpandFlags::from_str("file").unwrap().files == false); + let all = + ExpandFlags::from_str("files,container,other_thing,releases,creators,filesets,webcaptures") + .unwrap(); + assert!( + all == ExpandFlags { + files: true, + filesets: true, + webcaptures: true, + container: true, + releases: true, + creators: true + } + ); +} + +#[derive(Clone, Copy, PartialEq)] +pub struct HideFlags { + // release + pub abstracts: bool, + pub refs: bool, + pub contribs: bool, + // fileset + pub manifest: bool, + // webcapture + pub cdx: bool, +} + +impl FromStr for HideFlags { + type Err = Error; + fn from_str(param: &str) -> Result { + let list: Vec<&str> = param.split_terminator(",").collect(); + Ok(HideFlags::from_str_list(&list)) + } +} + +impl HideFlags { + pub fn from_str_list(list: &[&str]) -> HideFlags { + HideFlags { + abstracts: list.contains(&"abstracts"), + refs: list.contains(&"refs"), + contribs: list.contains(&"contribs"), + manifest: list.contains(&"contribs"), + cdx: list.contains(&"contribs"), + } + } + pub fn none() -> HideFlags { + HideFlags { + abstracts: false, + refs: false, + contribs: false, + manifest: false, + cdx: false, + } + } +} + +#[test] +fn test_hide_flags() { + assert!(HideFlags::from_str_list(&vec![]).abstracts == false); + assert!(HideFlags::from_str_list(&vec!["abstracts"]).abstracts == true); + assert!(HideFlags::from_str_list(&vec!["abstract"]).abstracts == false); + let all = HideFlags::from_str_list(&vec![ + "abstracts", + "refs", + "other_thing", + "contribs", + "manifest", + "cdx", + ]); + assert!( + all == HideFlags { + abstracts: true, + refs: true, + contribs: true, + manifest: true, + cdx: true, + } + ); + assert!(HideFlags::from_str("").unwrap().abstracts == false); + assert!(HideFlags::from_str("abstracts").unwrap().abstracts == true); + assert!( + HideFlags::from_str("something,,abstracts") + .unwrap() + .abstracts + == true + ); + assert!(HideFlags::from_str("file").unwrap().abstracts == false); + let all = HideFlags::from_str("abstracts,cdx,refs,manifest,other_thing,contribs").unwrap(); + assert!( + all == HideFlags { + abstracts: true, + refs: true, + contribs: true, + manifest: true, + cdx: true, + } + ); +} + +macro_rules! 
generic_db_get { + ($ident_table:ident, $rev_table:ident) => { + fn db_get(conn: &DbConn, ident: FatCatId, hide: HideFlags) -> Result { + let res: Option<(Self::IdentRow, Self::RevRow)> = $ident_table::table + .find(ident.to_uuid()) + .inner_join($rev_table::table) + .first(conn) + .optional()?; + + match res { + Some((ident, rev)) => { + Self::db_from_row(conn, rev, Some(ident), hide) + }, + None => { + // return a stub (deleted) entity if it's just deleted state + let ident_row: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if ident_row.rev_id.is_none() { + Self::from_deleted_row(ident_row) + } else { + bail!("unexpected condition: entity ident/rev join failed, yet row isn't in deleted state") + } + }, + } + } + }; +} + +macro_rules! generic_db_get_rev { + ($rev_table:ident) => { + fn db_get_rev(conn: &DbConn, rev_id: Uuid, hide: HideFlags) -> Result { + let rev = $rev_table::table.find(rev_id).first(conn)?; + + Self::db_from_row(conn, rev, None, hide) + } + }; +} + +macro_rules! generic_db_expand { + () => { + fn db_expand(&mut self, _conn: &DbConn, _expand: ExpandFlags) -> Result<()> { + Ok(()) + } + }; +} + +macro_rules! generic_db_create { + // TODO: this path should call generic_db_create_batch + ($ident_table: ident, $edit_table: ident) => { + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + if self.redirect.is_some() { + return Err(ErrorKind::OtherBadRequest( + "can't create an entity that redirects from the start".to_string()).into()); + } + let rev_id = self.db_insert_rev(conn)?; + let ident: Uuid = insert_into($ident_table::table) + .values($ident_table::rev_id.eq(&rev_id)) + .returning($ident_table::id) + .get_result(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::ident_id.eq(&ident), + )) + .get_result(conn)?; + Ok(edit) + } + } +} + +macro_rules! generic_db_create_batch { + ($ident_table:ident, $edit_table:ident) => { + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + if models.iter().any(|m| m.redirect.is_some()) { + return Err(ErrorKind::OtherBadRequest( + "can't create an entity that redirects from the start".to_string(), + ) + .into()); + } + let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into($ident_table::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning($ident_table::id) + .get_results(conn)?; + let edits: Vec = insert_into($edit_table::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + }; +} + +macro_rules! generic_db_update { + ($ident_table: ident, $edit_table: ident) => { + fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + let no_redirect: Option = None; + // TODO: is this actually true? or should we allow updates in the same editgroup? 
+ if current.is_live != true { + return Err(ErrorKind::InvalidEntityStateTransform( + "can't update an entity that doesn't exist yet".to_string()).into()); + } + // Don't set prev_rev if current status is redirect + let prev_rev = match current.redirect_id { + Some(_) => None, + None => current.rev_id, + }; + + if self.state.is_none() { + + if Some(ident.to_string()) == self.redirect { + return Err(ErrorKind::OtherBadRequest( + "tried to redirect entity to itself".to_string()).into()); + } + // special case: redirect to another entity + if let Some(ref redirect_ident) = self.redirect { + let redirect_ident = FatCatId::from_str(&redirect_ident)?.to_uuid(); + if Some(redirect_ident) == current.redirect_id { + return Err(ErrorKind::OtherBadRequest( + "redundantly redirecting entity to it's current target currently isn't supported".to_string()).into()); + } + // TODO: if we get a diesel not-found here, should be a special error response? + let target: Self::IdentRow = $ident_table::table.find(redirect_ident).first(conn)?; + if target.is_live != true { + // there is no race condition on this check because WIP -> is_live=true is + // a one-way operation + // XXX: + return Err(ErrorKind::OtherBadRequest( + "attempted to redirect to a WIP entity".to_string()).into()); + } + // Note: there is a condition where the target is already a redirect, but we + // don't handle that here because the state of the redirect could change before + // we accept this editgroup + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(target.rev_id), + $edit_table::redirect_id.eq(redirect_ident), + $edit_table::prev_rev.eq(prev_rev), + $edit_table::extra_json.eq(&self.edit_extra), + )) + .get_result(conn)?; + return Ok(edit) + } + // special case: revert to point to an existing revision + if let Some(ref rev_id) = self.revision { + let rev_id = Uuid::from_str(&rev_id)?; + if Some(rev_id) == current.rev_id { + return Err(ErrorKind::OtherBadRequest( + "reverted entity to it's current state; this isn't currently supported".to_string()).into()); + } + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::redirect_id.eq(no_redirect), + $edit_table::prev_rev.eq(prev_rev), + $edit_table::extra_json.eq(&self.edit_extra), + )) + .get_result(conn)?; + return Ok(edit) + } + } + + // regular insert/update + let rev_id = self.db_insert_rev(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::redirect_id.eq(no_redirect), + $edit_table::prev_rev.eq(prev_rev), + $edit_table::extra_json.eq(&self.edit_extra), + )) + .get_result(conn)?; + Ok(edit) + } + } +} + +macro_rules! 
generic_db_delete { + ($ident_table:ident, $edit_table:ident) => { + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + return Err(ErrorKind::InvalidEntityStateTransform( + "can't update an entity that doesn't exist yet; delete edit object instead" + .to_string(), + ) + .into()); + } + if current.state()? == EntityState::Deleted { + return Err(ErrorKind::InvalidEntityStateTransform( + "entity was already deleted".to_string(), + ) + .into()); + } + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(ident.to_uuid()), + $edit_table::rev_id.eq(None::), + $edit_table::redirect_id.eq(None::), + $edit_table::prev_rev.eq(current.rev_id), + $edit_table::extra_json.eq(&edit_context.extra_json), + )) + .get_result(conn)?; + + Ok(edit) + } + }; +} + +macro_rules! generic_db_get_history { + ($edit_table:ident) => { + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option, + ) -> Result> { + let limit = limit.unwrap_or(50); // TODO: make a static + + let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table + .inner_join(changelog::table) + .inner_join($edit_table::table) + .filter($edit_table::ident_id.eq(ident.to_uuid())) + .order(changelog::id.desc()) + .limit(limit) + .get_results(conn)?; + + let history: Result> = rows + .into_iter() + .map(|(eg_row, cl_row, e_row)| { + Ok(EntityHistoryEntry { + edit: e_row.into_model()?, + editgroup: eg_row.into_model_partial(), + changelog_entry: cl_row.into_model(), + }) + }) + .collect(); + history + } + }; +} + +macro_rules! generic_db_get_edit { + ($edit_table:ident) => { + fn db_get_edit(conn: &DbConn, edit_id: Uuid) -> Result { + Ok($edit_table::table.find(edit_id).first(conn)?) + } + }; +} + +macro_rules! generic_db_delete_edit { + ($edit_table:ident) => { + /// This method assumes the connection is already in a transaction + fn db_delete_edit(conn: &DbConn, edit_id: Uuid) -> Result<()> { + // ensure that edit hasn't been accepted + let accepted_rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table + .inner_join(changelog::table) + .inner_join($edit_table::table) + .filter($edit_table::id.eq(edit_id)) + .limit(1) + .get_results(conn)?; + if accepted_rows.len() != 0 { + return Err(ErrorKind::EditgroupAlreadyAccepted( + "attempted to delete an already accepted edit".to_string(), + ) + .into()); + } + diesel::delete($edit_table::table.filter($edit_table::id.eq(edit_id))).execute(conn)?; + Ok(()) + } + }; +} + +macro_rules! 
generic_db_get_redirects { + ($ident_table:ident) => { + fn db_get_redirects(conn: &DbConn, ident: FatCatId) -> Result> { + let res: Vec = $ident_table::table + .select($ident_table::id) + .filter($ident_table::redirect_id.eq(ident.to_uuid())) + .get_results(conn)?; + Ok(res.iter().map(|u| FatCatId::from_uuid(u)).collect()) + } + }; +} + +/* +// This would be the clean and efficient way, but see: +// https://github.com/diesel-rs/diesel/issues/1478 +// + diesel::update(container_ident::table) + .inner_join(container_edit::table.on( + container_ident::id.eq(container_edit::ident_id) + )) + .filter(container_edit::editgroup_id.eq(editgroup_id)) + .values(( + container_ident::is_live.eq(true), + container_ident::rev_id.eq(container_edit::rev_id), + container_ident::redirect_id.eq(container_edit::redirect_id), + )) + .execute()?; + +// Was previously: + + for entity in &["container", "creator", "file", "work", "release"] { + diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = entity + )).bind::(editgroup_id) + .execute(conn)?; +*/ + +// UPDATE FROM version: single query for many rows +// Works with Postgres, not Cockroach +#[allow(unused_macros)] +macro_rules! generic_db_accept_edits_batch { + ($entity_name_str:expr, $ident_table:ident, $edit_table:ident) => { + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { + // NOTE: the checks and redirects can be skipped for accepts that are all inserts + // (which I guess we only know for batch inserts with auto-accept?) + + // assert that we aren't redirecting to anything which is a redirect already + let forward_recursive_redirects: i64 = $edit_table::table + .inner_join( + $ident_table::table + .on($edit_table::redirect_id.eq($ident_table::id.nullable())), + ) + .filter($edit_table::redirect_id.is_not_null()) + .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) + .filter($ident_table::redirect_id.is_not_null()) + .count() + .get_result(conn)?; + if forward_recursive_redirects != 0 { + // TODO: revert transaction? + return Err(ErrorKind::OtherBadRequest( + "one or more (forward) recurisve redirects".to_string(), + ) + .into()); + } + + // assert that we aren't redirecting while something already redirects to us + let backward_recursive_redirects: i64 = $ident_table::table + .inner_join( + $edit_table::table + .on($ident_table::redirect_id.eq($edit_table::ident_id.nullable())), + ) + .filter($ident_table::redirect_id.is_not_null()) + .filter($edit_table::editgroup_id.eq(editgroup_id.to_uuid())) + .filter($edit_table::redirect_id.is_not_null()) + .count() + .get_result(conn)?; + if backward_recursive_redirects != 0 { + // TODO: revert transaction? 
+ return Err(ErrorKind::OtherBadRequest( + "one or more (backward) recurisve redirects".to_string(), + ) + .into()); + } + + let count = diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + is_live = true, + rev_id = {entity}_edit.rev_id, + redirect_id = {entity}_edit.redirect_id + FROM {entity}_edit + WHERE + {entity}_ident.id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = $entity_name_str + )) + .bind::(editgroup_id.to_uuid()) + .execute(conn)?; + + // update any/all redirects for updated entities + let _redir_count = diesel::sql_query(format!( + " + UPDATE {entity}_ident + SET + rev_id = {entity}_edit.rev_id + FROM {entity}_edit + WHERE + {entity}_ident.redirect_id = {entity}_edit.ident_id + AND {entity}_edit.editgroup_id = $1", + entity = $entity_name_str + )) + .bind::(editgroup_id.to_uuid()) + .execute(conn)?; + Ok(count as u64) + } + }; +} + +// UPDATE ROW version: single query per row +// CockroachDB version (slow, single query per row) +#[allow(unused_macros)] +macro_rules! generic_db_accept_edits_each { + ($ident_table:ident, $edit_table:ident) => { + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result { + // 1. select edit rows (in sql) + let edit_rows: Vec = $edit_table::table + .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) + .get_results(conn)?; + // 2. create ident rows (in rust) + let ident_rows: Vec = edit_rows + .iter() + .map(|edit| Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + }) + .collect(); + /* + // 3. upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ + // 3. update every row individually + let count = ident_rows.len() as u64; + for row in ident_rows { + diesel::update(&row).set(&row).execute(conn)?; + } + Ok(count) + } + }; +} + +macro_rules! 
generic_db_insert_rev { + () => { + fn db_insert_rev(&self, conn: &DbConn) -> Result { + Self::db_insert_revs(conn, &[self]).map(|id_list| id_list[0]) + } + } +} + +impl EntityCrud for ContainerEntity { + type EditRow = ContainerEditRow; + type EditNewRow = ContainerEditNewRow; + type IdentRow = ContainerIdentRow; + type IdentNewRow = ContainerIdentNewRow; + type RevRow = ContainerRevRow; + + generic_db_get!(container_ident, container_rev); + generic_db_get_rev!(container_rev); + generic_db_expand!(); + generic_db_create!(container_ident, container_edit); + generic_db_create_batch!(container_ident, container_edit); + generic_db_update!(container_ident, container_edit); + generic_db_delete!(container_ident, container_edit); + generic_db_get_history!(container_edit); + generic_db_get_edit!(container_edit); + generic_db_delete_edit!(container_edit); + generic_db_get_redirects!(container_ident); + generic_db_accept_edits_batch!("container", container_ident, container_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(ContainerEntity { + issnl: None, + wikidata_qid: None, + publisher: None, + name: None, + abbrev: None, + coden: None, + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + extra: None, + edit_extra: None, + }) + } + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + _hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(ContainerEntity { + issnl: rev_row.issnl, + wikidata_qid: rev_row.wikidata_qid, + publisher: rev_row.publisher, + name: Some(rev_row.name), + abbrev: rev_row.abbrev, + coden: rev_row.coden, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + if let Some(ref extid) = entity.issnl { + check_issn(extid)?; + } + } + + if models.iter().any(|m| m.name.is_none()) { + return Err(ErrorKind::OtherBadRequest( + "name is required for all Container entities".to_string(), + ) + .into()); + } + + let rev_ids: Vec = insert_into(container_rev::table) + .values( + models + .iter() + .map(|model| ContainerRevNewRow { + name: model.name.clone().unwrap(), // unwrap checked above + publisher: model.publisher.clone(), + issnl: model.issnl.clone(), + wikidata_qid: model.wikidata_qid.clone(), + abbrev: model.abbrev.clone(), + coden: model.coden.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(container_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for CreatorEntity { + type EditRow = CreatorEditRow; + type EditNewRow = CreatorEditNewRow; + type IdentRow = CreatorIdentRow; + type IdentNewRow = CreatorIdentNewRow; + type RevRow = CreatorRevRow; + + 
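    // (Descriptive aside, not from the original patch: each generic_db_*! invocation below
    // expands to one EntityCrud trait method for CreatorEntity, with the creator_ident /
    // creator_rev / creator_edit tables substituted for the macro arguments, mirroring the
    // ContainerEntity impl above.)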
generic_db_get!(creator_ident, creator_rev); + generic_db_get_rev!(creator_rev); + generic_db_expand!(); + generic_db_create!(creator_ident, creator_edit); + generic_db_create_batch!(creator_ident, creator_edit); + generic_db_update!(creator_ident, creator_edit); + generic_db_delete!(creator_ident, creator_edit); + generic_db_get_history!(creator_edit); + generic_db_get_edit!(creator_edit); + generic_db_delete_edit!(creator_edit); + generic_db_get_redirects!(creator_ident); + generic_db_accept_edits_batch!("creator", creator_ident, creator_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(CreatorEntity { + extra: None, + edit_extra: None, + display_name: None, + given_name: None, + surname: None, + orcid: None, + wikidata_qid: None, + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + }) + } + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + _hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + Ok(CreatorEntity { + display_name: Some(rev_row.display_name), + given_name: rev_row.given_name, + surname: rev_row.surname, + orcid: rev_row.orcid, + wikidata_qid: rev_row.wikidata_qid, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.orcid { + check_orcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + if models.iter().any(|m| m.display_name.is_none()) { + return Err(ErrorKind::OtherBadRequest( + "display_name is required for all Creator entities".to_string(), + ) + .into()); + } + + let rev_ids: Vec = insert_into(creator_rev::table) + .values( + models + .iter() + .map(|model| CreatorRevNewRow { + display_name: model.display_name.clone().unwrap(), // unwrapped checked above + given_name: model.given_name.clone(), + surname: model.surname.clone(), + orcid: model.orcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(creator_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for FileEntity { + type EditRow = FileEditRow; + type EditNewRow = FileEditNewRow; + type IdentRow = FileIdentRow; + type IdentNewRow = FileIdentNewRow; + type RevRow = FileRevRow; + + generic_db_get!(file_ident, file_rev); + generic_db_get_rev!(file_rev); + generic_db_expand!(); + generic_db_create!(file_ident, file_edit); + generic_db_create_batch!(file_ident, file_edit); + generic_db_update!(file_ident, file_edit); + generic_db_delete!(file_ident, file_edit); + generic_db_get_history!(file_edit); + generic_db_get_edit!(file_edit); + generic_db_delete_edit!(file_edit); + generic_db_get_redirects!(file_ident); + generic_db_accept_edits_batch!("file", file_ident, 
file_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(FileEntity { + sha1: None, + sha256: None, + md5: None, + size: None, + urls: None, + mimetype: None, + release_ids: None, + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + extra: None, + edit_extra: None, + }) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + _hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let urls: Vec = file_rev_url::table + .filter(file_rev_url::file_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FileRevUrlRow| FileEntityUrls { + rel: r.rel, + url: r.url, + }) + .collect(); + + let release_ids: Vec = file_rev_release::table + .filter(file_rev_release::file_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FileRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + Ok(FileEntity { + sha1: rev_row.sha1, + sha256: rev_row.sha256, + md5: rev_row.md5, + size: rev_row.size_bytes.map(|v| v as i64), + urls: Some(urls), + mimetype: rev_row.mimetype, + release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify hash syntax + for entity in models { + if let Some(ref hash) = entity.md5 { + check_md5(hash)?; + } + if let Some(ref hash) = entity.sha1 { + check_sha1(hash)?; + } + if let Some(ref hash) = entity.sha256 { + check_sha256(hash)?; + } + } + + let rev_ids: Vec = insert_into(file_rev::table) + .values( + models + .iter() + .map(|model| FileRevNewRow { + size_bytes: model.size, + sha1: model.sha1.clone(), + sha256: model.sha256.clone(), + md5: model.md5.clone(), + mimetype: model.mimetype.clone(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(file_rev::id) + .get_results(conn)?; + + let mut file_rev_release_rows: Vec = vec![]; + let mut file_url_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.release_ids { + None => (), + Some(release_list) => { + let these_release_rows: Result> = release_list + .iter() + .map(|r| { + Ok(FileRevReleaseRow { + file_rev: *rev_id, + target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), + }) + }) + .collect(); + file_rev_release_rows.extend(these_release_rows?); + } + }; + + match &model.urls { + None => (), + Some(url_list) => { + let these_url_rows: Vec = url_list + .into_iter() + .map(|u| FileRevUrlNewRow { + file_rev: *rev_id, + rel: u.rel.clone(), + url: u.url.clone(), + }) + .collect(); + file_url_rows.extend(these_url_rows); + } + }; + } + + if !file_rev_release_rows.is_empty() { + insert_into(file_rev_release::table) + .values(file_rev_release_rows) + .execute(conn)?; + } + + if !file_url_rows.is_empty() { + 
insert_into(file_rev_url::table) + .values(file_url_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for FilesetEntity { + type EditRow = FilesetEditRow; + type EditNewRow = FilesetEditNewRow; + type IdentRow = FilesetIdentRow; + type IdentNewRow = FilesetIdentNewRow; + type RevRow = FilesetRevRow; + + generic_db_get!(fileset_ident, fileset_rev); + generic_db_get_rev!(fileset_rev); + generic_db_expand!(); + generic_db_create!(fileset_ident, fileset_edit); + generic_db_create_batch!(fileset_ident, fileset_edit); + generic_db_update!(fileset_ident, fileset_edit); + generic_db_delete!(fileset_ident, fileset_edit); + generic_db_get_history!(fileset_edit); + generic_db_get_edit!(fileset_edit); + generic_db_delete_edit!(fileset_edit); + generic_db_get_redirects!(fileset_ident); + generic_db_accept_edits_batch!("fileset", fileset_ident, fileset_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(FilesetEntity { + manifest: None, + urls: None, + release_ids: None, + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + extra: None, + edit_extra: None, + }) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + _hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let manifest: Vec = fileset_rev_file::table + .filter(fileset_rev_file::fileset_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FilesetRevFileRow| FilesetEntityManifest { + path: r.path_name, + size: r.size_bytes, + md5: r.md5, + sha1: r.sha1, + sha256: r.sha256, + extra: r.extra_json, + }) + .collect(); + + let urls: Vec = fileset_rev_url::table + .filter(fileset_rev_url::fileset_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FilesetRevUrlRow| FileEntityUrls { + rel: r.rel, + url: r.url, + }) + .collect(); + + let release_ids: Vec = fileset_rev_release::table + .filter(fileset_rev_release::fileset_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map(|r: FilesetRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + Ok(FilesetEntity { + manifest: Some(manifest), + urls: Some(urls), + release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify hash syntax + for entity in models { + if let Some(ref manifest) = entity.manifest { + for file in manifest { + if let Some(ref hash) = file.md5 { + check_md5(hash)?; + } + if let Some(ref hash) = file.sha1 { + check_sha1(hash)?; + } + if let Some(ref hash) = file.sha256 { + check_sha256(hash)?; + } + } + } + } + + let rev_ids: Vec = insert_into(fileset_rev::table) + .values( + models + .iter() + .map(|model| FilesetRevNewRow { + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(fileset_rev::id) + .get_results(conn)?; + + let mut fileset_file_rows: Vec = vec![]; + let mut fileset_url_rows: Vec = vec![]; + let mut fileset_release_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.manifest { + None => (), + Some(file_list) => { + let these_file_rows: Vec = file_list + .into_iter() + .map(|f| FilesetRevFileNewRow { + fileset_rev: *rev_id, + path_name: f.path.clone(), + size_bytes: f.size, + md5: f.md5.clone(), + sha1: f.sha1.clone(), + sha256: f.sha256.clone(), + extra_json: f.extra.clone(), + }) + .collect(); + fileset_file_rows.extend(these_file_rows); + } + }; + + match &model.urls { + None => (), + Some(url_list) => { + let these_url_rows: Vec = url_list + .into_iter() + .map(|u| FilesetRevUrlNewRow { + fileset_rev: *rev_id, + rel: u.rel.clone(), + url: u.url.clone(), + }) + .collect(); + fileset_url_rows.extend(these_url_rows); + } + }; + + match &model.release_ids { + None => (), + Some(release_list) => { + let these_release_rows: Result> = release_list + .iter() + .map(|r| { + Ok(FilesetRevReleaseRow { + fileset_rev: *rev_id, + target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), + }) + }) + .collect(); + fileset_release_rows.extend(these_release_rows?); + } + }; + } + + if !fileset_file_rows.is_empty() { + insert_into(fileset_rev_file::table) + .values(fileset_file_rows) + .execute(conn)?; + } + + if !fileset_url_rows.is_empty() { + insert_into(fileset_rev_url::table) + .values(fileset_url_rows) + .execute(conn)?; + } + + if !fileset_release_rows.is_empty() { + insert_into(fileset_rev_release::table) + .values(fileset_release_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for WebcaptureEntity { + type EditRow = WebcaptureEditRow; + type EditNewRow = WebcaptureEditNewRow; + type IdentRow = WebcaptureIdentRow; + type IdentNewRow = WebcaptureIdentNewRow; + type RevRow = WebcaptureRevRow; + + generic_db_get!(webcapture_ident, webcapture_rev); + generic_db_get_rev!(webcapture_rev); + generic_db_expand!(); + generic_db_create!(webcapture_ident, webcapture_edit); + generic_db_create_batch!(webcapture_ident, webcapture_edit); + generic_db_update!(webcapture_ident, webcapture_edit); + generic_db_delete!(webcapture_ident, webcapture_edit); + generic_db_get_history!(webcapture_edit); + generic_db_get_edit!(webcapture_edit); + generic_db_delete_edit!(webcapture_edit); + generic_db_get_redirects!(webcapture_ident); + generic_db_accept_edits_batch!("webcapture", webcapture_ident, webcapture_edit); + 
generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(WebcaptureEntity { + cdx: None, + archive_urls: None, + original_url: None, + timestamp: None, + release_ids: None, + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + extra: None, + edit_extra: None, + }) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + _hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let cdx: Vec = webcapture_rev_cdx::table + .filter(webcapture_rev_cdx::webcapture_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|c: WebcaptureRevCdxRow| WebcaptureEntityCdx { + surt: c.surt, + timestamp: c.timestamp, + url: c.url, + mimetype: c.mimetype, + status_code: c.status_code, + sha1: c.sha1, + sha256: c.sha256, + }) + .collect(); + + let archive_urls: Vec = webcapture_rev_url::table + .filter(webcapture_rev_url::webcapture_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: WebcaptureRevUrlRow| WebcaptureEntityArchiveUrls { + rel: r.rel, + url: r.url, + }) + .collect(); + + let release_ids: Vec = webcapture_rev_release::table + .filter(webcapture_rev_release::webcapture_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: WebcaptureRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + Ok(WebcaptureEntity { + cdx: Some(cdx), + archive_urls: Some(archive_urls), + original_url: Some(rev_row.original_url), + timestamp: Some(chrono::DateTime::from_utc(rev_row.timestamp, chrono::Utc)), + release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()), + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify hash syntax, and presence of required fields + for entity in models { + if let Some(ref cdx) = entity.cdx { + for row in cdx { + check_sha1(&row.sha1)?; + if let Some(ref hash) = row.sha256 { + check_sha256(hash)?; + } + } + } + if entity.timestamp.is_none() || entity.original_url.is_none() { + return Err(ErrorKind::OtherBadRequest( + "timestamp and original_url are required for webcapture entities".to_string(), + ) + .into()); + } + } + + let rev_ids: Vec = insert_into(webcapture_rev::table) + .values( + models + .iter() + .map(|model| WebcaptureRevNewRow { + // these unwraps safe because of check above + original_url: model.original_url.clone().unwrap(), + timestamp: model.timestamp.unwrap().naive_utc(), + extra_json: model.extra.clone(), + }) + .collect::>(), + ) + .returning(webcapture_rev::id) + .get_results(conn)?; + + let mut webcapture_cdx_rows: Vec = vec![]; + let mut webcapture_url_rows: Vec = vec![]; + let mut webcapture_release_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.cdx { + None => (), + Some(cdx_list) => { + let these_cdx_rows: Vec = cdx_list + .into_iter() + 
.map(|c| WebcaptureRevCdxNewRow { + webcapture_rev: *rev_id, + surt: c.surt.clone(), + timestamp: c.timestamp.clone(), + url: c.url.clone(), + mimetype: c.mimetype.clone(), + status_code: c.status_code, + sha1: c.sha1.clone(), + sha256: c.sha256.clone(), + }) + .collect(); + webcapture_cdx_rows.extend(these_cdx_rows); + } + }; + + match &model.archive_urls { + None => (), + Some(url_list) => { + let these_url_rows: Vec = url_list + .into_iter() + .map(|u| WebcaptureRevUrlNewRow { + webcapture_rev: *rev_id, + rel: u.rel.clone(), + url: u.url.clone(), + }) + .collect(); + webcapture_url_rows.extend(these_url_rows); + } + }; + + match &model.release_ids { + None => (), + Some(release_list) => { + let these_release_rows: Result> = release_list + .iter() + .map(|r| { + Ok(WebcaptureRevReleaseRow { + webcapture_rev: *rev_id, + target_release_ident_id: FatCatId::from_str(r)?.to_uuid(), + }) + }) + .collect(); + webcapture_release_rows.extend(these_release_rows?); + } + }; + } + + if !webcapture_cdx_rows.is_empty() { + insert_into(webcapture_rev_cdx::table) + .values(webcapture_cdx_rows) + .execute(conn)?; + } + + if !webcapture_url_rows.is_empty() { + insert_into(webcapture_rev_url::table) + .values(webcapture_url_rows) + .execute(conn)?; + } + + if !webcapture_release_rows.is_empty() { + insert_into(webcapture_rev_release::table) + .values(webcapture_release_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for ReleaseEntity { + type EditRow = ReleaseEditRow; + type EditNewRow = ReleaseEditNewRow; + type IdentRow = ReleaseIdentRow; + type IdentNewRow = ReleaseIdentNewRow; + type RevRow = ReleaseRevRow; + + generic_db_get!(release_ident, release_rev); + generic_db_get_rev!(release_rev); + generic_db_update!(release_ident, release_edit); + generic_db_delete!(release_ident, release_edit); + generic_db_get_history!(release_edit); + generic_db_get_edit!(release_edit); + generic_db_delete_edit!(release_edit); + generic_db_get_redirects!(release_ident); + generic_db_accept_edits_batch!("release", release_ident, release_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(ReleaseEntity { + title: None, + release_type: None, + release_status: None, + release_date: None, + release_year: None, + doi: None, + pmid: None, + pmcid: None, + isbn13: None, + core_id: None, + wikidata_qid: None, + volume: None, + issue: None, + pages: None, + files: None, + filesets: None, + webcaptures: None, + container: None, + container_id: None, + publisher: None, + language: None, + work_id: None, + refs: None, + contribs: None, + abstracts: None, + + state: Some(ident_row.state().unwrap().shortname()), + ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()), + revision: ident_row.rev_id.map(|u| u.to_string()), + redirect: ident_row + .redirect_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + extra: None, + edit_extra: None, + }) + } + + fn db_expand(&mut self, conn: &DbConn, expand: ExpandFlags) -> Result<()> { + // Don't expand deleted entities + if self.state == Some("deleted".to_string()) { + return Ok(()); + } + // TODO: should clarify behavior here. Would hit this path, eg, expanding files on a + // release revision (not ident). Should we fail (Bad Request), or silently just not include + // any files? 
+ if expand.files && self.ident.is_some() { + let ident = match &self.ident { + None => bail!("Can't expand files on a non-concrete entity"), // redundant with above is_some() + Some(ident) => match &self.redirect { + // If we're a redirect, then expand for the *target* identifier, not *our* + // identifier. Tricky! + None => FatCatId::from_str(&ident)?, + Some(redir) => FatCatId::from_str(&redir)?, + }, + }; + self.files = Some(get_release_files(ident, HideFlags::none(), conn)?); + } + if expand.container { + if let Some(ref cid) = self.container_id { + self.container = Some(ContainerEntity::db_get( + conn, + FatCatId::from_str(&cid)?, + HideFlags::none(), + )?); + } + } + if expand.creators { + if let Some(ref mut contribs) = self.contribs { + for contrib in contribs { + if let Some(ref creator_id) = contrib.creator_id { + contrib.creator = Some(CreatorEntity::db_get( + conn, + FatCatId::from_str(creator_id)?, + HideFlags::none(), + )?); + } + } + } + } + Ok(()) + } + + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result { + if self.redirect.is_some() { + return Err(ErrorKind::OtherBadRequest( + "can't create an entity that redirects from the start".to_string(), + ) + .into()); + } + let mut edits = Self::db_create_batch(conn, edit_context, &[self])?; + // probably a more elegant way to destroy the vec and take first element + Ok(edits.pop().unwrap()) + } + + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result> { + // This isn't the generic implementation because we need to create Work entities for each + // of the release entities passed (at least in the common case) + if models.iter().any(|m| m.redirect.is_some()) { + return Err(ErrorKind::OtherBadRequest( + "can't create an entity that redirects from the start".to_string(), + ) + .into()); + } + + // Generate the set of new work entities to insert (usually one for each release, but some + // releases might be pointed to a work already) + let mut new_work_models: Vec<&WorkEntity> = vec![]; + for entity in models { + if entity.work_id.is_none() { + new_work_models.push(&WorkEntity { + ident: None, + revision: None, + redirect: None, + state: None, + extra: None, + edit_extra: None, + }); + }; + } + + // create the works, then pluck the list of idents from the result + let new_work_edits = + WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?; + let mut new_work_ids: Vec = new_work_edits.iter().map(|edit| edit.ident_id).collect(); + + // Copy all the release models, and ensure that each has work_id set, using the new work + // idents. There should be one new work ident for each release missing one. + let models_with_work_ids: Vec = models + .iter() + .map(|model| { + let mut model = (*model).clone(); + if model.work_id.is_none() { + model.work_id = + Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string()) + } + model + }) + .collect(); + let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect(); + let models = model_refs.as_slice(); + + // The rest here is copy/pasta from the generic (how to avoid copypasta?) 
+ let rev_ids: Vec = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec = insert_into(release_ident::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(*rev_id), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::>(), + ) + .returning(release_ident::id) + .get_results(conn)?; + let edits: Vec = insert_into(release_edit::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::>(), + ) + .get_results(conn)?; + Ok(edits) + } + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option, + hide: HideFlags, + ) -> Result { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let refs: Option> = match hide.refs { + true => None, + false => Some( + release_ref::table + .filter(release_ref::release_rev.eq(rev_row.id)) + .order(release_ref::index_val.asc()) + .get_results(conn)? + .into_iter() + .map(|r: ReleaseRefRow| ReleaseRef { + index: r.index_val.map(|v| v as i64), + key: r.key, + extra: r.extra_json, + container_name: r.container_name, + year: r.year.map(|v| v as i64), + title: r.title, + locator: r.locator, + target_release_id: r + .target_release_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + }) + .collect(), + ), + }; + + let contribs: Option> = match hide.contribs { + true => None, + false => Some( + release_contrib::table + .filter(release_contrib::release_rev.eq(rev_row.id)) + .order(( + release_contrib::role.asc(), + release_contrib::index_val.asc(), + )) + .get_results(conn)? + .into_iter() + .map(|c: ReleaseContribRow| ReleaseContrib { + index: c.index_val.map(|v| v as i64), + raw_name: c.raw_name, + role: c.role, + extra: c.extra_json, + creator_id: c + .creator_ident_id + .map(|v| FatCatId::from_uuid(&v).to_string()), + creator: None, + }) + .collect(), + ), + }; + + let abstracts: Option> = match hide.abstracts { + true => None, + false => Some( + release_rev_abstract::table + .inner_join(abstracts::table) + .filter(release_rev_abstract::release_rev.eq(rev_row.id)) + .get_results(conn)? 
+ .into_iter() + .map( + |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { + sha1: Some(r.0.abstract_sha1), + mimetype: r.0.mimetype, + lang: r.0.lang, + content: Some(r.1.content), + }, + ) + .collect(), + ), + }; + + Ok(ReleaseEntity { + title: Some(rev_row.title), + release_type: rev_row.release_type, + release_status: rev_row.release_status, + release_date: rev_row.release_date, + release_year: rev_row.release_year, + doi: rev_row.doi, + pmid: rev_row.pmid, + pmcid: rev_row.pmcid, + isbn13: rev_row.isbn13, + core_id: rev_row.core_id, + wikidata_qid: rev_row.wikidata_qid, + volume: rev_row.volume, + issue: rev_row.issue, + pages: rev_row.pages, + files: None, + filesets: None, + webcaptures: None, + container: None, + container_id: rev_row + .container_ident_id + .map(|u| FatCatId::from_uuid(&u).to_string()), + publisher: rev_row.publisher, + language: rev_row.language, + work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()), + refs: refs, + contribs: contribs, + abstracts: abstracts, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + edit_extra: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.doi { + check_doi(extid)?; + } + if let Some(ref extid) = entity.pmid { + check_pmid(extid)?; + } + if let Some(ref extid) = entity.pmcid { + check_pmcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + if let Some(ref release_type) = entity.release_type { + check_release_type(release_type)?; + } + if let Some(ref contribs) = entity.contribs { + for contrib in contribs { + if let Some(ref role) = contrib.role { + check_contrib_role(role)?; + } + } + } + } + + if models.iter().any(|m| m.title.is_none()) { + return Err(ErrorKind::OtherBadRequest( + "title is required for all Release entities".to_string(), + ) + .into()); + } + + let rev_ids: Vec = insert_into(release_rev::table) + .values( + models + .iter() + .map(|model| { + Ok(ReleaseRevNewRow { + title: model.title.clone().unwrap(), // titles checked above + release_type: model.release_type.clone(), + release_status: model.release_status.clone(), + release_date: model.release_date, + release_year: model.release_year, + doi: model.doi.clone(), + pmid: model.pmid.clone(), + pmcid: model.pmcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + isbn13: model.isbn13.clone(), + core_id: model.core_id.clone(), + volume: model.volume.clone(), + issue: model.issue.clone(), + pages: model.pages.clone(), + work_ident_id: match model.work_id.clone() { + None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"), + Some(s) => FatCatId::from_str(&s)?.to_uuid(), + }, + container_ident_id: match model.container_id.clone() { + None => None, + Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()), + }, + publisher: model.publisher.clone(), + language: model.language.clone(), + extra_json: model.extra.clone() + }) + }) + .collect::>>()?, + ) + .returning(release_rev::id) + .get_results(conn)?; + + let mut release_ref_rows: Vec = vec![]; + let mut release_contrib_rows: Vec = vec![]; + let mut abstract_rows: Vec = vec![]; + let mut release_abstract_rows: Vec = vec![]; + + for (model, rev_id) in models.iter().zip(rev_ids.iter()) { + match &model.refs { + None => (), + Some(ref_list) => { + let these_ref_rows: 
Vec = ref_list + .iter() + .map(|r| { + Ok(ReleaseRefNewRow { + release_rev: rev_id.clone(), + target_release_ident_id: match r.target_release_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + index_val: r.index.map(|v| v as i32), + key: r.key.clone(), + container_name: r.container_name.clone(), + year: r.year.map(|v| v as i32), + title: r.title.clone(), + locator: r.locator.clone(), + extra_json: r.extra.clone(), + }) + }) + .collect::>>()?; + release_ref_rows.extend(these_ref_rows); + } + }; + + match &model.contribs { + None => (), + Some(contrib_list) => { + let these_contrib_rows: Vec = contrib_list + .iter() + .map(|c| { + Ok(ReleaseContribNewRow { + release_rev: rev_id.clone(), + creator_ident_id: match c.creator_id.clone() { + None => None, + Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()), + }, + raw_name: c.raw_name.clone(), + index_val: c.index.map(|v| v as i32), + role: c.role.clone(), + extra_json: c.extra.clone(), + }) + }) + .collect::>>()?; + release_contrib_rows.extend(these_contrib_rows); + } + }; + + if let Some(abstract_list) = &model.abstracts { + // For rows that specify content, we need to insert the abstract if it doesn't exist + // already + let new_abstracts: Vec = abstract_list + .iter() + .filter(|ea| ea.content.is_some()) + .map(|c| AbstractsRow { + sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(), + content: c.content.clone().unwrap(), + }) + .collect(); + abstract_rows.extend(new_abstracts); + let new_release_abstract_rows: Vec = abstract_list + .into_iter() + .map(|c| { + Ok(ReleaseRevAbstractNewRow { + release_rev: *rev_id, + abstract_sha1: match c.content { + Some(ref content) => Sha1::from(content).hexdigest(), + None => match c.sha1.clone() { + Some(v) => v, + None => bail!("either abstract_sha1 or content is required"), + }, + }, + lang: c.lang.clone(), + mimetype: c.mimetype.clone(), + }) + }) + .collect::>>()?; + release_abstract_rows.extend(new_release_abstract_rows); + } + } + + if !release_ref_rows.is_empty() { + insert_into(release_ref::table) + .values(release_ref_rows) + .execute(conn)?; + } + + if !release_contrib_rows.is_empty() { + insert_into(release_contrib::table) + .values(release_contrib_rows) + .execute(conn)?; + } + + if !abstract_rows.is_empty() { + // Sort of an "upsert"; only inserts new abstract rows if they don't already exist + insert_into(abstracts::table) + .values(&abstract_rows) + .on_conflict(abstracts::sha1) + .do_nothing() + .execute(conn)?; + insert_into(release_rev_abstract::table) + .values(release_abstract_rows) + .execute(conn)?; + } + + Ok(rev_ids) + } +} + +impl EntityCrud for WorkEntity { + type EditRow = WorkEditRow; + type EditNewRow = WorkEditNewRow; + type IdentRow = WorkIdentRow; + type IdentNewRow = WorkIdentNewRow; + type RevRow = WorkRevRow; + + generic_db_get!(work_ident, work_rev); + generic_db_get_rev!(work_rev); + generic_db_expand!(); + generic_db_create!(work_ident, work_edit); + generic_db_create_batch!(work_ident, work_edit); + generic_db_update!(work_ident, work_edit); + generic_db_delete!(work_ident, work_edit); + generic_db_get_history!(work_edit); + generic_db_get_edit!(work_edit); + generic_db_delete_edit!(work_edit); + generic_db_get_redirects!(work_ident); + generic_db_accept_edits_batch!("work", work_ident, work_edit); + generic_db_insert_rev!(); + + fn from_deleted_row(ident_row: Self::IdentRow) -> Result { + if ident_row.rev_id.is_some() { + bail!("called from_deleted_row with a non-deleted-state row") + } + + Ok(WorkEntity { + state: 
Some(ident_row.state().unwrap().shortname()),
+            ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()),
+            revision: ident_row.rev_id.map(|u| u.to_string()),
+            redirect: ident_row
+                .redirect_id
+                .map(|u| FatCatId::from_uuid(&u).to_string()),
+            extra: None,
+            edit_extra: None,
+        })
+    }
+
+    fn db_from_row(
+        _conn: &DbConn,
+        rev_row: Self::RevRow,
+        ident_row: Option<Self::IdentRow>,
+        _hide: HideFlags,
+    ) -> Result<Self> {
+        let (state, ident_id, redirect_id) = match ident_row {
+            Some(i) => (
+                Some(i.state().unwrap().shortname()),
+                Some(FatCatId::from_uuid(&i.id).to_string()),
+                i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()),
+            ),
+            None => (None, None, None),
+        };
+
+        Ok(WorkEntity {
+            state: state,
+            ident: ident_id,
+            revision: Some(rev_row.id.to_string()),
+            redirect: redirect_id,
+            extra: rev_row.extra_json,
+            edit_extra: None,
+        })
+    }
+
+    fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+        let rev_ids: Vec<Uuid> = insert_into(work_rev::table)
+            .values(
+                models
+                    .iter()
+                    .map(|model| WorkRevNewRow {
+                        extra_json: model.extra.clone(),
+                    })
+                    .collect::<Vec<WorkRevNewRow>>(),
+            )
+            .returning(work_rev::id)
+            .get_results(conn)?;
+        Ok(rev_ids)
+    }
+}
diff --git a/rust/src/errors.rs b/rust/src/errors.rs
new file mode 100644
index 00000000..0b966e93
--- /dev/null
+++ b/rust/src/errors.rs
@@ -0,0 +1,55 @@
+//! Crate-specific Result, Error, and ErrorKind types (using `error_chain`)
+
+error_chain! {
+    foreign_links { Fmt(::std::fmt::Error);
+                    Diesel(::diesel::result::Error);
+                    R2d2(::diesel::r2d2::Error);
+                    Uuid(::uuid::ParseError);
+                    Io(::std::io::Error) #[cfg(unix)];
+                    Serde(::serde_json::Error);
+                    Utf8Decode(::std::string::FromUtf8Error);
+                    StringDecode(::data_encoding::DecodeError);
+    }
+    errors {
+        InvalidFatcatId(id: String) {
+            description("invalid fatcat identifier syntax")
+            display("invalid fatcat identifier (expect 26-char base32 encoded): {}", id)
+        }
+        MalformedExternalId(id: String) {
+            description("external identifier doesn't match required pattern")
+            display("external identifier doesn't match required pattern: {}", id)
+        }
+        MalformedChecksum(hash: String) {
+            description("checksum doesn't match required pattern (hex encoding)")
+            display("checksum doesn't match required pattern (hex encoding): {}", hash)
+        }
+        NotInControlledVocabulary(word: String) {
+            description("word or type not correct for controlled vocabulary")
+            display("word or type not correct for controlled vocabulary")
+        }
+        EditgroupAlreadyAccepted(id: String) {
+            description("editgroup was already accepted")
+            display("attempted to accept or mutate an editgroup which was already accepted: {}", id)
+        }
+        MissingOrMultipleExternalId(message: String) {
+            description("external identifiers missing or multiple specified")
+            display("external identifiers missing or multiple specified; please supply exactly one")
+        }
+        InvalidEntityStateTransform(message: String) {
+            description("Invalid Entity State Transform")
+            display("tried to mutate an entity which was not in an appropriate state: {}", message)
+        }
+        InvalidCredentials(message: String) {
+            description("auth token was missing, expired, revoked, or corrupt")
+            display("auth token was missing, expired, revoked, or corrupt: {}", message)
+        }
+        InsufficientPrivileges(message: String) {
+            description("editor account doesn't have authorization")
+            display("editor account doesn't have authorization: {}", message)
+        }
+        OtherBadRequest(message: String) {
+            description("catch-all error for bad or unallowed requests")
+            display("broke a constraint or made an otherwise invalid
request: {}", message) + } + } +} diff --git a/rust/src/identifiers.rs b/rust/src/identifiers.rs new file mode 100644 index 00000000..adb9f413 --- /dev/null +++ b/rust/src/identifiers.rs @@ -0,0 +1,376 @@ +use crate::errors::*; +use data_encoding::BASE32_NOPAD; +use regex::Regex; +use serde_json; +use std::str::FromStr; +use uuid::Uuid; + +#[derive(Clone, Copy, PartialEq, Debug)] +pub struct FatCatId(Uuid); + +impl ToString for FatCatId { + fn to_string(&self) -> String { + uuid2fcid(&self.to_uuid()) + } +} + +impl FromStr for FatCatId { + type Err = Error; + fn from_str(s: &str) -> Result { + fcid2uuid(s).map(|u| FatCatId(u)) + } +} + +impl FatCatId { + pub fn to_uuid(&self) -> Uuid { + self.0 + } + // TODO: just make it u: Uuid and clone (not by ref) + pub fn from_uuid(u: &Uuid) -> FatCatId { + FatCatId(*u) + } +} + +/// Convert fatcat IDs (base32 strings) to UUID +pub fn fcid2uuid(fcid: &str) -> Result { + if fcid.len() != 26 { + return Err(ErrorKind::InvalidFatcatId(fcid.to_string()).into()); + } + let mut raw = vec![0; 16]; + BASE32_NOPAD + .decode_mut(fcid.to_uppercase().as_bytes(), &mut raw) + .map_err(|_dp| ErrorKind::InvalidFatcatId(fcid.to_string()))?; + // unwrap() is safe here, because we know raw is always 16 bytes + Ok(Uuid::from_bytes(&raw).unwrap()) +} + +/// Convert UUID to fatcat ID string (base32 encoded) +pub fn uuid2fcid(id: &Uuid) -> String { + let raw = id.as_bytes(); + BASE32_NOPAD.encode(raw).to_lowercase() +} + +pub fn check_username(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^[A-Za-z][A-Za-z0-9._-]{2,24}$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid username: '{}' (expected, eg, 'AcidBurn')", + raw + )) + .into()) + } +} + +#[test] +fn test_check_username() { + assert!(check_username("bnewbold").is_ok()); + assert!(check_username("BNEWBOLD").is_ok()); + assert!(check_username("admin").is_ok()); + assert!(check_username("friend-bot").is_ok()); + assert!(check_username("dog").is_ok()); + assert!(check_username("g_____").is_ok()); + assert!(check_username("bnewbold2-archive").is_ok()); + assert!(check_username("bnewbold2-internetarchive").is_ok()); + + assert!(check_username("").is_err()); + assert!(check_username("_").is_err()); + assert!(check_username("gg").is_err()); + assert!(check_username("adminadminadminadminadminadminadmin").is_err()); + assert!(check_username("bryan newbold").is_err()); + assert!(check_username("01234567-3456-6780").is_err()); + assert!(check_username(".admin").is_err()); + assert!(check_username("-bot").is_err()); +} + +pub fn check_pmcid(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^PMC\d+$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid PubMed Central ID (PMCID): '{}' (expected, eg, 'PMC12345')", + raw + )) + .into()) + } +} + +pub fn check_pmid(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^\d+$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid PubMed ID (PMID): '{}' (expected, eg, '1234')", + raw + )) + .into()) + } +} + +pub fn check_wikidata_qid(raw: &str) -> Result<()> { + lazy_static! 
{ + static ref RE: Regex = Regex::new(r"^Q\d+$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid Wikidata QID: '{}' (expected, eg, 'Q1234')", + raw + )) + .into()) + } +} + +pub fn check_doi(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^10.\d{3,6}/.+$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid DOI: '{}' (expected, eg, '10.1234/aksjdfh')", + raw + )) + .into()) + } +} + +pub fn check_issn(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^\d{4}-\d{3}[0-9X]$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid ISSN: '{}' (expected, eg, '1234-5678')", + raw + )) + .into()) + } +} + +pub fn check_orcid(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^\d{4}-\d{4}-\d{4}-\d{3}[\dX]$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedExternalId(format!( + "not a valid ORCID: '{}' (expected, eg, '0123-4567-3456-6789')", + raw + )) + .into()) + } +} + +#[test] +fn test_check_orcid() { + assert!(check_orcid("0123-4567-3456-6789").is_ok()); + assert!(check_orcid("0123-4567-3456-678X").is_ok()); + assert!(check_orcid("01234567-3456-6780").is_err()); + assert!(check_orcid("0x23-4567-3456-6780").is_err()); +} + +pub fn check_md5(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^[a-f0-9]{32}$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedChecksum(format!( + "not a valid MD5: '{}' (expected lower-case hex, eg, '1b39813549077b2347c0f370c3864b40')", + raw + )) + .into()) + } +} + +#[test] +fn test_check_md5() { + assert!(check_md5("1b39813549077b2347c0f370c3864b40").is_ok()); + assert!(check_md5("1g39813549077b2347c0f370c3864b40").is_err()); + assert!(check_md5("1B39813549077B2347C0F370c3864b40").is_err()); + assert!(check_md5("1b39813549077b2347c0f370c3864b4").is_err()); + assert!(check_md5("1b39813549077b2347c0f370c3864b411").is_err()); +} + +pub fn check_sha1(raw: &str) -> Result<()> { + lazy_static! { + static ref RE: Regex = Regex::new(r"^[a-f0-9]{40}$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedChecksum(format!( + "not a valid SHA-1: '{}' (expected lower-case hex, eg, 'e9dd75237c94b209dc3ccd52722de6931a310ba3')", + raw + )) + .into()) + } +} + +#[test] +fn test_check_sha1() { + assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba3").is_ok()); + assert!(check_sha1("g9dd75237c94b209dc3ccd52722de6931a310ba3").is_err()); + assert!(check_sha1("e9DD75237C94B209DC3CCD52722de6931a310ba3").is_err()); + assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba").is_err()); + assert!(check_sha1("e9dd75237c94b209dc3ccd52722de6931a310ba33").is_err()); +} + +pub fn check_sha256(raw: &str) -> Result<()> { + lazy_static! 
{ + static ref RE: Regex = Regex::new(r"^[a-f0-9]{64}$").unwrap(); + } + if RE.is_match(raw) { + Ok(()) + } else { + Err(ErrorKind::MalformedChecksum(format!( + "not a valid SHA-256: '{}' (expected lower-case hex, eg, 'cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452')", + raw + )) + .into()) + } +} + +#[test] +fn test_check_sha256() { + assert!( + check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_ok() + ); + assert!( + check_sha256("gb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_err() + ); + assert!( + check_sha256("UB1C378F464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452").is_err() + ); + assert!( + check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e45").is_err() + ); + assert!( + check_sha256("cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e4522").is_err() + ); +} + +pub fn check_release_type(raw: &str) -> Result<()> { + let valid_types = vec![ + // Citation Style Language official types + "article", + "article-magazine", + "article-newspaper", + "article-journal", + "bill", + "book", + "broadcast", + "chapter", + "dataset", + "entry", + "entry-dictionary", + "entry-encyclopedia", + "figure", + "graphic", + "interview", + "legislation", + "legal_case", + "manuscript", + "map", + "motion_picture", + "musical_score", + "pamphlet", + "paper-conference", + "patent", + "post", + "post-weblog", + "personal_communication", + "report", + "review", + "review-book", + "song", + "speech", + "thesis", + "treaty", + "webpage", + // fatcat-specific extensions + "peer_review", + "software", + "standard", + ]; + for good in valid_types { + if raw == good { + return Ok(()); + } + } + Err(ErrorKind::NotInControlledVocabulary(format!( + "not a valid release_type: '{}' (expected a CSL type, eg, 'article-journal', 'book')", + raw + )) + .into()) +} + +#[test] +fn test_check_release_type() { + assert!(check_release_type("book").is_ok()); + assert!(check_release_type("article-journal").is_ok()); + assert!(check_release_type("standard").is_ok()); + assert!(check_release_type("journal-article").is_err()); + assert!(check_release_type("BOOK").is_err()); + assert!(check_release_type("book ").is_err()); +} + +pub fn check_contrib_role(raw: &str) -> Result<()> { + let valid_types = vec![ + // Citation Style Language official role types + "author", + "collection-editor", + "composer", + "container-author", + "director", + "editor", + "editorial-director", + "editortranslator", + "illustrator", + "interviewer", + "original-author", + "recipient", + "reviewed-author", + "translator", + // common extension (for conference proceeding chair) + //"chair", + ]; + for good in valid_types { + if raw == good { + return Ok(()); + } + } + Err(ErrorKind::NotInControlledVocabulary(format!( + "not a valid contrib.role: '{}' (expected a CSL type, eg, 'author', 'editor')", + raw + )) + .into()) +} + +#[test] +fn test_check_contrib_role() { + assert!(check_contrib_role("author").is_ok()); + assert!(check_contrib_role("editor").is_ok()); + assert!(check_contrib_role("chair").is_err()); + assert!(check_contrib_role("EDITOR").is_err()); + assert!(check_contrib_role("editor ").is_err()); +} + +// TODO: make the above checks "more correct" +// TODO: check ISBN-13 diff --git a/rust/src/lib.rs b/rust/src/lib.rs index 18550a5d..df3d6f51 100644 --- a/rust/src/lib.rs +++ b/rust/src/lib.rs @@ -1,202 +1,25 @@ #![allow(proc_macro_derive_resolution_fallback)] #![recursion_limit = "128"] -extern crate chrono; -extern crate 
fatcat_api_spec; -#[macro_use] -extern crate diesel; -extern crate diesel_migrations; -extern crate dotenv; -extern crate futures; -extern crate uuid; -#[macro_use] -extern crate hyper; -extern crate swagger; #[macro_use] extern crate error_chain; -extern crate iron; -extern crate serde_json; +#[macro_use] +extern crate diesel; #[macro_use] extern crate log; -extern crate data_encoding; -extern crate regex; #[macro_use] extern crate lazy_static; -extern crate macaroon; -extern crate sha1; -extern crate rand; -pub mod api_entity_crud; -pub mod api_helpers; -pub mod api_server; -pub mod api_wrappers; pub mod auth; pub mod database_models; -pub mod database_schema; - -pub mod errors { - // Create the Error, ErrorKind, ResultExt, and Result types - error_chain! { - foreign_links { Fmt(::std::fmt::Error); - Diesel(::diesel::result::Error); - R2d2(::diesel::r2d2::Error); - Uuid(::uuid::ParseError); - Io(::std::io::Error) #[cfg(unix)]; - Serde(::serde_json::Error); - Utf8Decode(::std::string::FromUtf8Error); - StringDecode(::data_encoding::DecodeError); - } - errors { - InvalidFatcatId(id: String) { - description("invalid fatcat identifier syntax") - display("invalid fatcat identifier (expect 26-char base32 encoded): {}", id) - } - MalformedExternalId(id: String) { - description("external identifier doesn't match required pattern") - display("external identifier doesn't match required pattern: {}", id) - } - MalformedChecksum(hash: String) { - description("checksum doesn't match required pattern (hex encoding)") - display("checksum doesn't match required pattern (hex encoding): {}", hash) - } - NotInControlledVocabulary(word: String) { - description("word or type not correct for controlled vocabulary") - display("word or type not correct for controlled vocabulary") - } - EditgroupAlreadyAccepted(id: String) { - description("editgroup was already accepted") - display("attempted to accept or mutate an editgroup which was already accepted: {}", id) - } - MissingOrMultipleExternalId(message: String) { - description("external identifiers missing or multiple specified") - display("external identifiers missing or multiple specified; please supply exactly one") - } - InvalidEntityStateTransform(message: String) { - description("Invalid Entity State Transform") - display("tried to mutate an entity which was not in an appropriate state: {}", message) - } - InvalidCredentials(message: String) { - description("auth token was missing, expired, revoked, or corrupt") - display("auth token was missing, expired, revoked, or corrupt: {}", message) - } - InsufficientPrivileges(message: String) { - description("editor account doesn't have authorization") - display("editor account doesn't have authorization: {}", message) - } - OtherBadRequest(message: String) { - description("catch-all error for bad or unallowed requests") - display("broke a constraint or made an otherwise invalid request: {}", message) - } - } - } -} - -#[doc(hidden)] -pub use crate::errors::*; - -pub use self::errors::*; -use crate::auth::AuthConfectionary; -use diesel::pg::PgConnection; -use diesel::r2d2::ConnectionManager; -use dotenv::dotenv; -use iron::middleware::AfterMiddleware; -use iron::{Request, Response}; -use std::{env, thread, time}; -use std::process::Command; -use rand::Rng; - -#[cfg(feature = "postgres")] -embed_migrations!("../migrations/"); - -pub type ConnectionPool = diesel::r2d2::Pool>; - -/// Instantiate a new API server with a pooled database connection -pub fn database_worker_pool() -> Result { - dotenv().ok(); - let 
database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set"); - let manager = ConnectionManager::::new(database_url); - let pool = diesel::r2d2::Pool::builder() - .build(manager) - .expect("Failed to create database pool."); - Ok(pool) -} - -pub fn env_confectionary() -> Result { - let auth_location = env::var("AUTH_LOCATION").expect("AUTH_LOCATION must be set"); - let auth_key = env::var("AUTH_SECRET_KEY").expect("AUTH_SECRET_KEY must be set"); - let auth_key_ident = env::var("AUTH_KEY_IDENT").expect("AUTH_KEY_IDENT must be set"); - info!("Loaded primary auth key: {}", auth_key_ident); - let mut confectionary = AuthConfectionary::new(auth_location, auth_key_ident, auth_key)?; - match env::var("AUTH_ALT_KEYS") { - Ok(var) => { - for pair in var.split(",") { - let pair: Vec<&str> = pair.split(":").collect(); - if pair.len() != 2 { - println!("{:#?}", pair); - bail!("couldn't parse keypair from AUTH_ALT_KEYS (expected 'ident:key' pairs separated by commas)"); - } - info!("Loading alt auth key: {}", pair[0]); - confectionary.add_keypair(pair[0].to_string(), pair[1].to_string())?; - } - } - Err(_) => (), - } - Ok(confectionary) -} - -/// Instantiate a new API server with a pooled database connection -pub fn server() -> Result { - dotenv().ok(); - let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set"); - let manager = ConnectionManager::::new(database_url); - let pool = diesel::r2d2::Pool::builder() - .build(manager) - .expect("Failed to create database pool."); - let confectionary = env_confectionary()?; - Ok(api_server::Server { - db_pool: pool, - auth_confectionary: confectionary, - }) -} - -/// Generates a server for testing. Calls an external bash script to generate a random postgres -/// database, which will be unique to this process but common across threads and connections. The -/// database will automagically get cleaned up (deleted) after 60 seconds. -/// Currently, start times are staggered by up to 200ms to prevent internal postgres concurrency -/// errors; if this fails run the tests serially (one at a time), which is slower but more robust. -/// CI should run tests serially. -pub fn test_server() -> Result { - dotenv().ok(); - // sleep a bit so we don't have thundering herd collisions, resuliting in - // "pg_extension_name_index" or "pg_proc_proname_args_nsp_index" or "pg_type_typname_nsp_index" - // duplicate key violations. - thread::sleep(time::Duration::from_millis(rand::thread_rng().gen_range(0, 200))); - let pg_tmp = Command::new("./tests/pg_tmp.sh") - .output() - .expect("run ./tests/pg_tmp.sh to get temporary postgres DB"); - let database_url = String::from_utf8_lossy(&pg_tmp.stdout).to_string(); - env::set_var("DATABASE_URL", database_url); - - let mut server = server()?; - server.auth_confectionary = AuthConfectionary::new_dummy(); - let conn = server.db_pool.get().expect("db_pool error"); - - // run migrations; this is a fresh/bare database - diesel_migrations::run_pending_migrations(&conn).unwrap(); - Ok(server) -} - -// TODO: move this to bin/fatcatd - -/// HTTP header middleware -header! 
{ (XClacksOverhead, "X-Clacks-Overhead") => [String] }
-
-pub struct XClacksOverheadMiddleware;
-
-impl AfterMiddleware for XClacksOverheadMiddleware {
-    fn after(&self, _req: &mut Request, mut res: Response) -> iron::IronResult<Response> {
-        res.headers
-            .set(XClacksOverhead("GNU aaronsw, jpb".to_owned()));
-        Ok(res)
-    }
-}
+pub mod database_schema; // only public for tests
+pub mod editing;
+mod endpoint_handlers;
+mod endpoints;
+pub mod entity_crud;
+pub mod errors;
+pub mod identifiers;
+pub mod server;
+
+// TODO: will probably remove these as a public export?
+pub use crate::server::{create_server, create_test_server};
diff --git a/rust/src/server.rs b/rust/src/server.rs
new file mode 100644
index 00000000..70e667be
--- /dev/null
+++ b/rust/src/server.rs
@@ -0,0 +1,81 @@
+//! API endpoint handlers
+
+use crate::auth::*;
+use crate::errors::*;
+use chrono;
+use diesel;
+use diesel::pg::PgConnection;
+use diesel::r2d2::ConnectionManager;
+use dotenv::dotenv;
+use rand::Rng;
+use std::process::Command;
+use std::{env, thread, time};
+
+#[cfg(feature = "postgres")]
+embed_migrations!("../migrations/");
+
+pub type ConnectionPool = diesel::r2d2::Pool<ConnectionManager<PgConnection>>;
+
+pub type DbConn =
+    diesel::r2d2::PooledConnection<ConnectionManager<PgConnection>>;
+
+/// Instantiate a new API server with a pooled database connection
+pub fn database_worker_pool() -> Result<ConnectionPool> {
+    dotenv().ok();
+    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+    let manager = ConnectionManager::<PgConnection>::new(database_url);
+    let pool = diesel::r2d2::Pool::builder()
+        .build(manager)
+        .expect("Failed to create database pool.");
+    Ok(pool)
+}
+
+#[derive(Clone)]
+pub struct Server {
+    pub db_pool: ConnectionPool,
+    pub auth_confectionary: AuthConfectionary,
+}
+
+/// Instantiate a new API server with a pooled database connection
+pub fn create_server() -> Result<Server> {
+    dotenv().ok();
+    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
+    let manager = ConnectionManager::<PgConnection>::new(database_url);
+    let pool = diesel::r2d2::Pool::builder()
+        .build(manager)
+        .expect("Failed to create database pool.");
+    let confectionary = env_confectionary()?;
+    Ok(Server {
+        db_pool: pool,
+        auth_confectionary: confectionary,
+    })
+}
+
+/// Generates a server for testing. Calls an external bash script to generate a random postgres
+/// database, which will be unique to this process but common across threads and connections. The
+/// database will automagically get cleaned up (deleted) after 60 seconds.
+/// Currently, start times are staggered by up to 200ms to prevent internal postgres concurrency
+/// errors; if this fails run the tests serially (one at a time), which is slower but more robust.
+/// CI should run tests serially.
+pub fn create_test_server() -> Result<Server> {
+    dotenv().ok();
+    // sleep a bit so we don't have thundering herd collisions, resulting in
+    // "pg_extension_name_index" or "pg_proc_proname_args_nsp_index" or "pg_type_typname_nsp_index"
+    // duplicate key violations.
+ thread::sleep(time::Duration::from_millis( + rand::thread_rng().gen_range(0, 200), + )); + let pg_tmp = Command::new("./tests/pg_tmp.sh") + .output() + .expect("run ./tests/pg_tmp.sh to get temporary postgres DB"); + let database_url = String::from_utf8_lossy(&pg_tmp.stdout).to_string(); + env::set_var("DATABASE_URL", database_url); + + let mut server = create_server()?; + server.auth_confectionary = AuthConfectionary::new_dummy(); + let conn = server.db_pool.get().expect("db_pool error"); + + // run migrations; this is a fresh/bare database + diesel_migrations::run_pending_migrations(&conn).unwrap(); + Ok(server) +} diff --git a/rust/tests/helpers.rs b/rust/tests/helpers.rs index f5624dff..2ba94a5c 100644 --- a/rust/tests/helpers.rs +++ b/rust/tests/helpers.rs @@ -1,17 +1,12 @@ -extern crate diesel; -extern crate fatcat; -extern crate fatcat_api_spec; -extern crate iron; -extern crate iron_test; -extern crate uuid; - -use self::iron_test::response; -use fatcat::api_helpers::FatCatId; +use fatcat::auth::MacaroonAuthMiddleware; +use fatcat::identifiers::FatCatId; +use fatcat::server; use fatcat_api_spec::client::Client; use fatcat_api_spec::Context; use iron::headers::{Authorization, Bearer, ContentType}; use iron::mime::Mime; use iron::{status, Chain, Headers, Iron, Listening}; +use iron_test::response; use std::str::FromStr; // A current problem with this method is that if the test fails (eg, panics, assert fails), the @@ -20,7 +15,7 @@ use std::str::FromStr; // cleanup. #[allow(dead_code)] pub fn setup_client() -> (Client, Context, Listening) { - let server = fatcat::test_server().unwrap(); + let server = server::create_test_server().unwrap(); // setup auth as admin user let admin_id = FatCatId::from_str("aaaaaaaaaaaabkvkaaaaaaaaae").unwrap(); @@ -37,7 +32,7 @@ pub fn setup_client() -> (Client, Context, Listening) { let router = fatcat_api_spec::router(server); let mut chain = Chain::new(router); chain.link_before(fatcat_api_spec::server::ExtractAuthData); - chain.link_before(fatcat::auth::MacaroonAuthMiddleware::new()); + chain.link_before(MacaroonAuthMiddleware::new()); let mut iron_server = Iron::new(chain); iron_server.threads = 1; @@ -56,7 +51,7 @@ pub fn setup_http() -> ( iron::middleware::Chain, diesel::r2d2::PooledConnection>, ) { - let server = fatcat::test_server().unwrap(); + let server = fatcat::create_test_server().unwrap(); let conn = server.db_pool.get().expect("db_pool error"); // setup auth as admin user @@ -69,7 +64,7 @@ pub fn setup_http() -> ( let router = fatcat_api_spec::router(server); let mut chain = Chain::new(router); chain.link_before(fatcat_api_spec::server::ExtractAuthData); - chain.link_before(fatcat::auth::MacaroonAuthMiddleware::new()); + chain.link_before(MacaroonAuthMiddleware::new()); let mut headers = Headers::new(); let mime: Mime = "application/json".parse().unwrap(); headers.set(ContentType(mime)); diff --git a/rust/tests/test_api_server_client.rs b/rust/tests/test_api_server_client.rs index 3effc0a3..96d8d924 100644 --- a/rust/tests/test_api_server_client.rs +++ b/rust/tests/test_api_server_client.rs @@ -9,11 +9,6 @@ * middleware. 
*/ -extern crate fatcat; -extern crate fatcat_api_spec; -extern crate iron; -extern crate uuid; - use fatcat_api_spec::{ApiNoContext, ContextWrapperExt, Future}; mod helpers; diff --git a/rust/tests/test_api_server_http.rs b/rust/tests/test_api_server_http.rs index f84e7e1f..2ea01658 100644 --- a/rust/tests/test_api_server_http.rs +++ b/rust/tests/test_api_server_http.rs @@ -6,16 +6,10 @@ * test basic serialization/deserialization, and take advantage of hard-coded example entities. */ -extern crate diesel; -extern crate fatcat; -extern crate fatcat_api_spec; -extern crate iron; -extern crate iron_test; -extern crate uuid; - use diesel::prelude::*; -use fatcat::api_helpers::*; use fatcat::database_schema::*; +use fatcat::editing::get_or_create_editgroup; +use fatcat::identifiers::*; use iron::status; use iron_test::request; use uuid::Uuid; diff --git a/rust/tests/test_auth.rs b/rust/tests/test_auth.rs index 82d9f981..d93051f2 100644 --- a/rust/tests/test_auth.rs +++ b/rust/tests/test_auth.rs @@ -1,16 +1,13 @@ -extern crate chrono; -extern crate fatcat; -extern crate uuid; - -use fatcat::api_helpers::*; -use fatcat::auth::*; +use fatcat::auth::AuthConfectionary; +use fatcat::identifiers::FatCatId; +use fatcat::{auth, server}; use std::str::FromStr; #[test] fn test_macaroons() { // Test everything we can without connecting to database - let c = fatcat::auth::AuthConfectionary::new_dummy(); + let c = AuthConfectionary::new_dummy(); let editor_id = FatCatId::from_str("q3nouwy3nnbsvo3h5klxsx4a7y").unwrap(); // create token w/o expiration @@ -25,9 +22,9 @@ fn test_macaroons() { fn test_auth_db() { // Test things that require database - let server = fatcat::test_server().unwrap(); + let server = server::create_test_server().unwrap(); let conn = server.db_pool.get().expect("db_pool error"); - let c = fatcat::auth::AuthConfectionary::new_dummy(); + let c = AuthConfectionary::new_dummy(); let editor_id = FatCatId::from_str("aaaaaaaaaaaabkvkaaaaaaaaae").unwrap(); // create token @@ -38,7 +35,7 @@ fn test_auth_db() { assert_eq!(editor_row.id, editor_id.to_uuid()); // revoke token - revoke_tokens(&conn, editor_id).unwrap(); + auth::revoke_tokens(&conn, editor_id).unwrap(); // verification should fail // XXX: one-second slop breaks this diff --git a/rust/tests/test_fcid.rs b/rust/tests/test_fcid.rs index 4feaef5d..aac27129 100644 --- a/rust/tests/test_fcid.rs +++ b/rust/tests/test_fcid.rs @@ -1,7 +1,7 @@ extern crate fatcat; extern crate uuid; -use fatcat::api_helpers::{fcid2uuid, uuid2fcid}; +use fatcat::identifiers::{fcid2uuid, uuid2fcid}; use uuid::Uuid; #[test] diff --git a/rust/tests/test_old_python_tests.rs b/rust/tests/test_old_python_tests.rs index d607fa42..0676a604 100644 --- a/rust/tests/test_old_python_tests.rs +++ b/rust/tests/test_old_python_tests.rs @@ -4,11 +4,6 @@ * a single editgroup. */ -extern crate fatcat; -extern crate fatcat_api_spec; -extern crate iron; -extern crate uuid; - use fatcat_api_spec::models::*; use fatcat_api_spec::*; -- cgit v1.2.3
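
For illustration only (not part of the patch): a minimal sketch of how downstream code and tests consume the reorganized module paths after this refactor, based on the `lib.rs` module declarations and the test updates above. It assumes the same admin editor identifier already used in `tests/helpers.rs` and the `./tests/pg_tmp.sh` temporary-database script.

use fatcat::identifiers::{fcid2uuid, uuid2fcid, FatCatId};
use fatcat::server::create_test_server;
use std::str::FromStr;

#[test]
fn example_new_module_paths() {
    // round-trip a base32 fatcat identifier through the new `identifiers` module
    let admin = FatCatId::from_str("aaaaaaaaaaaabkvkaaaaaaaaae").unwrap();
    assert_eq!(admin.to_string(), uuid2fcid(&admin.to_uuid()));
    assert_eq!(admin.to_uuid(), fcid2uuid(&admin.to_string()).unwrap());

    // spin up a throwaway postgres-backed server via the new `server` module
    // (shells out to ./tests/pg_tmp.sh, as described in create_test_server's docs)
    let server = create_test_server().unwrap();
    let _conn = server.db_pool.get().expect("db_pool error");
}

With Rust 2018 paths, callers no longer need `extern crate` declarations; the old `fatcat::api_helpers::*` and `fatcat::test_server()` imports map onto `fatcat::identifiers`, `fatcat::editing`, and `fatcat::server::create_test_server` as shown in the test diffs above.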