Diffstat (limited to 'rust/src/api_server.rs')
-rw-r--r--  rust/src/api_server.rs  858
1 file changed, 224 insertions(+), 634 deletions(-)
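
The refactor below replaces the hand-rolled per-entity SQL in the handlers with calls into the new generic database_entity_crud module. For orientation, a minimal sketch of the trait surface those calls assume; the method names are taken from the diff itself, but the real signatures live in database_entity_crud.rs and the ones here are approximations only.

pub struct EditContext {
    pub editor_id: FatCatId,
    pub editgroup_id: FatCatId,
    pub extra_json: Option<serde_json::Value>, // type assumed; only None is passed below
    pub autoaccept: bool,
}

pub trait EntityCrud
where
    Self: Sized,
{
    // e.g. ContainerEditRow / ContainerIdentRow / ContainerRevRow;
    // EditRow is expected to provide into_model() -> Result<EntityEdit>
    type EditRow;
    type IdentRow;
    type RevRow;

    fn db_get(conn: &DbConn, ident: FatCatId) -> Result<Self>;
    fn db_from_row(conn: &DbConn, rev_row: Self::RevRow, ident_row: Option<Self::IdentRow>)
        -> Result<Self>;
    fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result<Self::EditRow>;
    fn db_create_batch(conn: &DbConn, edit_context: &EditContext, models: &[&Self])
        -> Result<Vec<Self::EditRow>>;
    fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId)
        -> Result<Self::EditRow>;
    fn db_delete(conn: &DbConn, edit_context: &EditContext, ident: FatCatId)
        -> Result<Self::EditRow>;
    fn db_get_history(conn: &DbConn, ident: FatCatId, limit: Option<i64>)
        -> Result<Vec<EntityHistoryEntry>>;
}
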
diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs
index b445d63a..31b71395 100644
--- a/rust/src/api_server.rs
+++ b/rust/src/api_server.rs
@@ -2,6 +2,7 @@
use api_helpers::*;
use chrono;
+use database_entity_crud::{EditContext, EntityCrud};
use database_models::*;
use database_schema::{
abstracts, changelog, container_edit, container_ident, container_rev, creator_edit,
@@ -14,54 +15,34 @@ use diesel::{self, insert_into};
use errors::*;
use fatcat_api::models;
use fatcat_api::models::*;
-use sha1::Sha1;
+use std::str::FromStr;
use uuid::Uuid;
use ConnectionPool;
-type DbConn = diesel::r2d2::PooledConnection<diesel::r2d2::ConnectionManager<diesel::PgConnection>>;
-
macro_rules! entity_batch_handler {
- ($post_handler:ident, $post_batch_handler:ident, $model:ident) => {
+ ($post_batch_handler:ident, $model:ident) => {
pub fn $post_batch_handler(
&self,
entity_list: &[models::$model],
+ autoaccept: bool,
+ editgroup: Option<String>,
conn: &DbConn,
) -> Result<Vec<EntityEdit>> {
- let mut ret: Vec<EntityEdit> = vec![];
- for entity in entity_list {
- ret.push(self.$post_handler(entity.clone(), conn)?);
- }
- Ok(ret)
- }
- }
-}
-
-macro_rules! entity_history_handler {
- ($history_handler:ident, $edit_row_type:ident, $edit_table:ident) => {
- pub fn $history_handler(
- &self,
- id: &Uuid,
- limit: Option<i64>,
- conn: &DbConn,
- ) -> Result<Vec<EntityHistoryEntry>> {
- let limit = limit.unwrap_or(50);
-
- let rows: Vec<(EditgroupRow, ChangelogRow, $edit_row_type)> = editgroup::table
- .inner_join(changelog::table)
- .inner_join($edit_table::table)
- .filter($edit_table::ident_id.eq(id))
- .order(changelog::id.desc())
- .limit(limit)
- .get_results(conn)?;
- let history: Vec<EntityHistoryEntry> = rows.into_iter()
- .map(|(eg_row, cl_row, e_row)| EntityHistoryEntry {
- edit: e_row.into_model().expect("edit row to model"),
- editgroup: eg_row.into_model_partial(),
- changelog_entry: cl_row.into_model(),
- })
- .collect();
- Ok(history)
+ let editgroup_id: Option<FatCatId> = match editgroup {
+ Some(s) => Some(FatCatId::from_str(&s)?),
+ None => None,
+ };
+ let edit_context = make_edit_context(conn, editgroup_id.clone(), autoaccept)?;
+ let model_list: Vec<&models::$model> = entity_list.iter().map(|e| e).collect();
+ let edits = $model::db_create_batch(conn, &edit_context, model_list.as_slice())?;
+
+ if autoaccept {
+ let _clr: ChangelogRow = diesel::insert_into(changelog::table)
+ .values((changelog::editgroup_id.eq(edit_context.editgroup_id.to_uuid()),))
+ .get_result(conn)?;
+ }
+ edits.into_iter().map(|e| e.into_model()).collect()
}
}
}
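
Each entity_batch_handler! invocation at the bottom of the diff generates one of these methods. A hypothetical caller, to show the two new parameters (server and conn assumed in scope; ContainerEntity::new() is the usual swagger-codegen constructor, which is an assumption here):

let batch = vec![
    models::ContainerEntity::new("Journal A".to_string()),
    models::ContainerEntity::new("Journal B".to_string()),
];
// autoaccept = true with editgroup = None: the macro creates a fresh editgroup for
// this call and inserts a changelog row in the same transaction; with autoaccept =
// false the edits stay pending under the editor's current editgroup, as before.
let edits = server.create_container_batch_handler(&batch, true, None, &conn)?;
assert_eq!(edits.len(), 2);
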
@@ -77,224 +58,30 @@ macro_rules! count_entity {
}};
}
-#[derive(Clone)]
-pub struct Server {
- pub db_pool: ConnectionPool,
-}
-
-fn container_row2entity(
- ident: Option<ContainerIdentRow>,
- rev: ContainerRevRow,
-) -> Result<ContainerEntity> {
- let (state, ident_id, redirect_id) = match ident {
- Some(i) => (
- Some(i.state().unwrap().shortname()),
- Some(uuid2fcid(&i.id)),
- i.redirect_id.map(|u| uuid2fcid(&u)),
- ),
- None => (None, None, None),
+fn make_edit_context(conn: &DbConn, editgroup_id: Option<FatCatId>, autoaccept: bool) -> Result<EditContext> {
+ let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
+ let editgroup_id: FatCatId = match (editgroup_id, autoaccept) {
+ (Some(eg), _) => eg,
+ // If autoaccept and no editgroup_id passed, always create a new one for this transaction
+ (None, true) => {
+ let eg_row: EditgroupRow = diesel::insert_into(editgroup::table)
+ .values((editgroup::editor_id.eq(editor_id),))
+ .get_result(conn)?;
+ FatCatId::from_uuid(&eg_row.id)
+ },
+ (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?),
};
- Ok(ContainerEntity {
- issnl: rev.issnl,
- wikidata_qid: rev.wikidata_qid,
- publisher: rev.publisher,
- name: rev.name,
- abbrev: rev.abbrev,
- coden: rev.coden,
- state: state,
- ident: ident_id,
- revision: Some(rev.id.to_string()),
- redirect: redirect_id,
- extra: rev.extra_json,
- editgroup_id: None,
- })
-}
-
-fn creator_row2entity(ident: Option<CreatorIdentRow>, rev: CreatorRevRow) -> Result<CreatorEntity> {
- let (state, ident_id, redirect_id) = match ident {
- Some(i) => (
- Some(i.state().unwrap().shortname()),
- Some(uuid2fcid(&i.id)),
- i.redirect_id.map(|u| uuid2fcid(&u)),
- ),
- None => (None, None, None),
- };
- Ok(CreatorEntity {
- display_name: rev.display_name,
- given_name: rev.given_name,
- surname: rev.surname,
- orcid: rev.orcid,
- wikidata_qid: rev.wikidata_qid,
- state: state,
- ident: ident_id,
- revision: Some(rev.id.to_string()),
- redirect: redirect_id,
- editgroup_id: None,
- extra: rev.extra_json,
- })
-}
-
-fn file_row2entity(
- ident: Option<FileIdentRow>,
- rev: FileRevRow,
- conn: &DbConn,
-) -> Result<FileEntity> {
- let (state, ident_id, redirect_id) = match ident {
- Some(i) => (
- Some(i.state().unwrap().shortname()),
- Some(uuid2fcid(&i.id)),
- i.redirect_id.map(|u| uuid2fcid(&u)),
- ),
- None => (None, None, None),
- };
-
- let releases: Vec<String> = file_release::table
- .filter(file_release::file_rev.eq(rev.id))
- .get_results(conn)?
- .into_iter()
- .map(|r: FileReleaseRow| uuid2fcid(&r.target_release_ident_id))
- .collect();
-
- let urls: Vec<FileEntityUrls> = file_rev_url::table
- .filter(file_rev_url::file_rev.eq(rev.id))
- .get_results(conn)?
- .into_iter()
- .map(|r: FileRevUrlRow| FileEntityUrls {
- rel: r.rel,
- url: r.url,
- })
- .collect();
-
- Ok(FileEntity {
- sha1: rev.sha1,
- sha256: rev.sha256,
- md5: rev.md5,
- size: rev.size.map(|v| v as i64),
- urls: Some(urls),
- mimetype: rev.mimetype,
- releases: Some(releases),
- state: state,
- ident: ident_id,
- revision: Some(rev.id.to_string()),
- redirect: redirect_id,
- editgroup_id: None,
- extra: rev.extra_json,
+ Ok(EditContext {
+ editor_id: FatCatId::from_uuid(&editor_id),
+ editgroup_id: editgroup_id,
+ extra_json: None,
+ autoaccept: autoaccept,
})
}
-fn release_row2entity(
- ident: Option<ReleaseIdentRow>,
- rev: ReleaseRevRow,
- conn: &DbConn,
-) -> Result<ReleaseEntity> {
- let (state, ident_id, redirect_id) = match ident {
- Some(i) => (
- Some(i.state().unwrap().shortname()),
- Some(uuid2fcid(&i.id)),
- i.redirect_id.map(|u| uuid2fcid(&u)),
- ),
- None => (None, None, None),
- };
-
- let refs: Vec<ReleaseRef> = release_ref::table
- .filter(release_ref::release_rev.eq(rev.id))
- .order(release_ref::index_val.asc())
- .get_results(conn)
- .expect("fetch release refs")
- .into_iter()
- .map(|r: ReleaseRefRow| ReleaseRef {
- index: r.index_val,
- key: r.key,
- extra: r.extra_json,
- container_title: r.container_title,
- year: r.year,
- title: r.title,
- locator: r.locator,
- target_release_id: r.target_release_ident_id.map(|v| uuid2fcid(&v)),
- })
- .collect();
-
- let contribs: Vec<ReleaseContrib> = release_contrib::table
- .filter(release_contrib::release_rev.eq(rev.id))
- .order((release_contrib::role.asc(), release_contrib::index_val.asc()))
- .get_results(conn)
- .expect("fetch release refs")
- .into_iter()
- .map(|c: ReleaseContribRow| ReleaseContrib {
- index: c.index_val,
- raw_name: c.raw_name,
- role: c.role,
- extra: c.extra_json,
- creator_id: c.creator_ident_id.map(|v| uuid2fcid(&v)),
- creator: None,
- })
- .collect();
-
- let abstracts: Vec<ReleaseEntityAbstracts> = release_rev_abstract::table
- .inner_join(abstracts::table)
- .filter(release_rev_abstract::release_rev.eq(rev.id))
- .get_results(conn)?
- .into_iter()
- .map(
- |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts {
- sha1: Some(r.0.abstract_sha1),
- mimetype: r.0.mimetype,
- lang: r.0.lang,
- content: Some(r.1.content),
- },
- )
- .collect();
-
- Ok(ReleaseEntity {
- title: rev.title,
- release_type: rev.release_type,
- release_status: rev.release_status,
- release_date: rev.release_date
- .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)),
- doi: rev.doi,
- pmid: rev.pmid,
- pmcid: rev.pmcid,
- isbn13: rev.isbn13,
- core_id: rev.core_id,
- wikidata_qid: rev.wikidata_qid,
- volume: rev.volume,
- issue: rev.issue,
- pages: rev.pages,
- files: None,
- container: None,
- container_id: rev.container_ident_id.map(|u| uuid2fcid(&u)),
- publisher: rev.publisher,
- language: rev.language,
- work_id: Some(uuid2fcid(&rev.work_ident_id)),
- refs: Some(refs),
- contribs: Some(contribs),
- abstracts: Some(abstracts),
- state: state,
- ident: ident_id,
- revision: Some(rev.id.to_string()),
- redirect: redirect_id,
- editgroup_id: None,
- extra: rev.extra_json,
- })
-}
-
-fn work_row2entity(ident: Option<WorkIdentRow>, rev: WorkRevRow) -> Result<WorkEntity> {
- let (state, ident_id, redirect_id) = match ident {
- Some(i) => (
- Some(i.state().unwrap().shortname()),
- Some(uuid2fcid(&i.id)),
- i.redirect_id.map(|u| uuid2fcid(&u)),
- ),
- None => (None, None, None),
- };
- Ok(WorkEntity {
- state: state,
- ident: ident_id,
- revision: Some(rev.id.to_string()),
- redirect: redirect_id,
- editgroup_id: None,
- extra: rev.extra_json,
- })
+#[derive(Clone)]
+pub struct Server {
+ pub db_pool: ConnectionPool,
}
impl Server {
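
The handlers below lean heavily on FatCatId conversions (from_uuid, to_uuid, from_str). The type itself is not part of this diff; as a reading aid, a plausible minimal shape, assuming it wraps a Uuid and reuses the existing fcid2uuid/uuid2fcid helpers from api_helpers:

#[derive(Clone, Copy, PartialEq, Debug)]
pub struct FatCatId(Uuid);

impl FatCatId {
    pub fn to_uuid(&self) -> Uuid {
        self.0
    }
    pub fn from_uuid(u: &Uuid) -> FatCatId {
        FatCatId(*u)
    }
}

impl FromStr for FatCatId {
    type Err = Error;
    // accepts the base32 "fcid" string form used in the public API
    fn from_str(s: &str) -> Result<FatCatId> {
        Ok(FatCatId(fcid2uuid(s)?))
    }
}
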
@@ -304,13 +91,7 @@ impl Server {
_expand: Option<String>,
conn: &DbConn,
) -> Result<ContainerEntity> {
- // TODO: handle Deletions
- let (ident, rev): (ContainerIdentRow, ContainerRevRow) = container_ident::table
- .find(id)
- .inner_join(container_rev::table)
- .first(conn)?;
-
- container_row2entity(Some(ident), rev)
+ ContainerEntity::db_get(conn, FatCatId::from_uuid(id))
}
pub fn lookup_container_handler(&self, issnl: &str, conn: &DbConn) -> Result<ContainerEntity> {
@@ -318,11 +99,14 @@ impl Server {
let (ident, rev): (ContainerIdentRow, ContainerRevRow) = container_ident::table
.inner_join(container_rev::table)
.filter(container_rev::issnl.eq(issnl))
+ // This NOT NULL is here to assure the postgresql query planner that it can use an
+ // index
+ .filter(container_rev::issnl.is_not_null())
.filter(container_ident::is_live.eq(true))
.filter(container_ident::redirect_id.is_null())
.first(conn)?;
- container_row2entity(Some(ident), rev)
+ ContainerEntity::db_from_row(conn, rev, Some(ident))
}
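
The redundant is_not_null() filters added to each lookup handler are planner hints. Assuming the lookup columns are backed by partial indexes (the DDL in the comment below is an assumption about the schema, not something shown in this diff), repeating the NOT NULL predicate lets postgres prove the partial index applies instead of falling back to a sequential scan. The ISSN-L case, spelled out:

// Assumed schema, for illustration only:
//   CREATE INDEX container_rev_issnl_idx ON container_rev(issnl) WHERE issnl IS NOT NULL;
let (ident, rev): (ContainerIdentRow, ContainerRevRow) = container_ident::table
    .inner_join(container_rev::table)
    .filter(container_rev::issnl.eq(issnl))
    .filter(container_rev::issnl.is_not_null()) // logically redundant, but matches the index predicate
    .filter(container_ident::is_live.eq(true))
    .filter(container_ident::redirect_id.is_null())
    .first(conn)?;
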
pub fn get_creator_handler(
@@ -331,12 +115,7 @@ impl Server {
_expand: Option<String>,
conn: &DbConn,
) -> Result<CreatorEntity> {
- let (ident, rev): (CreatorIdentRow, CreatorRevRow) = creator_ident::table
- .find(id)
- .inner_join(creator_rev::table)
- .first(conn)?;
-
- creator_row2entity(Some(ident), rev)
+ CreatorEntity::db_get(conn, FatCatId::from_uuid(id))
}
pub fn lookup_creator_handler(&self, orcid: &str, conn: &DbConn) -> Result<CreatorEntity> {
@@ -344,11 +123,14 @@ impl Server {
let (ident, rev): (CreatorIdentRow, CreatorRevRow) = creator_ident::table
.inner_join(creator_rev::table)
.filter(creator_rev::orcid.eq(orcid))
+ // This NOT NULL is here to assure the postgresql query planner that it can use an
+ // index
+ .filter(creator_rev::orcid.is_not_null())
.filter(creator_ident::is_live.eq(true))
.filter(creator_ident::redirect_id.is_null())
.first(conn)?;
- creator_row2entity(Some(ident), rev)
+ CreatorEntity::db_from_row(conn, rev, Some(ident))
}
pub fn get_creator_releases_handler(
@@ -367,8 +149,9 @@ impl Server {
.filter(release_ident::redirect_id.is_null())
.load(conn)?;
+ // TODO: from_rows, not from_row?
rows.into_iter()
- .map(|(rev, ident, _)| release_row2entity(Some(ident), rev, conn))
+ .map(|(rev, ident, _)| ReleaseEntity::db_from_row(conn, rev, Some(ident)))
.collect()
}
@@ -378,23 +161,21 @@ impl Server {
_expand: Option<String>,
conn: &DbConn,
) -> Result<FileEntity> {
- let (ident, rev): (FileIdentRow, FileRevRow) = file_ident::table
- .find(id)
- .inner_join(file_rev::table)
- .first(conn)?;
-
- file_row2entity(Some(ident), rev, conn)
+ FileEntity::db_get(conn, FatCatId::from_uuid(id))
}
pub fn lookup_file_handler(&self, sha1: &str, conn: &DbConn) -> Result<FileEntity> {
let (ident, rev): (FileIdentRow, FileRevRow) = file_ident::table
.inner_join(file_rev::table)
.filter(file_rev::sha1.eq(sha1))
+ // This NOT NULL is here to assure the postgresql query planner that it can use an
+ // index
+ .filter(file_rev::sha1.is_not_null())
.filter(file_ident::is_live.eq(true))
.filter(file_ident::redirect_id.is_null())
.first(conn)?;
- file_row2entity(Some(ident), rev, conn)
+ FileEntity::db_from_row(conn, rev, Some(ident))
}
pub fn get_release_handler(
@@ -403,12 +184,7 @@ impl Server {
expand: Option<String>,
conn: &DbConn,
) -> Result<ReleaseEntity> {
- let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = release_ident::table
- .find(id)
- .inner_join(release_rev::table)
- .first(conn)?;
-
- let mut release = release_row2entity(Some(ident), rev, conn)?;
+ let mut release = ReleaseEntity::db_get(conn, FatCatId::from_uuid(id))?;
// For now, if there is any expand param we do them all
if expand.is_some() {
@@ -419,7 +195,6 @@ impl Server {
Some(self.get_container_handler(&fcid2uuid(&cid)?, None, conn)?);
}
}
-
Ok(release)
}
@@ -428,26 +203,29 @@ impl Server {
let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = release_ident::table
.inner_join(release_rev::table)
.filter(release_rev::doi.eq(doi))
+ // This NOT NULL is here to assure the postgresql query planner that it can use an
+ // index
+ .filter(release_rev::doi.is_not_null())
.filter(release_ident::is_live.eq(true))
.filter(release_ident::redirect_id.is_null())
.first(conn)?;
- release_row2entity(Some(ident), rev, conn)
+ ReleaseEntity::db_from_row(conn, rev, Some(ident))
}
pub fn get_release_files_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<FileEntity>> {
- let id = fcid2uuid(&id)?;
+ let ident = FatCatId::from_str(id)?;
let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table
.inner_join(file_ident::table)
.inner_join(file_release::table)
- .filter(file_release::target_release_ident_id.eq(&id))
+ .filter(file_release::target_release_ident_id.eq(&ident.to_uuid()))
.filter(file_ident::is_live.eq(true))
.filter(file_ident::redirect_id.is_null())
.load(conn)?;
rows.into_iter()
- .map(|(rev, ident, _)| file_row2entity(Some(ident), rev, conn))
+ .map(|(rev, ident, _)| FileEntity::db_from_row(conn, rev, Some(ident)))
.collect()
}
@@ -457,12 +235,7 @@ impl Server {
_expand: Option<String>,
conn: &DbConn,
) -> Result<WorkEntity> {
- let (ident, rev): (WorkIdentRow, WorkRevRow) = work_ident::table
- .find(id)
- .inner_join(work_rev::table)
- .first(conn)?;
-
- work_row2entity(Some(ident), rev)
+ WorkEntity::db_get(conn, FatCatId::from_uuid(id))
}
pub fn get_work_releases_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<ReleaseEntity>> {
@@ -476,7 +249,7 @@ impl Server {
.load(conn)?;
rows.into_iter()
- .map(|(rev, ident)| release_row2entity(Some(ident), rev, conn))
+ .map(|(rev, ident)| ReleaseEntity::db_from_row(conn, rev, Some(ident)))
.collect()
}
@@ -485,37 +258,29 @@ impl Server {
entity: models::ContainerEntity,
conn: &DbConn,
) -> Result<EntityEdit> {
- let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
- let editgroup_id: Uuid = match entity.editgroup_id {
- None => get_or_create_editgroup(editor_id, conn)?,
- Some(param) => fcid2uuid(&param)?,
- };
- if let Some(ref extid) = entity.wikidata_qid {
- check_wikidata_qid(extid)?;
- }
- if let Some(ref extid) = entity.issnl {
- check_issn(extid)?;
- }
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_create(conn, &edit_context)?;
+ edit.into_model()
+ }
- let rev_id: Uuid = insert_into(container_rev::table)
- .values((container_rev::name.eq(entity.name),
- container_rev::publisher.eq(entity.publisher),
- container_rev::issnl.eq(entity.issnl),
- container_rev::wikidata_qid.eq(entity.wikidata_qid),
- container_rev::abbrev.eq(entity.abbrev),
- container_rev::coden.eq(entity.coden),
- container_rev::extra_json.eq(entity.extra)))
- .returning(container_rev::id)
- .get_result(conn)?;
- let ident_id: Uuid = insert_into(container_ident::table)
- .values(container_ident::rev_id.eq(rev_id))
- .returning(container_ident::id)
- .get_result(conn)?;
- let edit: ContainerEditRow = insert_into(container_edit::table)
- .values((container_edit::editgroup_id.eq(editgroup_id),
- container_edit::ident_id.eq(ident_id),
- container_edit::rev_id.eq(rev_id)))
- .get_result(conn)?;
+ pub fn update_container_handler(
+ &self,
+ id: &Uuid,
+ entity: models::ContainerEntity,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+ edit.into_model()
+ }
+ pub fn delete_container_handler(
+ &self,
+ id: &Uuid,
+ editgroup_id: Option<Uuid>,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+ let edit = ContainerEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
edit.into_model()
}
@@ -524,37 +289,29 @@ impl Server {
entity: models::CreatorEntity,
conn: &DbConn,
) -> Result<EntityEdit> {
- let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
- let editgroup_id = match entity.editgroup_id {
- None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
- Some(param) => fcid2uuid(&param)?,
- };
- if let Some(ref extid) = entity.orcid {
- check_orcid(extid)?;
- }
- if let Some(ref extid) = entity.wikidata_qid {
- check_wikidata_qid(extid)?;
- }
-
- let rev_id: Uuid = insert_into(creator_rev::table)
- .values((creator_rev::display_name.eq(entity.display_name),
- creator_rev::given_name.eq(entity.given_name),
- creator_rev::surname.eq(entity.surname),
- creator_rev::orcid.eq(entity.orcid),
- creator_rev::wikidata_qid.eq(entity.wikidata_qid),
- creator_rev::extra_json.eq(entity.extra)))
- .returning(creator_rev::id)
- .get_result(conn)?;
- let ident_id: Uuid = insert_into(creator_ident::table)
- .values(creator_ident::rev_id.eq(rev_id))
- .returning(creator_ident::id)
- .get_result(conn)?;
- let edit: CreatorEditRow = insert_into(creator_edit::table)
- .values((creator_edit::editgroup_id.eq(editgroup_id),
- creator_edit::ident_id.eq(ident_id),
- creator_edit::rev_id.eq(rev_id)))
- .get_result(conn)?;
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_create(conn, &edit_context)?;
+ edit.into_model()
+ }
+ pub fn update_creator_handler(
+ &self,
+ id: &Uuid,
+ entity: models::CreatorEntity,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+ edit.into_model()
+ }
+ pub fn delete_creator_handler(
+ &self,
+ id: &Uuid,
+ editgroup_id: Option<Uuid>,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+ let edit = CreatorEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
edit.into_model()
}
@@ -563,77 +320,29 @@ impl Server {
entity: models::FileEntity,
conn: &DbConn,
) -> Result<EntityEdit> {
- let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
- let editgroup_id = match entity.editgroup_id {
- None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
- Some(param) => fcid2uuid(&param)?,
- };
-
- let rev_id: Uuid = insert_into(file_rev::table)
- .values((file_rev::size.eq(entity.size),
- file_rev::sha1.eq(entity.sha1),
- file_rev::sha256.eq(entity.sha256),
- file_rev::md5.eq(entity.md5),
- file_rev::mimetype.eq(entity.mimetype),
- file_rev::extra_json.eq(entity.extra)))
- .returning(file_rev::id)
- .get_result(conn)?;
- let ident_id: Uuid = insert_into(file_ident::table)
- .values(file_ident::rev_id.eq(rev_id))
- .returning(file_ident::id)
- .get_result(conn)?;
- let edit: FileEditRow = insert_into(file_edit::table)
- .values((file_edit::editgroup_id.eq(editgroup_id),
- file_edit::ident_id.eq(ident_id),
- file_edit::rev_id.eq(rev_id)))
- .get_result(conn)?;
-
- let _releases: Option<Vec<FileReleaseRow>> = match entity.releases {
- None => None,
- Some(release_list) => {
- if release_list.is_empty() {
- Some(vec![])
- } else {
- let release_rows: Vec<FileReleaseRow> = release_list
- .iter()
- .map(|r| FileReleaseRow {
- file_rev: edit.rev_id.unwrap(),
- target_release_ident_id: fcid2uuid(r)
- .expect("invalid fatcat identifier"),
- })
- .collect();
- let release_rows: Vec<FileReleaseRow> = insert_into(file_release::table)
- .values(release_rows)
- .get_results(conn)
- .expect("error inserting file_releases");
- Some(release_rows)
- }
- }
- };
-
- let _urls: Option<Vec<FileRevUrlRow>> = match entity.urls {
- None => None,
- Some(url_list) => {
- if url_list.is_empty() {
- Some(vec![])
- } else {
- let url_rows: Vec<FileRevUrlNewRow> = url_list
- .into_iter()
- .map(|u| FileRevUrlNewRow {
- file_rev: edit.rev_id.unwrap(),
- rel: u.rel,
- url: u.url,
- })
- .collect();
- let url_rows: Vec<FileRevUrlRow> = insert_into(file_rev_url::table)
- .values(url_rows)
- .get_results(conn)
- .expect("error inserting file_rev_url");
- Some(url_rows)
- }
- }
- };
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_create(conn, &edit_context)?;
+ edit.into_model()
+ }
+ pub fn update_file_handler(
+ &self,
+ id: &Uuid,
+ entity: models::FileEntity,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+ edit.into_model()
+ }
+ pub fn delete_file_handler(
+ &self,
+ id: &Uuid,
+ editgroup_id: Option<Uuid>,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+ let edit = FileEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
edit.into_model()
}
@@ -642,172 +351,29 @@ impl Server {
entity: models::ReleaseEntity,
conn: &DbConn,
) -> Result<EntityEdit> {
- let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
- let editgroup_id = match entity.editgroup_id {
- None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
- Some(param) => fcid2uuid(&param)?,
- };
- if let Some(ref extid) = entity.doi {
- check_doi(extid)?;
- }
- if let Some(ref extid) = entity.pmid {
- check_pmid(extid)?;
- }
- if let Some(ref extid) = entity.pmcid {
- check_pmcid(extid)?;
- }
- if let Some(ref extid) = entity.wikidata_qid {
- check_wikidata_qid(extid)?;
- }
-
- let work_id = match entity.work_id {
- Some(work_id) => fcid2uuid(&work_id)?,
- None => {
- // If a work_id wasn't passed, create a new work under the current editgroup
- let work_model = models::WorkEntity {
- ident: None,
- revision: None,
- redirect: None,
- state: None,
- editgroup_id: Some(uuid2fcid(&editgroup_id)),
- extra: None,
- };
- let new_entity = self.create_work_handler(work_model, conn)?;
- fcid2uuid(&new_entity.ident)?
- }
- };
-
- let container_id: Option<Uuid> = match entity.container_id {
- Some(id) => Some(fcid2uuid(&id)?),
- None => None,
- };
-
- let rev_id: Uuid = insert_into(release_rev::table)
- .values((release_rev::title.eq(entity.title),
- release_rev::release_type.eq(entity.release_type),
- release_rev::release_status.eq(entity.release_status),
- release_rev::release_date.eq(entity.release_date.map(|v| v.naive_utc().date())),
- release_rev::doi.eq(entity.doi),
- release_rev::pmid.eq(entity.pmid),
- release_rev::pmcid.eq(entity.pmcid),
- release_rev::wikidata_qid.eq(entity.wikidata_qid),
- release_rev::isbn13.eq(entity.isbn13),
- release_rev::core_id.eq(entity.core_id),
- release_rev::volume.eq(entity.volume),
- release_rev::issue.eq(entity.issue),
- release_rev::pages.eq(entity.pages),
- release_rev::work_ident_id.eq(work_id),
- release_rev::container_ident_id.eq(container_id),
- release_rev::publisher.eq(entity.publisher),
- release_rev::language.eq(entity.language),
- release_rev::extra_json.eq(entity.extra)))
- .returning(release_rev::id)
- .get_result(conn)?;
- let ident_id: Uuid = insert_into(release_ident::table)
- .values(release_ident::rev_id.eq(rev_id))
- .returning(release_ident::id)
- .get_result(conn)?;
- let edit: ReleaseEditRow = insert_into(release_edit::table)
- .values((release_edit::editgroup_id.eq(editgroup_id),
- release_edit::ident_id.eq(ident_id),
- release_edit::rev_id.eq(rev_id)))
- .get_result(conn)?;
-
- let _refs: Option<Vec<ReleaseRefRow>> = match entity.refs {
- None => None,
- Some(ref_list) => {
- if ref_list.is_empty() {
- Some(vec![])
- } else {
- let ref_rows: Vec<ReleaseRefNewRow> = ref_list
- .iter()
- .map(|r| ReleaseRefNewRow {
- release_rev: edit.rev_id.unwrap(),
- target_release_ident_id: r.target_release_id
- .clone()
- .map(|v| fcid2uuid(&v).expect("valid fatcat identifier")),
- index_val: r.index,
- key: r.key.clone(),
- container_title: r.container_title.clone(),
- year: r.year,
- title: r.title.clone(),
- locator: r.locator.clone(),
- extra_json: r.extra.clone(),
- })
- .collect();
- let ref_rows: Vec<ReleaseRefRow> = insert_into(release_ref::table)
- .values(ref_rows)
- .get_results(conn)
- .expect("error inserting release_refs");
- Some(ref_rows)
- }
- }
- };
-
- let _contribs: Option<Vec<ReleaseContribRow>> = match entity.contribs {
- None => None,
- Some(contrib_list) => {
- if contrib_list.is_empty() {
- Some(vec![])
- } else {
- let contrib_rows: Vec<ReleaseContribNewRow> = contrib_list
- .iter()
- .map(|c| ReleaseContribNewRow {
- release_rev: edit.rev_id.unwrap(),
- creator_ident_id: c.creator_id
- .clone()
- .map(|v| fcid2uuid(&v).expect("valid fatcat identifier")),
- raw_name: c.raw_name.clone(),
- index_val: c.index,
- role: c.role.clone(),
- extra_json: c.extra.clone(),
- })
- .collect();
- let contrib_rows: Vec<ReleaseContribRow> = insert_into(release_contrib::table)
- .values(contrib_rows)
- .get_results(conn)
- .expect("error inserting release_contribs");
- Some(contrib_rows)
- }
- }
- };
-
- if let Some(abstract_list) = entity.abstracts {
- // For rows that specify content, we need to insert the abstract if it doesn't exist
- // already
- let new_abstracts: Vec<AbstractsRow> = abstract_list
- .iter()
- .filter(|ea| ea.content.is_some())
- .map(|c| AbstractsRow {
- sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(),
- content: c.content.clone().unwrap(),
- })
- .collect();
- if !new_abstracts.is_empty() {
- // Sort of an "upsert"; only inserts new abstract rows if they don't already exist
- insert_into(abstracts::table)
- .values(&new_abstracts)
- .on_conflict(abstracts::sha1)
- .do_nothing()
- .execute(conn)?;
- }
- let release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = abstract_list
- .into_iter()
- .map(|c| ReleaseRevAbstractNewRow {
- release_rev: edit.rev_id.unwrap(),
- abstract_sha1: match c.content {
- Some(ref content) => Sha1::from(content).hexdigest(),
- None => c.sha1.expect("either abstract_sha1 or content is required"),
- },
- lang: c.lang,
- mimetype: c.mimetype,
- })
- .collect();
- insert_into(release_rev_abstract::table)
- .values(release_abstract_rows)
- .execute(conn)?;
- }
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_create(conn, &edit_context)?;
+ edit.into_model()
+ }
+ pub fn update_release_handler(
+ &self,
+ id: &Uuid,
+ entity: models::ReleaseEntity,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+ edit.into_model()
+ }
+ pub fn delete_release_handler(
+ &self,
+ id: &Uuid,
+ editgroup_id: Option<Uuid>,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+ let edit = ReleaseEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
edit.into_model()
}
@@ -816,25 +382,31 @@ impl Server {
entity: models::WorkEntity,
conn: &DbConn,
) -> Result<EntityEdit> {
- let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
- let editgroup_id = match entity.editgroup_id {
- None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
- Some(param) => fcid2uuid(&param)?,
- };
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_create(conn, &edit_context)?;
+ edit.into_model()
+ }
+
+ pub fn update_work_handler(
+ &self,
+ id: &Uuid,
+ entity: models::WorkEntity,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+ let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+ edit.into_model()
+ }
+
+ pub fn delete_work_handler(
+ &self,
+ id: &Uuid,
+ editgroup_id: Option<Uuid>,
+ conn: &DbConn,
+ ) -> Result<EntityEdit> {
+ let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+ let edit = WorkEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
- let rev_id: Uuid = insert_into(work_rev::table)
- .values(work_rev::extra_json.eq(entity.extra))
- .returning(work_rev::id)
- .get_result(conn)?;
- let ident_id: Uuid = insert_into(work_ident::table)
- .values(work_ident::rev_id.eq(rev_id))
- .returning(work_ident::id)
- .get_result(conn)?;
- let edit: WorkEditRow = insert_into(work_edit::table)
- .values((work_edit::editgroup_id.eq(editgroup_id),
- work_edit::ident_id.eq(ident_id),
- work_edit::rev_id.eq(rev_id)))
- .get_result(conn)?;
edit.into_model()
}
@@ -854,8 +426,7 @@ impl Server {
editgroup::description.eq(entity.description),
editgroup::extra_json.eq(entity.extra),
))
- .get_result(conn)
- .expect("error creating edit group");
+ .get_result(conn)?;
Ok(Editgroup {
id: Some(uuid2fcid(&row.id)),
@@ -1067,31 +638,50 @@ impl Server {
Ok(StatsResponse { extra: Some(val) })
}
- entity_batch_handler!(
- create_container_handler,
- create_container_batch_handler,
- ContainerEntity
- );
- entity_batch_handler!(
- create_creator_handler,
- create_creator_batch_handler,
- CreatorEntity
- );
- entity_batch_handler!(create_file_handler, create_file_batch_handler, FileEntity);
- entity_batch_handler!(
- create_release_handler,
- create_release_batch_handler,
- ReleaseEntity
- );
- entity_batch_handler!(create_work_handler, create_work_batch_handler, WorkEntity);
+ entity_batch_handler!(create_container_batch_handler, ContainerEntity);
+ entity_batch_handler!(create_creator_batch_handler, CreatorEntity);
+ entity_batch_handler!(create_file_batch_handler, FileEntity);
+ entity_batch_handler!(create_release_batch_handler, ReleaseEntity);
+ entity_batch_handler!(create_work_batch_handler, WorkEntity);
- entity_history_handler!(
- get_container_history_handler,
- ContainerEditRow,
- container_edit
- );
- entity_history_handler!(get_creator_history_handler, CreatorEditRow, creator_edit);
- entity_history_handler!(get_file_history_handler, FileEditRow, file_edit);
- entity_history_handler!(get_release_history_handler, ReleaseEditRow, release_edit);
- entity_history_handler!(get_work_history_handler, WorkEditRow, work_edit);
+ pub fn get_container_history_handler(
+ &self,
+ id: &Uuid,
+ limit: Option<i64>,
+ conn: &DbConn,
+ ) -> Result<Vec<EntityHistoryEntry>> {
+ ContainerEntity::db_get_history(conn, FatCatId::from_uuid(id), limit)
+ }
+ pub fn get_creator_history_handler(
+ &self,
+ id: &Uuid,
+ limit: Option<i64>,
+ conn: &DbConn,
+ ) -> Result<Vec<EntityHistoryEntry>> {
+ CreatorEntity::db_get_history(conn, FatCatId::from_uuid(id), limit)
+ }
+ pub fn get_file_history_handler(
+ &self,
+ id: &Uuid,
+ limit: Option<i64>,
+ conn: &DbConn,
+ ) -> Result<Vec<EntityHistoryEntry>> {
+ FileEntity::db_get_history(conn, FatCatId::from_uuid(id), limit)
+ }
+ pub fn get_release_history_handler(
+ &self,
+ id: &Uuid,
+ limit: Option<i64>,
+ conn: &DbConn,
+ ) -> Result<Vec<EntityHistoryEntry>> {
+ ReleaseEntity::db_get_history(conn, FatCatId::from_uuid(id), limit)
+ }
+ pub fn get_work_history_handler(
+ &self,
+ id: &Uuid,
+ limit: Option<i64>,
+ conn: &DbConn,
+ ) -> Result<Vec<EntityHistoryEntry>> {
+ WorkEntity::db_get_history(conn, FatCatId::from_uuid(id), limit)
+ }
}
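
Taken together, every entity type now exposes the same create/update/delete/history surface. A hypothetical test-style walkthrough of the container handlers (assumes the swagger-generated ContainerEntity::new(name) constructor and an already-constructed Server):

fn exercise_container_handlers(server: &Server) -> Result<()> {
    let conn = server.db_pool.get().expect("database connection");

    let mut entity = models::ContainerEntity::new("Journal of Sketches".to_string());
    entity.issnl = Some("1234-5678".to_string());

    // create under an implicit editgroup (none passed, autoaccept not involved)
    let edit = server.create_container_handler(entity.clone(), &conn)?;
    let ident = fcid2uuid(&edit.ident)?;

    // update and delete run through the same make_edit_context() machinery
    entity.publisher = Some("Example Press".to_string());
    server.update_container_handler(&ident, entity, &conn)?;
    server.delete_container_handler(&ident, None, &conn)?;
    Ok(())
}
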