author     Bryan Newbold <bnewbold@robocracy.org>  2018-12-26 15:16:03 -0800
committer  Bryan Newbold <bnewbold@robocracy.org>  2018-12-26 15:16:06 -0800
commit     1e2dd5ce2d2afa1016366f9296c90759c54be623 (patch)
tree       7fda85cd02d86938dc5fd5dab85971def136c60a
parent     8e179c13a28b0f647b76fc05c5d6b25522866882 (diff)
fileset/web basic implementation
Also includes small consistency fixes and bugfixes. No new tests yet...
-rw-r--r--  rust/src/api_entity_crud.rs  | 466
-rw-r--r--  rust/src/api_helpers.rs      |  42
-rw-r--r--  rust/src/api_server.rs       |  82
-rw-r--r--  rust/src/api_wrappers.rs     |  58
-rw-r--r--  rust/src/database_models.rs  | 155
-rw-r--r--  rust/src/database_schema.rs  | 163
6 files changed, 928 insertions(+), 38 deletions(-)
diff --git a/rust/src/api_entity_crud.rs b/rust/src/api_entity_crud.rs
index b5e37412..7220f27a 100644
--- a/rust/src/api_entity_crud.rs
+++ b/rust/src/api_entity_crud.rs
@@ -1,5 +1,6 @@
use api_helpers::*;
use api_server::get_release_files;
+use chrono;
use database_models::*;
use database_schema::*;
use diesel::prelude::*;
@@ -856,13 +857,6 @@ impl EntityCrud for FileEntity {
None => (None, None, None),
};
- let releases: Vec<FatCatId> = file_release::table
- .filter(file_release::file_rev.eq(rev_row.id))
- .get_results(conn)?
- .into_iter()
- .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id))
- .collect();
-
let urls: Vec<FileEntityUrls> = file_rev_url::table
.filter(file_rev_url::file_rev.eq(rev_row.id))
.get_results(conn)?
@@ -873,14 +867,21 @@ impl EntityCrud for FileEntity {
})
.collect();
+ let release_ids: Vec<FatCatId> = file_rev_release::table
+ .filter(file_rev_release::file_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: FileRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id))
+ .collect();
+
Ok(FileEntity {
sha1: rev_row.sha1,
sha256: rev_row.sha256,
md5: rev_row.md5,
- size: rev_row.size.map(|v| v as i64),
+ size: rev_row.size_bytes.map(|v| v as i64),
urls: Some(urls),
mimetype: rev_row.mimetype,
- release_ids: Some(releases.iter().map(|fcid| fcid.to_string()).collect()),
+ release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()),
state: state,
ident: ident_id,
revision: Some(rev_row.id.to_string()),
@@ -909,7 +910,7 @@ impl EntityCrud for FileEntity {
models
.iter()
.map(|model| FileRevNewRow {
- size: model.size,
+ size_bytes: model.size,
sha1: model.sha1.clone(),
sha256: model.sha256.clone(),
md5: model.md5.clone(),
@@ -921,23 +922,23 @@ impl EntityCrud for FileEntity {
.returning(file_rev::id)
.get_results(conn)?;
- let mut file_release_rows: Vec<FileReleaseRow> = vec![];
+ let mut file_rev_release_rows: Vec<FileRevReleaseRow> = vec![];
let mut file_url_rows: Vec<FileRevUrlNewRow> = vec![];
for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
match &model.release_ids {
None => (),
Some(release_list) => {
- let these_release_rows: Result<Vec<FileReleaseRow>> = release_list
+ let these_release_rows: Result<Vec<FileRevReleaseRow>> = release_list
.iter()
.map(|r| {
- Ok(FileReleaseRow {
+ Ok(FileRevReleaseRow {
file_rev: *rev_id,
target_release_ident_id: FatCatId::from_str(r)?.to_uuid(),
})
})
.collect();
- file_release_rows.extend(these_release_rows?);
+ file_rev_release_rows.extend(these_release_rows?);
}
};
@@ -957,10 +958,9 @@ impl EntityCrud for FileEntity {
};
}
- if !file_release_rows.is_empty() {
- // TODO: shouldn't it be "file_rev_release"?
- insert_into(file_release::table)
- .values(file_release_rows)
+ if !file_rev_release_rows.is_empty() {
+ insert_into(file_rev_release::table)
+ .values(file_rev_release_rows)
.execute(conn)?;
}
@@ -974,6 +974,432 @@ impl EntityCrud for FileEntity {
}
}
+impl EntityCrud for FilesetEntity {
+ type EditRow = FilesetEditRow;
+ type EditNewRow = FilesetEditNewRow;
+ type IdentRow = FilesetIdentRow;
+ type IdentNewRow = FilesetIdentNewRow;
+ type RevRow = FilesetRevRow;
+
+ generic_db_get!(fileset_ident, fileset_rev);
+ generic_db_get_rev!(fileset_rev);
+ generic_db_expand!();
+ generic_db_create!(fileset_ident, fileset_edit);
+ generic_db_create_batch!(fileset_ident, fileset_edit);
+ generic_db_update!(fileset_ident, fileset_edit);
+ generic_db_delete!(fileset_ident, fileset_edit);
+ generic_db_get_history!(fileset_edit);
+ generic_db_get_edit!(fileset_edit);
+ generic_db_delete_edit!(fileset_edit);
+ generic_db_get_redirects!(fileset_ident);
+ generic_db_accept_edits_batch!("fileset", fileset_ident, fileset_edit);
+ generic_db_insert_rev!();
+
+ fn from_deleted_row(ident_row: Self::IdentRow) -> Result<Self> {
+ if ident_row.rev_id.is_some() {
+ bail!("called from_deleted_row with a non-deleted-state row")
+ }
+
+ Ok(FilesetEntity {
+ manifest: None,
+ urls: None,
+ release_ids: None,
+ state: Some(ident_row.state().unwrap().shortname()),
+ ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()),
+ revision: ident_row.rev_id.map(|u| u.to_string()),
+ redirect: ident_row
+ .redirect_id
+ .map(|u| FatCatId::from_uuid(&u).to_string()),
+ extra: None,
+ edit_extra: None,
+ })
+ }
+
+ fn db_from_row(
+ conn: &DbConn,
+ rev_row: Self::RevRow,
+ ident_row: Option<Self::IdentRow>,
+ _hide: HideFlags,
+ ) -> Result<Self> {
+ let (state, ident_id, redirect_id) = match ident_row {
+ Some(i) => (
+ Some(i.state().unwrap().shortname()),
+ Some(FatCatId::from_uuid(&i.id).to_string()),
+ i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()),
+ ),
+ None => (None, None, None),
+ };
+
+ let manifest: Vec<FilesetEntityManifest> = fileset_rev_file::table
+ .filter(fileset_rev_file::fileset_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: FilesetRevFileRow| FilesetEntityManifest {
+ path: r.path_name,
+ size: r.size_bytes,
+ md5: r.md5,
+ sha1: r.sha1,
+ sha256: r.sha256,
+ extra: r.extra_json,
+ })
+ .collect();
+
+ let urls: Vec<FileEntityUrls> = fileset_rev_url::table
+ .filter(fileset_rev_url::fileset_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: FilesetRevUrlRow| FileEntityUrls {
+ rel: r.rel,
+ url: r.url,
+ })
+ .collect();
+
+ let release_ids: Vec<FatCatId> = fileset_rev_release::table
+ .filter(fileset_rev_release::fileset_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: FilesetRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id))
+ .collect();
+
+ Ok(FilesetEntity {
+ manifest: Some(manifest),
+ urls: Some(urls),
+ release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()),
+ state: state,
+ ident: ident_id,
+ revision: Some(rev_row.id.to_string()),
+ redirect: redirect_id,
+ extra: rev_row.extra_json,
+ edit_extra: None,
+ })
+ }
+
+ fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+ // first verify hash syntax
+ for entity in models {
+ if let Some(ref manifest) = entity.manifest {
+ for file in manifest {
+ if let Some(ref hash) = file.md5 {
+ check_md5(hash)?;
+ }
+ if let Some(ref hash) = file.sha1 {
+ check_sha1(hash)?;
+ }
+ if let Some(ref hash) = file.sha256 {
+ check_sha256(hash)?;
+ }
+ }
+ }
+ }
+
+ let rev_ids: Vec<Uuid> = insert_into(fileset_rev::table)
+ .values(
+ models
+ .iter()
+ .map(|model| FilesetRevNewRow {
+ extra_json: model.extra.clone(),
+ })
+ .collect::<Vec<FilesetRevNewRow>>(),
+ )
+ .returning(fileset_rev::id)
+ .get_results(conn)?;
+
+ let mut fileset_file_rows: Vec<FilesetRevFileNewRow> = vec![];
+ let mut fileset_url_rows: Vec<FilesetRevUrlNewRow> = vec![];
+ let mut fileset_release_rows: Vec<FilesetRevReleaseRow> = vec![];
+
+ for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
+ match &model.manifest {
+ None => (),
+ Some(file_list) => {
+ let these_file_rows: Vec<FilesetRevFileNewRow> = file_list
+ .into_iter()
+ .map(|f| FilesetRevFileNewRow {
+ fileset_rev: *rev_id,
+ path_name: f.path.clone(),
+ size_bytes: f.size,
+ md5: f.md5.clone(),
+ sha1: f.sha1.clone(),
+ sha256: f.sha256.clone(),
+ extra_json: f.extra.clone(),
+ })
+ .collect();
+ fileset_file_rows.extend(these_file_rows);
+ }
+ };
+
+ match &model.urls {
+ None => (),
+ Some(url_list) => {
+ let these_url_rows: Vec<FilesetRevUrlNewRow> = url_list
+ .into_iter()
+ .map(|u| FilesetRevUrlNewRow {
+ fileset_rev: *rev_id,
+ rel: u.rel.clone(),
+ url: u.url.clone(),
+ })
+ .collect();
+ fileset_url_rows.extend(these_url_rows);
+ }
+ };
+
+ match &model.release_ids {
+ None => (),
+ Some(release_list) => {
+ let these_release_rows: Result<Vec<FilesetRevReleaseRow>> = release_list
+ .iter()
+ .map(|r| {
+ Ok(FilesetRevReleaseRow {
+ fileset_rev: *rev_id,
+ target_release_ident_id: FatCatId::from_str(r)?.to_uuid(),
+ })
+ })
+ .collect();
+ fileset_release_rows.extend(these_release_rows?);
+ }
+ };
+ }
+
+ if !fileset_file_rows.is_empty() {
+ insert_into(fileset_rev_file::table)
+ .values(fileset_file_rows)
+ .execute(conn)?;
+ }
+
+ if !fileset_url_rows.is_empty() {
+ insert_into(fileset_rev_url::table)
+ .values(fileset_url_rows)
+ .execute(conn)?;
+ }
+
+ if !fileset_release_rows.is_empty() {
+ insert_into(fileset_rev_release::table)
+ .values(fileset_release_rows)
+ .execute(conn)?;
+ }
+
+ Ok(rev_ids)
+ }
+}
+
+impl EntityCrud for WebcaptureEntity {
+ type EditRow = WebcaptureEditRow;
+ type EditNewRow = WebcaptureEditNewRow;
+ type IdentRow = WebcaptureIdentRow;
+ type IdentNewRow = WebcaptureIdentNewRow;
+ type RevRow = WebcaptureRevRow;
+
+ generic_db_get!(webcapture_ident, webcapture_rev);
+ generic_db_get_rev!(webcapture_rev);
+ generic_db_expand!();
+ generic_db_create!(webcapture_ident, webcapture_edit);
+ generic_db_create_batch!(webcapture_ident, webcapture_edit);
+ generic_db_update!(webcapture_ident, webcapture_edit);
+ generic_db_delete!(webcapture_ident, webcapture_edit);
+ generic_db_get_history!(webcapture_edit);
+ generic_db_get_edit!(webcapture_edit);
+ generic_db_delete_edit!(webcapture_edit);
+ generic_db_get_redirects!(webcapture_ident);
+ generic_db_accept_edits_batch!("webcapture", webcapture_ident, webcapture_edit);
+ generic_db_insert_rev!();
+
+ fn from_deleted_row(ident_row: Self::IdentRow) -> Result<Self> {
+ if ident_row.rev_id.is_some() {
+ bail!("called from_deleted_row with a non-deleted-state row")
+ }
+
+ Ok(WebcaptureEntity {
+ cdx: None,
+ archive_urls: None,
+ original_url: None,
+ timestamp: None,
+ release_ids: None,
+ state: Some(ident_row.state().unwrap().shortname()),
+ ident: Some(FatCatId::from_uuid(&ident_row.id).to_string()),
+ revision: ident_row.rev_id.map(|u| u.to_string()),
+ redirect: ident_row
+ .redirect_id
+ .map(|u| FatCatId::from_uuid(&u).to_string()),
+ extra: None,
+ edit_extra: None,
+ })
+ }
+
+ fn db_from_row(
+ conn: &DbConn,
+ rev_row: Self::RevRow,
+ ident_row: Option<Self::IdentRow>,
+ _hide: HideFlags,
+ ) -> Result<Self> {
+ let (state, ident_id, redirect_id) = match ident_row {
+ Some(i) => (
+ Some(i.state().unwrap().shortname()),
+ Some(FatCatId::from_uuid(&i.id).to_string()),
+ i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()),
+ ),
+ None => (None, None, None),
+ };
+
+ let cdx: Vec<WebcaptureEntityCdx> = webcapture_rev_cdx::table
+ .filter(webcapture_rev_cdx::webcapture_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|c: WebcaptureRevCdxRow| WebcaptureEntityCdx {
+ surt: c.surt,
+ timestamp: c.timestamp,
+ url: c.url,
+ mimetype: c.mimetype,
+ status_code: c.status_code,
+ sha1: c.sha1,
+ sha256: c.sha256,
+ })
+ .collect();
+
+ let archive_urls: Vec<WebcaptureEntityArchiveUrls> = webcapture_rev_url::table
+ .filter(webcapture_rev_url::webcapture_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: WebcaptureRevUrlRow| WebcaptureEntityArchiveUrls {
+ rel: r.rel,
+ url: r.url,
+ })
+ .collect();
+
+ let release_ids: Vec<FatCatId> = webcapture_rev_release::table
+ .filter(webcapture_rev_release::webcapture_rev.eq(rev_row.id))
+ .get_results(conn)?
+ .into_iter()
+ .map(|r: WebcaptureRevReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id))
+ .collect();
+
+ Ok(WebcaptureEntity {
+ cdx: Some(cdx),
+ archive_urls: Some(archive_urls),
+ original_url: Some(rev_row.original_url),
+ timestamp: Some(chrono::DateTime::from_utc(rev_row.timestamp, chrono::Utc)),
+ release_ids: Some(release_ids.iter().map(|fcid| fcid.to_string()).collect()),
+ state: state,
+ ident: ident_id,
+ revision: Some(rev_row.id.to_string()),
+ redirect: redirect_id,
+ extra: rev_row.extra_json,
+ edit_extra: None,
+ })
+ }
+
+ fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+ // first verify hash syntax, and presence of required fields
+ for entity in models {
+ if let Some(ref cdx) = entity.cdx {
+ for row in cdx {
+ check_sha1(&row.sha1)?;
+ if let Some(ref hash) = row.sha256 {
+ check_sha256(hash)?;
+ }
+ }
+ }
+ if entity.timestamp.is_none() || entity.original_url.is_none() {
+ return Err(ErrorKind::OtherBadRequest(
+ "timestamp and original_url are required for webcapture entities".to_string(),
+ )
+ .into());
+ }
+ }
+
+ let rev_ids: Vec<Uuid> = insert_into(webcapture_rev::table)
+ .values(
+ models
+ .iter()
+ .map(|model| WebcaptureRevNewRow {
+ // these unwraps safe because of check above
+ original_url: model.original_url.clone().unwrap(),
+ timestamp: model.timestamp.unwrap().naive_utc(),
+ extra_json: model.extra.clone(),
+ })
+ .collect::<Vec<WebcaptureRevNewRow>>(),
+ )
+ .returning(webcapture_rev::id)
+ .get_results(conn)?;
+
+ let mut webcapture_cdx_rows: Vec<WebcaptureRevCdxNewRow> = vec![];
+ let mut webcapture_url_rows: Vec<WebcaptureRevUrlNewRow> = vec![];
+ let mut webcapture_release_rows: Vec<WebcaptureRevReleaseRow> = vec![];
+
+ for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
+ match &model.cdx {
+ None => (),
+ Some(cdx_list) => {
+ let these_cdx_rows: Vec<WebcaptureRevCdxNewRow> = cdx_list
+ .into_iter()
+ .map(|c| WebcaptureRevCdxNewRow {
+ webcapture_rev: *rev_id,
+ surt: c.surt.clone(),
+ timestamp: c.timestamp,
+ url: c.url.clone(),
+ mimetype: c.mimetype.clone(),
+ status_code: c.status_code,
+ sha1: c.sha1.clone(),
+ sha256: c.sha256.clone(),
+ })
+ .collect();
+ webcapture_cdx_rows.extend(these_cdx_rows);
+ }
+ };
+
+ match &model.archive_urls {
+ None => (),
+ Some(url_list) => {
+ let these_url_rows: Vec<WebcaptureRevUrlNewRow> = url_list
+ .into_iter()
+ .map(|u| WebcaptureRevUrlNewRow {
+ webcapture_rev: *rev_id,
+ rel: u.rel.clone(),
+ url: u.url.clone(),
+ })
+ .collect();
+ webcapture_url_rows.extend(these_url_rows);
+ }
+ };
+
+ match &model.release_ids {
+ None => (),
+ Some(release_list) => {
+ let these_release_rows: Result<Vec<WebcaptureRevReleaseRow>> = release_list
+ .iter()
+ .map(|r| {
+ Ok(WebcaptureRevReleaseRow {
+ webcapture_rev: *rev_id,
+ target_release_ident_id: FatCatId::from_str(r)?.to_uuid(),
+ })
+ })
+ .collect();
+ webcapture_release_rows.extend(these_release_rows?);
+ }
+ };
+ }
+
+ if !webcapture_cdx_rows.is_empty() {
+ insert_into(webcapture_rev_cdx::table)
+ .values(webcapture_cdx_rows)
+ .execute(conn)?;
+ }
+
+ if !webcapture_url_rows.is_empty() {
+ insert_into(webcapture_rev_url::table)
+ .values(webcapture_url_rows)
+ .execute(conn)?;
+ }
+
+ if !webcapture_release_rows.is_empty() {
+ insert_into(webcapture_rev_release::table)
+ .values(webcapture_release_rows)
+ .execute(conn)?;
+ }
+
+ Ok(rev_ids)
+ }
+}
+
impl EntityCrud for ReleaseEntity {
type EditRow = ReleaseEditRow;
type EditNewRow = ReleaseEditNewRow;
@@ -1013,6 +1439,8 @@ impl EntityCrud for ReleaseEntity {
issue: None,
pages: None,
files: None,
+ filesets: None,
+ webcaptures: None,
container: None,
container_id: None,
publisher: None,
@@ -1275,6 +1703,8 @@ impl EntityCrud for ReleaseEntity {
issue: rev_row.issue,
pages: rev_row.pages,
files: None,
+ filesets: None,
+ webcaptures: None,
container: None,
container_id: rev_row
.container_ident_id
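
For reference, the WebcaptureEntity code above round-trips timestamps between the API model (chrono::DateTime<Utc>) and the database row (chrono::NaiveDateTime, assumed to be UTC). A minimal standalone sketch of that conversion, using the chrono 0.4-era calls seen in the diff; the example value is arbitrary:

// Sketch of the webcapture timestamp round-trip. The database row
// (WebcaptureRevRow.timestamp) holds a NaiveDateTime; the API model
// holds a DateTime<Utc>, as in db_from_row / db_insert_revs above.
extern crate chrono;

use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc};

fn main() {
    // A value as it might come out of the webcapture_rev.timestamp column.
    let db_ts: NaiveDateTime = NaiveDate::from_ymd(2018, 12, 26).and_hms(15, 16, 3);

    // db_from_row direction: NaiveDateTime -> DateTime<Utc>.
    let api_ts: DateTime<Utc> = DateTime::from_utc(db_ts, Utc);

    // db_insert_revs direction: DateTime<Utc> -> NaiveDateTime.
    assert_eq!(api_ts.naive_utc(), db_ts);
    println!("{}", api_ts.to_rfc3339());
}
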
diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs
index b837dfc2..ff164bef 100644
--- a/rust/src/api_helpers.rs
+++ b/rust/src/api_helpers.rs
@@ -38,6 +38,8 @@ impl EditContext {
#[derive(Clone, Copy, PartialEq)]
pub struct ExpandFlags {
pub files: bool,
+ pub filesets: bool,
+ pub webcaptures: bool,
pub container: bool,
pub releases: bool,
pub creators: bool,
@@ -55,6 +57,8 @@ impl ExpandFlags {
pub fn from_str_list(list: &[&str]) -> ExpandFlags {
ExpandFlags {
files: list.contains(&"files"),
+ filesets: list.contains(&"filesets"),
+ webcaptures: list.contains(&"webcaptures"),
container: list.contains(&"container"),
releases: list.contains(&"releases"),
creators: list.contains(&"creators"),
@@ -63,6 +67,8 @@ impl ExpandFlags {
pub fn none() -> ExpandFlags {
ExpandFlags {
files: false,
+ filesets: false,
+ webcaptures: false,
container: false,
releases: false,
creators: false,
@@ -77,6 +83,8 @@ fn test_expand_flags() {
assert!(ExpandFlags::from_str_list(&vec!["file"]).files == false);
let all = ExpandFlags::from_str_list(&vec![
"files",
+ "filesets",
+ "webcaptures",
"container",
"other_thing",
"releases",
@@ -85,6 +93,8 @@ fn test_expand_flags() {
assert!(
all == ExpandFlags {
files: true,
+ filesets: true,
+ webcaptures: true,
container: true,
releases: true,
creators: true
@@ -94,10 +104,14 @@ fn test_expand_flags() {
assert!(ExpandFlags::from_str("files").unwrap().files == true);
assert!(ExpandFlags::from_str("something,,files").unwrap().files == true);
assert!(ExpandFlags::from_str("file").unwrap().files == false);
- let all = ExpandFlags::from_str("files,container,other_thing,releases,creators").unwrap();
+ let all =
+ ExpandFlags::from_str("files,container,other_thing,releases,creators,filesets,webcaptures")
+ .unwrap();
assert!(
all == ExpandFlags {
files: true,
+ filesets: true,
+ webcaptures: true,
container: true,
releases: true,
creators: true
@@ -107,9 +121,14 @@ fn test_expand_flags() {
#[derive(Clone, Copy, PartialEq)]
pub struct HideFlags {
+ // release
pub abstracts: bool,
pub refs: bool,
pub contribs: bool,
+ // fileset
+ pub manifest: bool,
+ // webcapture
+ pub cdx: bool,
}
impl FromStr for HideFlags {
@@ -126,6 +145,8 @@ impl HideFlags {
abstracts: list.contains(&"abstracts"),
refs: list.contains(&"refs"),
contribs: list.contains(&"contribs"),
+ manifest: list.contains(&"manifest"),
+ cdx: list.contains(&"cdx"),
}
}
pub fn none() -> HideFlags {
@@ -133,6 +154,8 @@ impl HideFlags {
abstracts: false,
refs: false,
contribs: false,
+ manifest: false,
+ cdx: false,
}
}
}
@@ -142,12 +165,21 @@ fn test_hide_flags() {
assert!(HideFlags::from_str_list(&vec![]).abstracts == false);
assert!(HideFlags::from_str_list(&vec!["abstracts"]).abstracts == true);
assert!(HideFlags::from_str_list(&vec!["abstract"]).abstracts == false);
- let all = HideFlags::from_str_list(&vec!["abstracts", "refs", "other_thing", "contribs"]);
+ let all = HideFlags::from_str_list(&vec![
+ "abstracts",
+ "refs",
+ "other_thing",
+ "contribs",
+ "manifest",
+ "cdx",
+ ]);
assert!(
all == HideFlags {
abstracts: true,
refs: true,
contribs: true,
+ manifest: true,
+ cdx: true,
}
);
assert!(HideFlags::from_str("").unwrap().abstracts == false);
@@ -159,12 +191,14 @@ fn test_hide_flags() {
== true
);
assert!(HideFlags::from_str("file").unwrap().abstracts == false);
- let all = HideFlags::from_str("abstracts,refs,other_thing,contribs").unwrap();
+ let all = HideFlags::from_str("abstracts,cdx,refs,manifest,other_thing,contribs").unwrap();
assert!(
all == HideFlags {
abstracts: true,
refs: true,
contribs: true,
+ manifest: true,
+ cdx: true,
}
);
}
@@ -229,6 +263,8 @@ pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result<Changel
ContainerEntity::db_accept_edits(conn, editgroup_id)?;
CreatorEntity::db_accept_edits(conn, editgroup_id)?;
FileEntity::db_accept_edits(conn, editgroup_id)?;
+ FilesetEntity::db_accept_edits(conn, editgroup_id)?;
+ WebcaptureEntity::db_accept_edits(conn, editgroup_id)?;
ReleaseEntity::db_accept_edits(conn, editgroup_id)?;
WorkEntity::db_accept_edits(conn, editgroup_id)?;
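
The ExpandFlags / HideFlags additions above all follow the same list-membership pattern. A standalone sketch of that pattern follows; Flags here is a simplified stand-in carrying only the two new expand fields (the real structs have more), and the comma-splitting in from_str is an assumption based on the tests, not the actual FromStr impl:

// Simplified sketch of the flag-parsing pattern used by ExpandFlags/HideFlags.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Flags {
    filesets: bool,
    webcaptures: bool,
}

impl Flags {
    fn from_str_list(list: &[&str]) -> Flags {
        Flags {
            filesets: list.contains(&"filesets"),
            webcaptures: list.contains(&"webcaptures"),
        }
    }

    // Assumed behavior: split the query parameter on commas, then check membership.
    fn from_str(s: &str) -> Flags {
        let list: Vec<&str> = s.split(',').collect();
        Flags::from_str_list(&list)
    }
}

fn main() {
    // Unknown tokens like "other_thing" are ignored; only exact matches count.
    let flags = Flags::from_str("filesets,other_thing");
    assert!(flags.filesets);
    assert!(!flags.webcaptures);
}
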
diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs
index 5b95f149..d264afbc 100644
--- a/rust/src/api_server.rs
+++ b/rust/src/api_server.rs
@@ -59,10 +59,10 @@ pub fn get_release_files(
hide_flags: HideFlags,
conn: &DbConn,
) -> Result<Vec<FileEntity>> {
- let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table
+ let rows: Vec<(FileRevRow, FileIdentRow, FileRevReleaseRow)> = file_rev::table
.inner_join(file_ident::table)
- .inner_join(file_release::table)
- .filter(file_release::target_release_ident_id.eq(&ident.to_uuid()))
+ .inner_join(file_rev_release::table)
+ .filter(file_rev_release::target_release_ident_id.eq(&ident.to_uuid()))
.filter(file_ident::is_live.eq(true))
.filter(file_ident::redirect_id.is_null())
.load(conn)?;
@@ -72,6 +72,46 @@ pub fn get_release_files(
.collect()
}
+pub fn get_release_filesets(
+ ident: FatCatId,
+ hide_flags: HideFlags,
+ conn: &DbConn,
+) -> Result<Vec<FilesetEntity>> {
+ let rows: Vec<(FilesetRevRow, FilesetIdentRow, FilesetRevReleaseRow)> = fileset_rev::table
+ .inner_join(fileset_ident::table)
+ .inner_join(fileset_rev_release::table)
+ .filter(fileset_rev_release::target_release_ident_id.eq(&ident.to_uuid()))
+ .filter(fileset_ident::is_live.eq(true))
+ .filter(fileset_ident::redirect_id.is_null())
+ .load(conn)?;
+
+ rows.into_iter()
+ .map(|(rev, ident, _)| FilesetEntity::db_from_row(conn, rev, Some(ident), hide_flags))
+ .collect()
+}
+
+pub fn get_release_webcaptures(
+ ident: FatCatId,
+ hide_flags: HideFlags,
+ conn: &DbConn,
+) -> Result<Vec<WebcaptureEntity>> {
+ let rows: Vec<(
+ WebcaptureRevRow,
+ WebcaptureIdentRow,
+ WebcaptureRevReleaseRow,
+ )> = webcapture_rev::table
+ .inner_join(webcapture_ident::table)
+ .inner_join(webcapture_rev_release::table)
+ .filter(webcapture_rev_release::target_release_ident_id.eq(&ident.to_uuid()))
+ .filter(webcapture_ident::is_live.eq(true))
+ .filter(webcapture_ident::redirect_id.is_null())
+ .load(conn)?;
+
+ rows.into_iter()
+ .map(|(rev, ident, _)| WebcaptureEntity::db_from_row(conn, rev, Some(ident), hide_flags))
+ .collect()
+}
+
impl Server {
pub fn lookup_container_handler(
&self,
@@ -304,6 +344,24 @@ impl Server {
get_release_files(ident, hide_flags, conn)
}
+ pub fn get_release_filesets_handler(
+ &self,
+ ident: FatCatId,
+ hide_flags: HideFlags,
+ conn: &DbConn,
+ ) -> Result<Vec<FilesetEntity>> {
+ get_release_filesets(ident, hide_flags, conn)
+ }
+
+ pub fn get_release_webcaptures_handler(
+ &self,
+ ident: FatCatId,
+ hide_flags: HideFlags,
+ conn: &DbConn,
+ ) -> Result<Vec<WebcaptureEntity>> {
+ get_release_webcaptures(ident, hide_flags, conn)
+ }
+
pub fn get_work_releases_handler(
&self,
ident: FatCatId,
@@ -381,6 +439,22 @@ impl Server {
.map(|e: FileEditRow| e.into_model().unwrap())
.collect(),
),
+ filesets: Some(
+ fileset_edit::table
+ .filter(fileset_edit::editgroup_id.eq(editgroup_id.to_uuid()))
+ .get_results(conn)?
+ .into_iter()
+ .map(|e: FilesetEditRow| e.into_model().unwrap())
+ .collect(),
+ ),
+ webcaptures: Some(
+ webcapture_edit::table
+ .filter(webcapture_edit::editgroup_id.eq(editgroup_id.to_uuid()))
+ .get_results(conn)?
+ .into_iter()
+ .map(|e: WebcaptureEditRow| e.into_model().unwrap())
+ .collect(),
+ ),
releases: Some(
release_edit::table
.filter(release_edit::editgroup_id.eq(editgroup_id.to_uuid()))
@@ -481,6 +555,8 @@ impl Server {
entity_batch_handler!(create_container_batch_handler, ContainerEntity);
entity_batch_handler!(create_creator_batch_handler, CreatorEntity);
entity_batch_handler!(create_file_batch_handler, FileEntity);
+ entity_batch_handler!(create_fileset_batch_handler, FilesetEntity);
+ entity_batch_handler!(create_webcapture_batch_handler, WebcaptureEntity);
entity_batch_handler!(create_release_batch_handler, ReleaseEntity);
entity_batch_handler!(create_work_batch_handler, WorkEntity);
}
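
The new get_release_filesets / get_release_webcaptures helpers above select entities via a three-way Diesel join. As a rough in-memory illustration of the selection semantics only (plain integer ids stand in for Uuid, and these structs are not the generated Diesel row types): keep live, non-redirected idents whose current revision links to the target release.

// In-memory illustration of the join/filter logic, not the actual Diesel query.
type Id = u64;

struct IdentRow {
    id: Id,
    rev_id: Option<Id>,
    redirect_id: Option<Id>,
    is_live: bool,
}

struct RevReleaseRow {
    rev: Id,
    target_release_ident_id: Id,
}

fn release_entities(idents: &[IdentRow], links: &[RevReleaseRow], release: Id) -> Vec<Id> {
    idents
        .iter()
        .filter(|i| i.is_live && i.redirect_id.is_none())
        .filter(|i| {
            i.rev_id.map_or(false, |rev| {
                links
                    .iter()
                    .any(|l| l.rev == rev && l.target_release_ident_id == release)
            })
        })
        .map(|i| i.id)
        .collect()
}

fn main() {
    let idents = vec![
        IdentRow { id: 1, rev_id: Some(10), redirect_id: None, is_live: true },
        IdentRow { id: 2, rev_id: Some(20), redirect_id: Some(1), is_live: true },
    ];
    let links = vec![RevReleaseRow { rev: 10, target_release_ident_id: 99 }];
    // Only ident 1 is live, non-redirected, and linked to release 99.
    assert_eq!(release_entities(&idents, &links, 99), vec![1]);
}
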
diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs
index 85b698aa..aa168076 100644
--- a/rust/src/api_wrappers.rs
+++ b/rust/src/api_wrappers.rs
@@ -599,6 +599,54 @@ impl Api for Server {
FileEntity
);
wrap_entity_handlers!(
+ get_fileset,
+ GetFilesetResponse,
+ create_fileset,
+ CreateFilesetResponse,
+ create_fileset_batch,
+ create_fileset_batch_handler,
+ CreateFilesetBatchResponse,
+ update_fileset,
+ UpdateFilesetResponse,
+ delete_fileset,
+ DeleteFilesetResponse,
+ get_fileset_history,
+ GetFilesetHistoryResponse,
+ get_fileset_edit,
+ GetFilesetEditResponse,
+ delete_fileset_edit,
+ DeleteFilesetEditResponse,
+ get_fileset_revision,
+ GetFilesetRevisionResponse,
+ get_fileset_redirects,
+ GetFilesetRedirectsResponse,
+ FilesetEntity
+ );
+ wrap_entity_handlers!(
+ get_webcapture,
+ GetWebcaptureResponse,
+ create_webcapture,
+ CreateWebcaptureResponse,
+ create_webcapture_batch,
+ create_webcapture_batch_handler,
+ CreateWebcaptureBatchResponse,
+ update_webcapture,
+ UpdateWebcaptureResponse,
+ delete_webcapture,
+ DeleteWebcaptureResponse,
+ get_webcapture_history,
+ GetWebcaptureHistoryResponse,
+ get_webcapture_edit,
+ GetWebcaptureEditResponse,
+ delete_webcapture_edit,
+ DeleteWebcaptureEditResponse,
+ get_webcapture_revision,
+ GetWebcaptureRevisionResponse,
+ get_webcapture_redirects,
+ GetWebcaptureRedirectsResponse,
+ WebcaptureEntity
+ );
+ wrap_entity_handlers!(
get_release,
GetReleaseResponse,
create_release,
@@ -666,6 +714,16 @@ impl Api for Server {
GetReleaseFilesResponse
);
wrap_fcid_hide_handler!(
+ get_release_filesets,
+ get_release_filesets_handler,
+ GetReleaseFilesetsResponse
+ );
+ wrap_fcid_hide_handler!(
+ get_release_webcaptures,
+ get_release_webcaptures_handler,
+ GetReleaseWebcapturesResponse
+ );
+ wrap_fcid_hide_handler!(
get_work_releases,
get_work_releases_handler,
GetWorkReleasesResponse
diff --git a/rust/src/database_models.rs b/rust/src/database_models.rs
index 2431e0fe..c913b98e 100644
--- a/rust/src/database_models.rs
+++ b/rust/src/database_models.rs
@@ -209,7 +209,7 @@ pub struct FileRevUrlNewRow {
pub struct FileRevRow {
pub id: Uuid,
pub extra_json: Option<serde_json::Value>,
- pub size: Option<i64>,
+ pub size_bytes: Option<i64>,
pub sha1: Option<String>,
pub sha256: Option<String>,
pub md5: Option<String>,
@@ -220,7 +220,7 @@ pub struct FileRevRow {
#[table_name = "file_rev"]
pub struct FileRevNewRow {
pub extra_json: Option<serde_json::Value>,
- pub size: Option<i64>,
+ pub size_bytes: Option<i64>,
pub sha1: Option<String>,
pub sha256: Option<String>,
pub md5: Option<String>,
@@ -237,6 +237,139 @@ entity_structs!(
);
#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "fileset_rev_file"]
+pub struct FilesetRevFileRow {
+ pub id: i64,
+ pub fileset_rev: Uuid,
+ pub path_name: String,
+ pub size_bytes: i64,
+ pub md5: Option<String>,
+ pub sha1: Option<String>,
+ pub sha256: Option<String>,
+ pub extra_json: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Queryable, Associations, AsChangeset, Insertable)]
+#[table_name = "fileset_rev_file"]
+pub struct FilesetRevFileNewRow {
+ pub fileset_rev: Uuid,
+ pub path_name: String,
+ pub size_bytes: i64,
+ pub md5: Option<String>,
+ pub sha1: Option<String>,
+ pub sha256: Option<String>,
+ pub extra_json: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "fileset_rev_url"]
+pub struct FilesetRevUrlRow {
+ pub id: i64,
+ pub fileset_rev: Uuid,
+ pub rel: String,
+ pub url: String,
+}
+
+#[derive(Debug, Queryable, Associations, AsChangeset, Insertable)]
+#[table_name = "fileset_rev_url"]
+pub struct FilesetRevUrlNewRow {
+ pub fileset_rev: Uuid,
+ pub rel: String,
+ pub url: String,
+}
+
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "fileset_rev"]
+pub struct FilesetRevRow {
+ pub id: Uuid,
+ pub extra_json: Option<serde_json::Value>,
+}
+
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "fileset_rev"]
+pub struct FilesetRevNewRow {
+ pub extra_json: Option<serde_json::Value>,
+}
+
+entity_structs!(
+ "fileset_edit",
+ FilesetEditRow,
+ FilesetEditNewRow,
+ "fileset_ident",
+ FilesetIdentRow,
+ FilesetIdentNewRow
+);
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "webcapture_rev_cdx"]
+pub struct WebcaptureRevCdxRow {
+ pub id: i64,
+ pub webcapture_rev: Uuid,
+ pub surt: String,
+ pub timestamp: i64,
+ pub url: String,
+ pub mimetype: Option<String>,
+ pub status_code: i64,
+ pub sha1: String,
+ pub sha256: Option<String>,
+}
+
+#[derive(Debug, Queryable, Associations, AsChangeset, Insertable)]
+#[table_name = "webcapture_rev_cdx"]
+pub struct WebcaptureRevCdxNewRow {
+ pub webcapture_rev: Uuid,
+ pub surt: String,
+ pub timestamp: i64,
+ pub url: String,
+ pub mimetype: Option<String>,
+ pub status_code: i64,
+ pub sha1: String,
+ pub sha256: Option<String>,
+}
+
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "webcapture_rev_url"]
+pub struct WebcaptureRevUrlRow {
+ pub id: i64,
+ pub webcapture_rev: Uuid,
+ pub rel: String,
+ pub url: String,
+}
+
+#[derive(Debug, Queryable, Associations, AsChangeset, Insertable)]
+#[table_name = "webcapture_rev_url"]
+pub struct WebcaptureRevUrlNewRow {
+ pub webcapture_rev: Uuid,
+ pub rel: String,
+ pub url: String,
+}
+
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[table_name = "webcapture_rev"]
+pub struct WebcaptureRevRow {
+ pub id: Uuid,
+ pub extra_json: Option<serde_json::Value>,
+ pub original_url: String,
+ pub timestamp: chrono::NaiveDateTime,
+}
+
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "webcapture_rev"]
+pub struct WebcaptureRevNewRow {
+ pub extra_json: Option<serde_json::Value>,
+ pub original_url: String,
+ pub timestamp: chrono::NaiveDateTime,
+}
+
+entity_structs!(
+ "webcapture_edit",
+ WebcaptureEditRow,
+ WebcaptureEditNewRow,
+ "webcapture_ident",
+ WebcaptureIdentRow,
+ WebcaptureIdentNewRow
+);
+
+#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
#[table_name = "release_rev"]
pub struct ReleaseRevRow {
pub id: Uuid,
@@ -388,13 +521,27 @@ pub struct ReleaseRefNewRow {
}
#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
-#[table_name = "file_release"]
-pub struct FileReleaseRow {
+#[table_name = "file_rev_release"]
+pub struct FileRevReleaseRow {
pub file_rev: Uuid,
pub target_release_ident_id: Uuid,
}
#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
+#[table_name = "fileset_rev_release"]
+pub struct FilesetRevReleaseRow {
+ pub fileset_rev: Uuid,
+ pub target_release_ident_id: Uuid,
+}
+
+#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
+#[table_name = "webcapture_rev_release"]
+pub struct WebcaptureRevReleaseRow {
+ pub webcapture_rev: Uuid,
+ pub target_release_ident_id: Uuid,
+}
+
+#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "abstracts"]
pub struct AbstractsRow {
pub sha1: String,
diff --git a/rust/src/database_schema.rs b/rust/src/database_schema.rs
index 6c1fb929..436f2989 100644
--- a/rust/src/database_schema.rs
+++ b/rust/src/database_schema.rs
@@ -125,28 +125,86 @@ table! {
}
table! {
- file_release (file_rev, target_release_ident_id) {
+ file_rev (id) {
+ id -> Uuid,
+ extra_json -> Nullable<Jsonb>,
+ size_bytes -> Nullable<Int8>,
+ sha1 -> Nullable<Text>,
+ sha256 -> Nullable<Text>,
+ md5 -> Nullable<Text>,
+ mimetype -> Nullable<Text>,
+ }
+}
+
+table! {
+ file_rev_release (file_rev, target_release_ident_id) {
file_rev -> Uuid,
target_release_ident_id -> Uuid,
}
}
table! {
- file_rev (id) {
+ file_rev_url (id) {
+ id -> Int8,
+ file_rev -> Uuid,
+ rel -> Text,
+ url -> Text,
+ }
+}
+
+table! {
+ fileset_edit (id) {
+ id -> Int8,
+ editgroup_id -> Uuid,
+ updated -> Timestamptz,
+ ident_id -> Uuid,
+ rev_id -> Nullable<Uuid>,
+ redirect_id -> Nullable<Uuid>,
+ prev_rev -> Nullable<Uuid>,
+ extra_json -> Nullable<Jsonb>,
+ }
+}
+
+table! {
+ fileset_ident (id) {
+ id -> Uuid,
+ is_live -> Bool,
+ rev_id -> Nullable<Uuid>,
+ redirect_id -> Nullable<Uuid>,
+ }
+}
+
+table! {
+ fileset_rev (id) {
id -> Uuid,
extra_json -> Nullable<Jsonb>,
- size -> Nullable<Int8>,
+ }
+}
+
+table! {
+ fileset_rev_file (id) {
+ id -> Int8,
+ fileset_rev -> Uuid,
+ path_name -> Text,
+ size_bytes -> Int8,
+ md5 -> Nullable<Text>,
sha1 -> Nullable<Text>,
sha256 -> Nullable<Text>,
- md5 -> Nullable<Text>,
- mimetype -> Nullable<Text>,
+ extra_json -> Nullable<Jsonb>,
}
}
table! {
- file_rev_url (id) {
+ fileset_rev_release (fileset_rev, target_release_ident_id) {
+ fileset_rev -> Uuid,
+ target_release_ident_id -> Uuid,
+ }
+}
+
+table! {
+ fileset_rev_url (id) {
id -> Int8,
- file_rev -> Uuid,
+ fileset_rev -> Uuid,
rel -> Text,
url -> Text,
}
@@ -237,6 +295,67 @@ table! {
}
table! {
+ webcapture_edit (id) {
+ id -> Int8,
+ editgroup_id -> Uuid,
+ updated -> Timestamptz,
+ ident_id -> Uuid,
+ rev_id -> Nullable<Uuid>,
+ redirect_id -> Nullable<Uuid>,
+ prev_rev -> Nullable<Uuid>,
+ extra_json -> Nullable<Jsonb>,
+ }
+}
+
+table! {
+ webcapture_ident (id) {
+ id -> Uuid,
+ is_live -> Bool,
+ rev_id -> Nullable<Uuid>,
+ redirect_id -> Nullable<Uuid>,
+ }
+}
+
+table! {
+ webcapture_rev (id) {
+ id -> Uuid,
+ extra_json -> Nullable<Jsonb>,
+ original_url -> Text,
+ timestamp -> Timestamptz,
+ }
+}
+
+table! {
+ webcapture_rev_cdx (id) {
+ id -> Int8,
+ webcapture_rev -> Uuid,
+ surt -> Text,
+ timestamp -> Int8,
+ url -> Text,
+ mimetype -> Nullable<Text>,
+ status_code -> Int8,
+ sha1 -> Text,
+ sha256 -> Nullable<Text>,
+ }
+}
+
+table! {
+ webcapture_rev_release (webcapture_rev, target_release_ident_id) {
+ webcapture_rev -> Uuid,
+ target_release_ident_id -> Uuid,
+ }
+}
+
+table! {
+ webcapture_rev_url (id) {
+ id -> Int8,
+ webcapture_rev -> Uuid,
+ rel -> Text,
+ url -> Text,
+ }
+}
+
+table! {
work_edit (id) {
id -> Int8,
editgroup_id -> Uuid,
@@ -272,9 +391,15 @@ joinable!(creator_edit -> editgroup (editgroup_id));
joinable!(creator_ident -> creator_rev (rev_id));
joinable!(file_edit -> editgroup (editgroup_id));
joinable!(file_ident -> file_rev (rev_id));
-joinable!(file_release -> file_rev (file_rev));
-joinable!(file_release -> release_ident (target_release_ident_id));
+joinable!(file_rev_release -> file_rev (file_rev));
+joinable!(file_rev_release -> release_ident (target_release_ident_id));
joinable!(file_rev_url -> file_rev (file_rev));
+joinable!(fileset_edit -> editgroup (editgroup_id));
+joinable!(fileset_ident -> fileset_rev (rev_id));
+joinable!(fileset_rev_file -> fileset_rev (fileset_rev));
+joinable!(fileset_rev_release -> fileset_rev (fileset_rev));
+joinable!(fileset_rev_release -> release_ident (target_release_ident_id));
+joinable!(fileset_rev_url -> fileset_rev (fileset_rev));
joinable!(release_contrib -> creator_ident (creator_ident_id));
joinable!(release_contrib -> release_rev (release_rev));
joinable!(release_edit -> editgroup (editgroup_id));
@@ -285,6 +410,12 @@ joinable!(release_rev -> container_ident (container_ident_id));
joinable!(release_rev -> work_ident (work_ident_id));
joinable!(release_rev_abstract -> abstracts (abstract_sha1));
joinable!(release_rev_abstract -> release_rev (release_rev));
+joinable!(webcapture_edit -> editgroup (editgroup_id));
+joinable!(webcapture_ident -> webcapture_rev (rev_id));
+joinable!(webcapture_rev_cdx -> webcapture_rev (webcapture_rev));
+joinable!(webcapture_rev_release -> release_ident (target_release_ident_id));
+joinable!(webcapture_rev_release -> webcapture_rev (webcapture_rev));
+joinable!(webcapture_rev_url -> webcapture_rev (webcapture_rev));
joinable!(work_edit -> editgroup (editgroup_id));
joinable!(work_ident -> work_rev (rev_id));
@@ -301,15 +432,27 @@ allow_tables_to_appear_in_same_query!(
editor,
file_edit,
file_ident,
- file_release,
file_rev,
+ file_rev_release,
file_rev_url,
+ fileset_edit,
+ fileset_ident,
+ fileset_rev,
+ fileset_rev_file,
+ fileset_rev_release,
+ fileset_rev_url,
release_contrib,
release_edit,
release_ident,
release_ref,
release_rev,
release_rev_abstract,
+ webcapture_edit,
+ webcapture_ident,
+ webcapture_rev,
+ webcapture_rev_cdx,
+ webcapture_rev_release,
+ webcapture_rev_url,
work_edit,
work_ident,
work_rev,