Diffstat (limited to 'rust')
-rw-r--r--  rust/Cargo.lock                                     |   3
-rw-r--r--  rust/Cargo.toml                                     |   3
-rw-r--r--  rust/README.export.md                               |   2
-rwxr-xr-x  rust/codegen_openapi2.sh                            |  12
-rw-r--r--  rust/fatcat-api-spec/README.md                      |   2
-rw-r--r--  rust/fatcat-api-spec/api.yaml                       | 103
-rw-r--r--  rust/fatcat-api-spec/api/swagger.yaml               | 168
-rw-r--r--  rust/fatcat-api-spec/examples/client.rs             |  72
-rw-r--r--  rust/fatcat-api-spec/examples/server_lib/server.rs  |  48
-rw-r--r--  rust/fatcat-api-spec/src/client.rs                  |  78
-rw-r--r--  rust/fatcat-api-spec/src/lib.rs                     |  93
-rw-r--r--  rust/fatcat-api-spec/src/models.rs                  |  62
-rw-r--r--  rust/fatcat-api-spec/src/server.rs                  | 240
-rw-r--r--  rust/migrations/2019-01-01-000000_init/down.sql     |   1
-rw-r--r--  rust/migrations/2019-01-01-000000_init/up.sql       | 169
-rw-r--r--  rust/src/bin/fatcatd.rs                             |   1
-rw-r--r--  rust/src/database_models.rs                         | 131
-rw-r--r--  rust/src/database_schema.rs                         |  33
-rw-r--r--  rust/src/editing.rs                                 |   6
-rw-r--r--  rust/src/endpoint_handlers.rs                       | 156
-rw-r--r--  rust/src/endpoints.rs                               |  26
-rw-r--r--  rust/src/entity_crud.rs                             | 175
-rw-r--r--  rust/src/lib.rs                                     |   2
-rw-r--r--  rust/tests/test_api_server_http.rs                  |  89
-rw-r--r--  rust/tests/test_refs.rs                             | 169
25 files changed, 1479 insertions, 365 deletions
diff --git a/rust/Cargo.lock b/rust/Cargo.lock
index e89954ad..c0df5a2a 100644
--- a/rust/Cargo.lock
+++ b/rust/Cargo.lock
@@ -538,6 +538,9 @@ dependencies = [
"rand 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)",
"regex 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"sentry 0.12.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.84 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_ignored 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_json 1.0.34 (registry+https://github.com/rust-lang/crates.io-index)",
"sha1 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
"slog 2.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
diff --git a/rust/Cargo.toml b/rust/Cargo.toml
index 155e3c8a..c5a52845 100644
--- a/rust/Cargo.toml
+++ b/rust/Cargo.toml
@@ -37,6 +37,9 @@ slog = "^2.0"
slog-term = "*"
slog-async = "*"
serde_json = "1.0"
+serde = "*"
+serde_derive = "1.0"
+serde_ignored = "0.0.4"
sentry = { version = "^0.12", default-features = false, features = ["with_client_implementation", "with_backtrace", "with_panic", "with_log", "with_rust_info", "with_failure"] }
cadence = "^0.16"
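Context for the three new dependencies: serde_ignored wraps a serde deserializer and reports any JSON keys the target type does not recognize, so endpoint code can surface them as a client error instead of silently dropping them. A minimal sketch of that pattern, assuming an illustrative `Entity` struct rather than the generated models:

```rust
#[macro_use]
extern crate serde_derive;
extern crate serde_ignored;
extern crate serde_json;

#[derive(Debug, Deserialize)]
struct Entity {
    name: String,
}

fn main() {
    // "coden" is removed from containers in this very commit; with
    // serde_ignored the stray key gets reported instead of ignored.
    let json = r#"{"name": "Journal of Important Results", "coden": "x"}"#;
    let de = &mut serde_json::Deserializer::from_str(json);
    let mut unknown = Vec::new();
    let entity: Entity = serde_ignored::deserialize(de, |path| {
        unknown.push(path.to_string());
    })
    .unwrap();
    println!("{:?}, ignored keys: {:?}", entity, unknown); // ignored keys: ["coden"]
}
```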
diff --git a/rust/README.export.md b/rust/README.export.md
index cee361c0..97c2c028 100644
--- a/rust/README.export.md
+++ b/rust/README.export.md
@@ -9,5 +9,5 @@ Then dump:
Or, perhaps, in production:
- cat /tmp/fatcat_ident_releases.tsv | ./target/release/fatcat-export release --expand files,container -j8 | pv -l | gzip > release_export_expanded.json.gz
+ cat /tmp/fatcat_ident_releases.tsv | ./target/release/fatcat-export release --expand files,filesets,webcaptures,container -j8 | pv -l | gzip > release_export_expanded.json.gz
diff --git a/rust/codegen_openapi2.sh b/rust/codegen_openapi2.sh
index f7d1df53..a3950c58 100755
--- a/rust/codegen_openapi2.sh
+++ b/rust/codegen_openapi2.sh
@@ -11,4 +11,16 @@ sed -i 's/extern crate uuid;/extern crate serde_json;\nextern crate uuid;/g' fat
# Hack to fix "release_date" as Date, not DateTime
sed -i 's/release_date: Option<chrono::DateTime<chrono::Utc>>/release_date: Option<chrono::NaiveDate>/g' fatcat-api-spec/src/models.rs
+# Hack to require that optional params parse correctly (boolean, integer, datetime)
+# If we reformat, this should basically go from, eg:
+# .and_then(|x| x.parse::<i64>()
+# .ok());
+# To:
+# .and_then(|x| Some(x.parse::<i64>()))
+# .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+# .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
+sed -i 's/.and_then(|x| x.parse::<i64>().ok());$/.and_then(|x| Some(x.parse::<i64>())).map_or_else(|| Ok(None), |x| x.map(|v| Some(v))).map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;/g' fatcat-api-spec/src/server.rs
+sed -i 's/.and_then(|x| x.parse::<bool>().ok());$/.and_then(|x| Some(x.to_lowercase().parse::<bool>())).map_or_else(|| Ok(None), |x| x.map(|v| Some(v))).map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;/g' fatcat-api-spec/src/server.rs
+sed -i 's/.and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());$/.and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>())).map_or_else(|| Ok(None), |x| x.map(|v| Some(v))).map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO\/RFC format)".to_string())))?;/g' fatcat-api-spec/src/server.rs
+
cargo fmt
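The net effect of those sed rewrites, distilled: a query parameter that fails to parse used to collapse to None and be silently ignored; afterwards it surfaces as an Err the handler converts into a 400 Bad Request. A standalone sketch under illustrative helper names (the generated code spells the same thing with map_or_else rather than transpose):

```rust
use std::num::ParseIntError;

// Before: a bad value like "?limit=abc" silently becomes None.
fn parse_before(raw: Option<&str>) -> Option<i64> {
    raw.and_then(|x| x.parse::<i64>().ok())
}

// After: an absent value is Ok(None); a bad value is an Err the
// handler turns into an HTTP 400 response.
fn parse_after(raw: Option<&str>) -> Result<Option<i64>, ParseIntError> {
    raw.map(|x| x.parse::<i64>()).transpose()
}

fn main() {
    assert_eq!(parse_before(Some("abc")), None); // failure silently dropped
    assert_eq!(parse_after(None), Ok(None));     // absent parameter stays fine
    assert!(parse_after(Some("abc")).is_err());  // failure becomes an error
}
```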
diff --git a/rust/fatcat-api-spec/README.md b/rust/fatcat-api-spec/README.md
index ee031f66..cacded6a 100644
--- a/rust/fatcat-api-spec/README.md
+++ b/rust/fatcat-api-spec/README.md
@@ -13,7 +13,7 @@ To see how to make this your own, look here:
[README](https://github.com/swagger-api/swagger-codegen/blob/master/README.md)
- API version: 0.1.0
-- Build date: 2019-01-11T23:46:50.303Z
+- Build date: 2019-01-23T05:30:23.378Z
This autogenerated project defines an API crate `fatcat` which contains:
* An `Api` trait defining the API in Rust.
diff --git a/rust/fatcat-api-spec/api.yaml b/rust/fatcat-api-spec/api.yaml
index 70a27b76..14b70c39 100644
--- a/rust/fatcat-api-spec/api.yaml
+++ b/rust/fatcat-api-spec/api.yaml
@@ -26,6 +26,10 @@ tags: # TAGLINE
descriptions: "Creator entities: such as authors" # TAGLINE
- name: files # TAGLINE
descriptions: "File entities" # TAGLINE
+ - name: filesets # TAGLINE
+ descriptions: "Fileset entities" # TAGLINE
+ - name: webcaptures # TAGLINE
+ descriptions: "Webcapture entities" # TAGLINE
- name: releases # TAGLINE
descriptions: "Release entities: individual articles, pre-prints, books" # TAGLINE
- name: works # TAGLINE
@@ -132,6 +136,9 @@ definitions:
type: string
example: "Journal of Important Results"
description: "Required for valid entities"
+ container_type:
+ type: string
+ description: "Eg, 'journal'"
publisher:
type: string
example: "Society of Curious Students"
@@ -139,10 +146,6 @@ definitions:
<<: *FATCATISSN
wikidata_qid:
type: string
- abbrev:
- type: string
- coden:
- type: string
creator_entity:
type: object
# required for creation: display_name
@@ -249,7 +252,7 @@ definitions:
properties:
<<: *ENTITYPROPS
cdx:
- # limit of 200 CDX lines, at least to start
+ # limit of 200 CDX lines, at least to start?
type: array
items:
type: object
@@ -264,7 +267,9 @@ definitions:
example: "org,asheesh)/apus/ch1/node15.html"
timestamp:
type: string
- example: "20020429162520"
+ format: date-time
+ example: "2016-09-19T17:20:24Z"
+ description: "UTC, 'Z'-terminated, second (or better) precision"
url:
type: string
# NOTE: not format:url to allow alternatives
@@ -302,6 +307,7 @@ definitions:
timestamp:
type: string
format: date-time
+ description: "same format as CDX line timestamp (UTC, etc). Corresponds to the overall capture timestamp. Can be the earliest or average of CDX timestamps if that makes sense."
release_ids:
type: array
items:
@@ -313,7 +319,10 @@ definitions:
<<: *ENTITYPROPS
title:
type: string
- description: "Required for valid entities"
+ description: "Required for valid entities. The title used in citations and for display; usually English"
+ original_title:
+ type: string
+ description: "Title in original language (or, the language of the full text of this release)"
work_id:
type: string
example: "q3nouwy3nnbsvo3h5klxsx4a7y"
@@ -343,7 +352,7 @@ definitions:
example: "book"
release_status:
type: string
- example: "preprint"
+ example: "preprint, retracted"
release_date:
type: string
format: date
@@ -367,6 +376,10 @@ definitions:
core_id:
type: string
#format: custom
+ arxiv_id:
+ type: string
+ jstor_id:
+ type: string
volume:
type: string
issue:
@@ -379,6 +392,9 @@ definitions:
language:
description: "Two-letter RFC1766/ISO639-1 language code, with extensions"
type: string
+ license_slug:
+ type: string
+ description: "Short version of license name. Eg, 'CC-BY'"
contribs:
type: array
items:
@@ -588,11 +604,14 @@ definitions:
description: "Optional; GET-only"
raw_name:
type: string
+ role:
+ type: string
+ raw_affiliation:
+ type: string
+ description: "Raw affiliation string as displayed in text"
extra:
type: object
additionalProperties: {}
- role:
- type: string
auth_oidc:
type: object
required:
@@ -687,6 +706,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -945,6 +972,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -1226,6 +1261,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -1489,6 +1532,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -1718,6 +1769,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -1947,6 +2006,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
@@ -2201,6 +2268,14 @@ paths:
in: query
type: string
required: false
+ - name: arxiv_id
+ in: query
+ type: string
+ required: false
+ - name: jstor_id
+ in: query
+ type: string
+ required: false
- name: expand
in: query
type: string
@@ -2291,6 +2366,14 @@ paths:
type: string
required: false
description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
+ - name: description
+ in: query
+ type: string
+ required: false
+ - name: extra
+ in: query
+ type: string
+ required: false
- name: entity_list
in: body
required: true
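One schema change above worth calling out: the CDX timestamp moves from the classic 14-digit WARC form ("20020429162520") to an RFC 3339, 'Z'-terminated date-time ("2016-09-19T17:20:24Z"). A chrono sketch of parsing the new form and converting legacy values; the helper name is illustrative:

```rust
extern crate chrono;
use chrono::{DateTime, NaiveDateTime, Utc};

// Legacy 14-digit CDX timestamps need an explicit format string.
fn from_legacy_cdx(ts: &str) -> chrono::ParseResult<DateTime<Utc>> {
    NaiveDateTime::parse_from_str(ts, "%Y%m%d%H%M%S").map(|ndt| DateTime::from_utc(ndt, Utc))
}

fn main() {
    // The new form parses directly via FromStr:
    let t: DateTime<Utc> = "2016-09-19T17:20:24Z".parse().unwrap();
    assert_eq!(t.to_rfc3339(), "2016-09-19T17:20:24+00:00");
    // Converting a legacy value:
    let legacy = from_legacy_cdx("20020429162520").unwrap();
    assert_eq!(legacy.to_rfc3339(), "2002-04-29T16:25:20+00:00");
}
```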
diff --git a/rust/fatcat-api-spec/api/swagger.yaml b/rust/fatcat-api-spec/api/swagger.yaml
index 7a1c2755..927bb941 100644
--- a/rust/fatcat-api-spec/api/swagger.yaml
+++ b/rust/fatcat-api-spec/api/swagger.yaml
@@ -11,6 +11,8 @@ tags:
- name: "containers"
- name: "creators"
- name: "files"
+- name: "filesets"
+- name: "webcaptures"
- name: "releases"
- name: "works"
- name: "edit-lifecycle"
@@ -133,6 +135,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -979,6 +993,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -1889,6 +1915,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -2748,6 +2786,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -3517,6 +3567,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -4286,6 +4348,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -5039,6 +5113,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"core_id_example\".to_string())"
+ - name: "arxiv_id"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"arxiv_id_example\".to_string())"
+ - name: "jstor_id"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"jstor_id_example\".to_string())"
- name: "expand"
in: "query"
description: "List of sub-entities to expand in response."
@@ -5346,6 +5432,18 @@ paths:
type: "string"
formatString: "{:?}"
example: "Some(\"editgroup_id_example\".to_string())"
+ - name: "description"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"description_example\".to_string())"
+ - name: "extra"
+ in: "query"
+ required: false
+ type: "string"
+ formatString: "{:?}"
+ example: "Some(\"extra_example\".to_string())"
- in: "body"
name: "entity_list"
required: true
@@ -7273,10 +7371,6 @@ definitions:
container_entity:
type: "object"
properties:
- coden:
- type: "string"
- abbrev:
- type: "string"
wikidata_qid:
type: "string"
issnl:
@@ -7288,6 +7382,9 @@ definitions:
publisher:
type: "string"
example: "Society of Curious Students"
+ container_type:
+ type: "string"
+ description: "Eg, 'journal'"
name:
type: "string"
example: "Journal of Important Results"
@@ -7326,13 +7423,12 @@ definitions:
- "deleted"
example:
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
- coden: "coden"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
extra: "{}"
+ container_type: "container_type"
name: "Journal of Important Results"
publisher: "Society of Curious Students"
issnl: "1234-5678"
- abbrev: "abbrev"
wikidata_qid: "wikidata_qid"
state: "wip"
edit_extra: "{}"
@@ -7592,6 +7688,9 @@ definitions:
timestamp:
type: "string"
format: "date-time"
+ description: "same format as CDX line timestamp (UTC, etc). Corresponds to\
+ \ the overall capture timestamp. Can be the earliest or average of CDX timestamps\
+ \ if that makes sense."
original_url:
type: "string"
format: "url"
@@ -7651,14 +7750,14 @@ definitions:
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
- sha1: "e9dd75237c94b209dc3ccd52722de6931a310ba3"
surt: "org,asheesh)/apus/ch1/node15.html"
status_code: 200
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
extra: "{}"
state: "wip"
@@ -7684,6 +7783,9 @@ definitions:
type: "array"
items:
$ref: "#/definitions/release_contrib"
+ license_slug:
+ type: "string"
+ description: "Short version of license name. Eg, 'CC-BY'"
language:
type: "string"
description: "Two-letter RFC1766/ISO639-1 language code, with extensions"
@@ -7696,6 +7798,10 @@ definitions:
example: "12"
volume:
type: "string"
+ jstor_id:
+ type: "string"
+ arxiv_id:
+ type: "string"
core_id:
type: "string"
pmcid:
@@ -7718,7 +7824,7 @@ definitions:
format: "date"
release_status:
type: "string"
- example: "preprint"
+ example: "preprint, retracted"
release_type:
type: "string"
example: "book"
@@ -7746,9 +7852,14 @@ definitions:
work_id:
type: "string"
example: "q3nouwy3nnbsvo3h5klxsx4a7y"
+ original_title:
+ type: "string"
+ description: "Title in original language (or, the language of the full text\
+ \ of this release)"
title:
type: "string"
- description: "Required for valid entities"
+ description: "Required for valid entities. The title used in citations and\
+ \ for display; usually English"
state:
type: "string"
enum:
@@ -7784,13 +7895,12 @@ definitions:
example:
container:
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
- coden: "coden"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
extra: "{}"
+ container_type: "container_type"
name: "Journal of Important Results"
publisher: "Society of Curious Students"
issnl: "1234-5678"
- abbrev: "abbrev"
wikidata_qid: "wikidata_qid"
state: "wip"
edit_extra: "{}"
@@ -7810,14 +7920,14 @@ definitions:
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
- sha1: "e9dd75237c94b209dc3ccd52722de6931a310ba3"
surt: "org,asheesh)/apus/ch1/node15.html"
status_code: 200
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
extra: "{}"
state: "wip"
@@ -7841,14 +7951,14 @@ definitions:
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
- sha1: "e9dd75237c94b209dc3ccd52722de6931a310ba3"
surt: "org,asheesh)/apus/ch1/node15.html"
status_code: 200
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
extra: "{}"
state: "wip"
@@ -7862,7 +7972,8 @@ definitions:
language: "language"
title: "title"
contribs:
- - creator:
+ - raw_affiliation: "raw_affiliation"
+ creator:
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
surname: "surname"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
@@ -7879,7 +7990,8 @@ definitions:
extra: "{}"
creator_id: "creator_id"
index: 1
- - creator:
+ - raw_affiliation: "raw_affiliation"
+ creator:
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
surname: "surname"
ident: "q3nouwy3nnbsvo3h5klxsx4a7y"
@@ -7901,9 +8013,11 @@ definitions:
extra: "{}"
state: "wip"
edit_extra: "{}"
+ jstor_id: "jstor_id"
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
work_id: "q3nouwy3nnbsvo3h5klxsx4a7y"
issue: "12"
+ original_title: "original_title"
abstracts:
- sha1: "e9dd75237c94b209dc3ccd52722de6931a310ba3"
mimetype: "application/xml+jats"
@@ -7917,9 +8031,10 @@ definitions:
release_type: "book"
wikidata_qid: "wikidata_qid"
pmid: "pmid"
- release_status: "preprint"
+ release_status: "preprint, retracted"
revision: "86daea5b-1b6b-432a-bb67-ea97795f80fe"
volume: "volume"
+ license_slug: "license_slug"
refs:
- target_release_id: "q3nouwy3nnbsvo3h5klxsx4a7y"
container_name: "container_name"
@@ -7979,6 +8094,7 @@ definitions:
- "q3nouwy3nnbsvo3h5klxsx4a7y"
edit_extra: "{}"
md5: "1b39813549077b2347c0f370c3864b40"
+ arxiv_id: "arxiv_id"
filesets:
- redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
urls:
@@ -8929,11 +9045,15 @@ definitions:
$ref: "#/definitions/creator_entity"
raw_name:
type: "string"
- extra:
- type: "object"
role:
type: "string"
+ raw_affiliation:
+ type: "string"
+ description: "Raw affiliation string as displayed in text"
+ extra:
+ type: "object"
example:
+ raw_affiliation: "raw_affiliation"
creator:
redirect: "q3nouwy3nnbsvo3h5klxsx4a7y"
surname: "surname"
@@ -9072,7 +9192,9 @@ definitions:
example: "org,asheesh)/apus/ch1/node15.html"
timestamp:
type: "string"
- example: "20020429162520"
+ format: "date-time"
+ example: "2016-09-19T17:20:24Z"
+ description: "UTC, 'Z'-terminated, second (or better) precision"
url:
type: "string"
example: "http://www.asheesh.org:80/APUS/ch1/node15.html"
@@ -9102,7 +9224,7 @@ definitions:
sha256: "cb1c378f464d5935ddaa8de28446d82638396c61f042295d7fb85e3cccc9e452"
mimetype: "text/html"
url: "http://www.asheesh.org:80/APUS/ch1/node15.html"
- timestamp: "20020429162520"
+ timestamp: "2016-09-19T17:20:24Z"
upperCaseName: "WEBCAPTURE_ENTITY_CDX"
release_entity_abstracts:
properties:
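The regenerated release_contrib definition now carries raw_affiliation alongside role. A sketch of the resulting JSON shape, using a hand-written field subset (struct and values here are illustrative, not the generated models.rs code shown further below):

```rust
#[macro_use]
extern crate serde_derive;
extern crate serde_json;

// Field subset of the release_contrib definition above.
#[derive(Debug, Serialize, Deserialize)]
struct ReleaseContrib {
    #[serde(skip_serializing_if = "Option::is_none")]
    raw_name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    role: Option<String>,
    /// Raw affiliation string as displayed in text (new in this commit)
    #[serde(skip_serializing_if = "Option::is_none")]
    raw_affiliation: Option<String>,
}

fn main() {
    let c = ReleaseContrib {
        raw_name: Some("Jane Doe".to_string()),
        role: Some("author".to_string()),
        raw_affiliation: Some("Dept. of Examples, Example University".to_string()),
    };
    // {"raw_name":"Jane Doe","role":"author","raw_affiliation":"Dept. of Examples, Example University"}
    println!("{}", serde_json::to_string(&c).unwrap());
}
```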
diff --git a/rust/fatcat-api-spec/examples/client.rs b/rust/fatcat-api-spec/examples/client.rs
index b4d90719..40a5a3ab 100644
--- a/rust/fatcat-api-spec/examples/client.rs
+++ b/rust/fatcat-api-spec/examples/client.rs
@@ -144,7 +144,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateContainerBatch") => {
- let result = client.create_container_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_container_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -211,7 +219,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateCreatorBatch") => {
- let result = client.create_creator_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_creator_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -359,7 +375,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateFileBatch") => {
- let result = client.create_file_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_file_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -427,7 +451,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateFilesetBatch") => {
- let result = client.create_fileset_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_fileset_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -482,7 +514,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateReleaseBatch") => {
- let result = client.create_release_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_release_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -554,6 +594,8 @@ fn main() {
Some("pmid_example".to_string()),
Some("pmcid_example".to_string()),
Some("core_id_example".to_string()),
+ Some("arxiv_id_example".to_string()),
+ Some("jstor_id_example".to_string()),
Some("expand_example".to_string()),
Some("hide_example".to_string()),
)
@@ -573,7 +615,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateWebcaptureBatch") => {
- let result = client.create_webcapture_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_webcapture_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
@@ -622,7 +672,15 @@ fn main() {
// println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
// },
Some("CreateWorkBatch") => {
- let result = client.create_work_batch(&Vec::new(), Some(true), Some("editgroup_id_example".to_string())).wait();
+ let result = client
+ .create_work_batch(
+ &Vec::new(),
+ Some(true),
+ Some("editgroup_id_example".to_string()),
+ Some("description_example".to_string()),
+ Some("extra_example".to_string()),
+ )
+ .wait();
println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>")));
}
diff --git a/rust/fatcat-api-spec/examples/server_lib/server.rs b/rust/fatcat-api-spec/examples/server_lib/server.rs
index 3c37106a..98f31485 100644
--- a/rust/fatcat-api-spec/examples/server_lib/server.rs
+++ b/rust/fatcat-api-spec/examples/server_lib/server.rs
@@ -46,14 +46,18 @@ impl Api for Server {
entity_list: &Vec<models::ContainerEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_container_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_container_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -171,14 +175,18 @@ impl Api for Server {
entity_list: &Vec<models::CreatorEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_creator_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_creator_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -465,14 +473,18 @@ impl Api for Server {
entity_list: &Vec<models::FileEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_file_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_file_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -592,14 +604,18 @@ impl Api for Server {
entity_list: &Vec<models::FilesetEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFilesetBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_fileset_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_fileset_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -697,14 +713,18 @@ impl Api for Server {
entity_list: &Vec<models::ReleaseEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_release_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_release_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -826,19 +846,23 @@ impl Api for Server {
pmid: Option<String>,
pmcid: Option<String>,
core_id: Option<String>,
+ arxiv_id: Option<String>,
+ jstor_id: Option<String>,
expand: Option<String>,
hide: Option<String>,
context: &Context,
) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "lookup_release({:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "lookup_release({:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
doi,
wikidata_qid,
isbn13,
pmid,
pmcid,
core_id,
+ arxiv_id,
+ jstor_id,
expand,
hide,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
@@ -874,14 +898,18 @@ impl Api for Server {
entity_list: &Vec<models::WebcaptureEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWebcaptureBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_webcapture_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_webcapture_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
@@ -968,14 +996,18 @@ impl Api for Server {
entity_list: &Vec<models::WorkEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> {
let context = context.clone();
println!(
- "create_work_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}",
+ "create_work_batch({:?}, {:?}, {:?}, {:?}, {:?}) - X-Span-ID: {:?}",
entity_list,
autoaccept,
editgroup_id,
+ description,
+ extra,
context.x_span_id.unwrap_or(String::from("<none>")).clone()
);
Box::new(futures::failed("Generic failure".into()))
diff --git a/rust/fatcat-api-spec/src/client.rs b/rust/fatcat-api-spec/src/client.rs
index a3e97fb3..6d654146 100644
--- a/rust/fatcat-api-spec/src/client.rs
+++ b/rust/fatcat-api-spec/src/client.rs
@@ -272,17 +272,23 @@ impl Api for Client {
param_entity_list: &Vec<models::ContainerEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/container/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/container/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -1167,17 +1173,23 @@ impl Api for Client {
param_entity_list: &Vec<models::CreatorEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/creator/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/creator/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -3364,17 +3376,23 @@ impl Api for Client {
param_entity_list: &Vec<models::FileEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/file/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/file/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -4256,17 +4274,23 @@ impl Api for Client {
param_entity_list: &Vec<models::FilesetEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFilesetBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/fileset/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/fileset/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -5071,17 +5095,23 @@ impl Api for Client {
param_entity_list: &Vec<models::ReleaseEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/release/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/release/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -5988,6 +6018,8 @@ impl Api for Client {
param_pmid: Option<String>,
param_pmcid: Option<String>,
param_core_id: Option<String>,
+ param_arxiv_id: Option<String>,
+ param_jstor_id: Option<String>,
param_expand: Option<String>,
param_hide: Option<String>,
context: &Context,
@@ -5999,11 +6031,13 @@ impl Api for Client {
let query_pmid = param_pmid.map_or_else(String::new, |query| format!("pmid={pmid}&", pmid = query.to_string()));
let query_pmcid = param_pmcid.map_or_else(String::new, |query| format!("pmcid={pmcid}&", pmcid = query.to_string()));
let query_core_id = param_core_id.map_or_else(String::new, |query| format!("core_id={core_id}&", core_id = query.to_string()));
+ let query_arxiv_id = param_arxiv_id.map_or_else(String::new, |query| format!("arxiv_id={arxiv_id}&", arxiv_id = query.to_string()));
+ let query_jstor_id = param_jstor_id.map_or_else(String::new, |query| format!("jstor_id={jstor_id}&", jstor_id = query.to_string()));
let query_expand = param_expand.map_or_else(String::new, |query| format!("expand={expand}&", expand = query.to_string()));
let query_hide = param_hide.map_or_else(String::new, |query| format!("hide={hide}&", hide = query.to_string()));
let url = format!(
- "{}/v0/release/lookup?{doi}{wikidata_qid}{isbn13}{pmid}{pmcid}{core_id}{expand}{hide}",
+ "{}/v0/release/lookup?{doi}{wikidata_qid}{isbn13}{pmid}{pmcid}{core_id}{arxiv_id}{jstor_id}{expand}{hide}",
self.base_path,
doi = utf8_percent_encode(&query_doi, QUERY_ENCODE_SET),
wikidata_qid = utf8_percent_encode(&query_wikidata_qid, QUERY_ENCODE_SET),
@@ -6011,6 +6045,8 @@ impl Api for Client {
pmid = utf8_percent_encode(&query_pmid, QUERY_ENCODE_SET),
pmcid = utf8_percent_encode(&query_pmcid, QUERY_ENCODE_SET),
core_id = utf8_percent_encode(&query_core_id, QUERY_ENCODE_SET),
+ arxiv_id = utf8_percent_encode(&query_arxiv_id, QUERY_ENCODE_SET),
+ jstor_id = utf8_percent_encode(&query_jstor_id, QUERY_ENCODE_SET),
expand = utf8_percent_encode(&query_expand, QUERY_ENCODE_SET),
hide = utf8_percent_encode(&query_hide, QUERY_ENCODE_SET)
);
@@ -6272,17 +6308,23 @@ impl Api for Client {
param_entity_list: &Vec<models::WebcaptureEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWebcaptureBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/webcapture/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/webcapture/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
@@ -6993,17 +7035,23 @@ impl Api for Client {
param_entity_list: &Vec<models::WorkEntity>,
param_autoaccept: Option<bool>,
param_editgroup_id: Option<String>,
+ param_description: Option<String>,
+ param_extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> {
// Query parameters
let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
let query_editgroup_id = param_editgroup_id.map_or_else(String::new, |query| format!("editgroup_id={editgroup_id}&", editgroup_id = query.to_string()));
+ let query_description = param_description.map_or_else(String::new, |query| format!("description={description}&", description = query.to_string()));
+ let query_extra = param_extra.map_or_else(String::new, |query| format!("extra={extra}&", extra = query.to_string()));
let url = format!(
- "{}/v0/work/batch?{autoaccept}{editgroup_id}",
+ "{}/v0/work/batch?{autoaccept}{editgroup_id}{description}{extra}",
self.base_path,
autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
- editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET)
+ editgroup_id = utf8_percent_encode(&query_editgroup_id, QUERY_ENCODE_SET),
+ description = utf8_percent_encode(&query_description, QUERY_ENCODE_SET),
+ extra = utf8_percent_encode(&query_extra, QUERY_ENCODE_SET)
);
let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize");
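The query-string pattern repeated throughout these client methods: each optional parameter renders to either an empty string or a `key=value&` fragment via map_or_else, so the format! template can concatenate all fragments unconditionally (the generated code additionally runs each fragment through utf8_percent_encode, omitted here). A distilled sketch:

```rust
// Optional parameter -> "" or "key=value&"; absent params vanish entirely.
fn query_fragment(key: &str, val: Option<&str>) -> String {
    val.map_or_else(String::new, |v| format!("{}={}&", key, v))
}

fn main() {
    let url = format!(
        "/v0/release/lookup?{}{}",
        query_fragment("arxiv_id", Some("1811.11293")),
        query_fragment("jstor_id", None),
    );
    // A trailing '&' is left in place, matching the generated URLs.
    assert_eq!(url, "/v0/release/lookup?arxiv_id=1811.11293&");
}
```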
diff --git a/rust/fatcat-api-spec/src/lib.rs b/rust/fatcat-api-spec/src/lib.rs
index 9585f1c0..59129869 100644
--- a/rust/fatcat-api-spec/src/lib.rs
+++ b/rust/fatcat-api-spec/src/lib.rs
@@ -1345,6 +1345,8 @@ pub trait Api {
entity_list: &Vec<models::ContainerEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>;
@@ -1380,6 +1382,8 @@ pub trait Api {
entity_list: &Vec<models::CreatorEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>;
@@ -1473,6 +1477,8 @@ pub trait Api {
entity_list: &Vec<models::FileEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>;
@@ -1509,6 +1515,8 @@ pub trait Api {
entity_list: &Vec<models::FilesetEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateFilesetBatchResponse, Error = ApiError> + Send>;
@@ -1535,6 +1543,8 @@ pub trait Api {
entity_list: &Vec<models::ReleaseEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>;
@@ -1568,6 +1578,8 @@ pub trait Api {
pmid: Option<String>,
pmcid: Option<String>,
core_id: Option<String>,
+ arxiv_id: Option<String>,
+ jstor_id: Option<String>,
expand: Option<String>,
hide: Option<String>,
context: &Context,
@@ -1582,6 +1594,8 @@ pub trait Api {
entity_list: &Vec<models::WebcaptureEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWebcaptureBatchResponse, Error = ApiError> + Send>;
@@ -1606,6 +1620,8 @@ pub trait Api {
entity_list: &Vec<models::WorkEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>;
@@ -1637,6 +1653,8 @@ pub trait ApiNoContext {
entity_list: &Vec<models::ContainerEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>;
fn delete_container(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send>;
@@ -1670,6 +1688,8 @@ pub trait ApiNoContext {
entity_list: &Vec<models::CreatorEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>;
fn delete_creator(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send>;
@@ -1742,7 +1762,14 @@ pub trait ApiNoContext {
fn create_file(&self, entity: models::FileEntity, editgroup_id: String) -> Box<Future<Item = CreateFileResponse, Error = ApiError> + Send>;
- fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, autoaccept: Option<bool>, editgroup_id: Option<String>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>;
+ fn create_file_batch(
+ &self,
+ entity_list: &Vec<models::FileEntity>,
+ autoaccept: Option<bool>,
+ editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
+ ) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>;
fn delete_file(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send>;
@@ -1776,6 +1803,8 @@ pub trait ApiNoContext {
entity_list: &Vec<models::FilesetEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateFilesetBatchResponse, Error = ApiError> + Send>;
fn delete_fileset(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteFilesetResponse, Error = ApiError> + Send>;
@@ -1801,6 +1830,8 @@ pub trait ApiNoContext {
entity_list: &Vec<models::ReleaseEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>;
fn create_work(&self, entity: models::WorkEntity, editgroup_id: String) -> Box<Future<Item = CreateWorkResponse, Error = ApiError> + Send>;
@@ -1833,6 +1864,8 @@ pub trait ApiNoContext {
pmid: Option<String>,
pmcid: Option<String>,
core_id: Option<String>,
+ arxiv_id: Option<String>,
+ jstor_id: Option<String>,
expand: Option<String>,
hide: Option<String>,
) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send>;
@@ -1846,6 +1879,8 @@ pub trait ApiNoContext {
entity_list: &Vec<models::WebcaptureEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateWebcaptureBatchResponse, Error = ApiError> + Send>;
fn delete_webcapture(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteWebcaptureResponse, Error = ApiError> + Send>;
@@ -1864,7 +1899,14 @@ pub trait ApiNoContext {
fn update_webcapture(&self, ident: String, entity: models::WebcaptureEntity, editgroup_id: String) -> Box<Future<Item = UpdateWebcaptureResponse, Error = ApiError> + Send>;
- fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, autoaccept: Option<bool>, editgroup_id: Option<String>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>;
+ fn create_work_batch(
+ &self,
+ entity_list: &Vec<models::WorkEntity>,
+ autoaccept: Option<bool>,
+ editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
+ ) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>;
fn delete_work(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send>;
@@ -1910,8 +1952,10 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
entity_list: &Vec<models::ContainerEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> {
- self.api().create_container_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ self.api().create_container_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_container(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send> {
@@ -1965,8 +2009,10 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
entity_list: &Vec<models::CreatorEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> {
- self.api().create_creator_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ self.api().create_creator_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_creator(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send> {
@@ -2091,8 +2137,15 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
self.api().create_file(entity, editgroup_id, &self.context())
}
- fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, autoaccept: Option<bool>, editgroup_id: Option<String>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> {
- self.api().create_file_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ fn create_file_batch(
+ &self,
+ entity_list: &Vec<models::FileEntity>,
+ autoaccept: Option<bool>,
+ editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
+ ) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> {
+ self.api().create_file_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_file(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send> {
@@ -2147,8 +2200,10 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
entity_list: &Vec<models::FilesetEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateFilesetBatchResponse, Error = ApiError> + Send> {
- self.api().create_fileset_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ self.api().create_fileset_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_fileset(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteFilesetResponse, Error = ApiError> + Send> {
@@ -2192,8 +2247,10 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
entity_list: &Vec<models::ReleaseEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> {
- self.api().create_release_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ self.api().create_release_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn create_work(&self, entity: models::WorkEntity, editgroup_id: String) -> Box<Future<Item = CreateWorkResponse, Error = ApiError> + Send> {
@@ -2248,10 +2305,13 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
pmid: Option<String>,
pmcid: Option<String>,
core_id: Option<String>,
+ arxiv_id: Option<String>,
+ jstor_id: Option<String>,
expand: Option<String>,
hide: Option<String>,
) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send> {
- self.api().lookup_release(doi, wikidata_qid, isbn13, pmid, pmcid, core_id, expand, hide, &self.context())
+ self.api()
+ .lookup_release(doi, wikidata_qid, isbn13, pmid, pmcid, core_id, arxiv_id, jstor_id, expand, hide, &self.context())
}
fn update_release(&self, ident: String, entity: models::ReleaseEntity, editgroup_id: String) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send> {
@@ -2267,8 +2327,10 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
entity_list: &Vec<models::WebcaptureEntity>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
) -> Box<Future<Item = CreateWebcaptureBatchResponse, Error = ApiError> + Send> {
- self.api().create_webcapture_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ self.api().create_webcapture_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_webcapture(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteWebcaptureResponse, Error = ApiError> + Send> {
@@ -2303,8 +2365,15 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> {
self.api().update_webcapture(ident, entity, editgroup_id, &self.context())
}
- fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, autoaccept: Option<bool>, editgroup_id: Option<String>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> {
- self.api().create_work_batch(entity_list, autoaccept, editgroup_id, &self.context())
+ fn create_work_batch(
+ &self,
+ entity_list: &Vec<models::WorkEntity>,
+ autoaccept: Option<bool>,
+ editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
+ ) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> {
+ self.api().create_work_batch(entity_list, autoaccept, editgroup_id, description, extra, &self.context())
}
fn delete_work(&self, ident: String, editgroup_id: String) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send> {
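
Every entity batch-create wrapper grows the same two optional parameters, `description` and
`extra`, which are forwarded to the editgroup that `autoaccept` creates. A minimal call-site
sketch, assuming the regenerated crate and a configured client as in
fatcat-api-spec/examples/client.rs (the commented line shows the new call shape):

    extern crate fatcat_api_spec;

    use fatcat_api_spec::models::WorkEntity;

    fn main() {
        // both new parameters travel as plain query strings on the wire
        let entity_list: Vec<WorkEntity> = vec![WorkEntity::new()];
        let description = Some("bulk import of new works".to_string());
        let extra = Some(r#"{"agent": "example_importer"}"#.to_string());
        // with a client in scope:
        // client.create_work_batch(&entity_list, Some(true), None, description, extra).wait();
        let _ = (entity_list, description, extra);
    }
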
diff --git a/rust/fatcat-api-spec/src/models.rs b/rust/fatcat-api-spec/src/models.rs
index 5d05b737..d6e6e07f 100644
--- a/rust/fatcat-api-spec/src/models.rs
+++ b/rust/fatcat-api-spec/src/models.rs
@@ -79,14 +79,6 @@ impl ChangelogEntry {
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ContainerEntity {
- #[serde(rename = "coden")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub coden: Option<String>,
-
- #[serde(rename = "abbrev")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub abbrev: Option<String>,
-
#[serde(rename = "wikidata_qid")]
#[serde(skip_serializing_if = "Option::is_none")]
pub wikidata_qid: Option<String>,
@@ -99,6 +91,11 @@ pub struct ContainerEntity {
#[serde(skip_serializing_if = "Option::is_none")]
pub publisher: Option<String>,
+ /// Eg, 'journal'
+ #[serde(rename = "container_type")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub container_type: Option<String>,
+
/// Required for valid entities
#[serde(rename = "name")]
#[serde(skip_serializing_if = "Option::is_none")]
@@ -136,11 +133,10 @@ pub struct ContainerEntity {
impl ContainerEntity {
pub fn new() -> ContainerEntity {
ContainerEntity {
- coden: None,
- abbrev: None,
wikidata_qid: None,
issnl: None,
publisher: None,
+ container_type: None,
name: None,
edit_extra: None,
extra: None,
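
Net effect on the container JSON shape: `coden` and `abbrev` are gone, and `container_type` is
the new optional field. A sketch with illustrative values:

    #[macro_use]
    extern crate serde_json;

    fn main() {
        // container entity under the revised schema
        let container = json!({
            "name": "Journal of Trivial Results",
            "publisher": "bogus publishing group",
            "issnl": "1234-5678",
            "container_type": "journal"
        });
        println!("{}", serde_json::to_string_pretty(&container).unwrap());
    }
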
@@ -709,13 +705,18 @@ pub struct ReleaseContrib {
#[serde(skip_serializing_if = "Option::is_none")]
pub raw_name: Option<String>,
- #[serde(rename = "extra")]
- #[serde(skip_serializing_if = "Option::is_none")]
- pub extra: Option<serde_json::Value>,
-
#[serde(rename = "role")]
#[serde(skip_serializing_if = "Option::is_none")]
pub role: Option<String>,
+
+ /// Raw affiliation string as displayed in text
+ #[serde(rename = "raw_affiliation")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub raw_affiliation: Option<String>,
+
+ #[serde(rename = "extra")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub extra: Option<serde_json::Value>,
}
impl ReleaseContrib {
@@ -725,8 +726,9 @@ impl ReleaseContrib {
creator_id: None,
creator: None,
raw_name: None,
- extra: None,
role: None,
+ raw_affiliation: None,
+ extra: None,
}
}
}
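
For contribs, the new `raw_affiliation` field slots between `role` and `extra`. A sketch,
assuming the regenerated models (sample values are illustrative):

    extern crate fatcat_api_spec;

    use fatcat_api_spec::models::ReleaseContrib;

    fn main() {
        let mut contrib = ReleaseContrib::new();
        contrib.raw_name = Some("John P. A. Ioannidis".to_string());
        contrib.role = Some("author".to_string());
        // affiliation string exactly as displayed in the source text
        contrib.raw_affiliation = Some("University of Ioannina School of Medicine".to_string());
        println!("{:?}", contrib);
    }
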
@@ -745,6 +747,11 @@ pub struct ReleaseEntity {
#[serde(skip_serializing_if = "Option::is_none")]
pub contribs: Option<Vec<models::ReleaseContrib>>,
+ /// Short version of license name. Eg, 'CC-BY'
+ #[serde(rename = "license_slug")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub license_slug: Option<String>,
+
/// Two-letter RFC1766/ISO639-1 language code, with extensions
#[serde(rename = "language")]
#[serde(skip_serializing_if = "Option::is_none")]
@@ -766,6 +773,14 @@ pub struct ReleaseEntity {
#[serde(skip_serializing_if = "Option::is_none")]
pub volume: Option<String>,
+ #[serde(rename = "jstor_id")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub jstor_id: Option<String>,
+
+ #[serde(rename = "arxiv_id")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub arxiv_id: Option<String>,
+
#[serde(rename = "core_id")]
#[serde(skip_serializing_if = "Option::is_none")]
pub core_id: Option<String>,
@@ -834,7 +849,12 @@ pub struct ReleaseEntity {
#[serde(skip_serializing_if = "Option::is_none")]
pub work_id: Option<String>,
- /// Required for valid entities
+ /// Title in original language (or, the language of the full text of this release)
+ #[serde(rename = "original_title")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub original_title: Option<String>,
+
+ /// Required for valid entities. The title used in citations and for display; usually English
#[serde(rename = "title")]
#[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
@@ -874,11 +894,14 @@ impl ReleaseEntity {
abstracts: None,
refs: None,
contribs: None,
+ license_slug: None,
language: None,
publisher: None,
pages: None,
issue: None,
volume: None,
+ jstor_id: None,
+ arxiv_id: None,
core_id: None,
pmcid: None,
pmid: None,
@@ -895,6 +918,7 @@ impl ReleaseEntity {
files: None,
container: None,
work_id: None,
+ original_title: None,
title: None,
state: None,
ident: None,
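
Releases pick up two more external identifiers (`arxiv_id`, `jstor_id`) plus `license_slug`
and `original_title`. The JSON shape, sketched with illustrative values:

    #[macro_use]
    extern crate serde_json;

    fn main() {
        let release = json!({
            "title": "An English Translation of the Title",
            "original_title": "Der Originaltitel",
            "license_slug": "CC-BY",
            "arxiv_id": "1501.00001",
            "jstor_id": "1819117"
        });
        println!("{}", serde_json::to_string_pretty(&release).unwrap());
    }
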
@@ -1008,6 +1032,7 @@ pub struct WebcaptureEntity {
#[serde(skip_serializing_if = "Option::is_none")]
pub release_ids: Option<Vec<String>>,
+ /// Same format as CDX line timestamps (UTC, etc.). Corresponds to the overall capture timestamp; can be the earliest or the average of the CDX timestamps, if that makes sense.
#[serde(rename = "timestamp")]
#[serde(skip_serializing_if = "Option::is_none")]
pub timestamp: Option<chrono::DateTime<chrono::Utc>>,
@@ -1091,8 +1116,9 @@ pub struct WebcaptureEntityCdx {
#[serde(rename = "surt")]
pub surt: String,
+ /// UTC, 'Z'-terminated, second (or better) precision
#[serde(rename = "timestamp")]
- pub timestamp: String,
+ pub timestamp: chrono::DateTime<chrono::Utc>,
#[serde(rename = "url")]
pub url: String,
@@ -1114,7 +1140,7 @@ pub struct WebcaptureEntityCdx {
}
impl WebcaptureEntityCdx {
- pub fn new(surt: String, timestamp: String, url: String, sha1: String) -> WebcaptureEntityCdx {
+ pub fn new(surt: String, timestamp: chrono::DateTime<chrono::Utc>, url: String, sha1: String) -> WebcaptureEntityCdx {
WebcaptureEntityCdx {
surt: surt,
timestamp: timestamp,
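
With the CDX `timestamp` now a typed datetime, callers that previously passed raw 14-digit CDX
strings need a conversion. One possible helper (the parsing actually used by importers is not
part of this diff):

    extern crate chrono;

    use chrono::{DateTime, NaiveDateTime, Utc};

    // convert a 14-digit CDX timestamp ("YYYYMMDDhhmmss", implicitly UTC)
    // into the chrono::DateTime<Utc> that WebcaptureEntityCdx now requires
    fn cdx_to_datetime(raw: &str) -> Result<DateTime<Utc>, chrono::ParseError> {
        let naive = NaiveDateTime::parse_from_str(raw, "%Y%m%d%H%M%S")?;
        Ok(DateTime::from_utc(naive, Utc))
    }

    fn main() {
        let dt = cdx_to_datetime("19960102123456").unwrap();
        assert_eq!(dt.to_rfc3339(), "1996-01-02T12:34:56+00:00");
    }
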
diff --git a/rust/fatcat-api-spec/src/server.rs b/rust/fatcat-api-spec/src/server.rs
index 8b616959..af13948e 100644
--- a/rust/fatcat-api-spec/src/server.rs
+++ b/rust/fatcat-api-spec/src/server.rs
@@ -262,8 +262,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -290,7 +297,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_container_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_container_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateContainerBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
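
The same substitution repeats at every optional query parameter below: a missing value still
yields None, but a present-and-garbled value now becomes an HTTP 400 instead of being silently
dropped. The pattern, distilled to a dependency-free sketch:

    // mirrors the generated chain: absent => Ok(None), present-and-valid =>
    // Ok(Some(v)), present-and-garbled => Err(...), which the handler turns
    // into a 400 response via `?`
    fn parse_optional_i64(raw: Option<&str>) -> Result<Option<i64>, String> {
        match raw {
            None => Ok(None),
            Some(x) => x
                .parse::<i64>()
                .map(Some)
                .map_err(|_| "unparsable query parameter (expected integer)".to_string()),
        }
    }

    fn main() {
        assert_eq!(parse_optional_i64(None), Ok(None));
        assert_eq!(parse_optional_i64(Some("50")), Ok(Some(50)));
        assert!(parse_optional_i64(Some("fifty")).is_err());
    }
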
@@ -837,7 +847,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_container_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -1488,8 +1503,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -1516,7 +1538,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_creator_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_creator_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateCreatorBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -2063,7 +2088,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_creator_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -2901,9 +2931,24 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
let param_expand = query_params.get("expand").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
- let param_before = query_params.get("before").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
- let param_since = query_params.get("since").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
+ let param_before = query_params
+ .get("before")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
+ let param_since = query_params
+ .get("since")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
match api.get_editgroups_reviewable(param_expand, param_limit, param_before, param_since, context).wait() {
Ok(rsp) => match rsp {
@@ -3085,9 +3130,24 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
- let param_before = query_params.get("before").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
- let param_since = query_params.get("since").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
+ let param_before = query_params
+ .get("before")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
+ let param_since = query_params
+ .get("since")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
match api.get_editor_editgroups(param_editor_id, param_limit, param_before, param_since, context).wait() {
Ok(rsp) => match rsp {
@@ -3182,7 +3242,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_submit = query_params.get("submit").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_submit = query_params
+ .get("submit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -3882,7 +3947,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_changelog(param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -4261,9 +4331,24 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
- let param_before = query_params.get("before").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
- let param_since = query_params.get("since").and_then(|list| list.first()).and_then(|x| x.parse::<chrono::DateTime<chrono::Utc>>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
+ let param_before = query_params
+ .get("before")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
+ let param_since = query_params
+ .get("since")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<chrono::DateTime<chrono::Utc>>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected UTC datetime in ISO/RFC format)".to_string())))?;
match api.get_editor_annotations(param_editor_id, param_limit, param_before, param_since, context).wait() {
Ok(rsp) => match rsp {
@@ -4512,8 +4597,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -4540,7 +4632,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_file_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_file_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateFileBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -5087,7 +5182,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_file_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -5739,8 +5839,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -5767,7 +5874,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_fileset_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_fileset_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateFilesetBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -6314,7 +6424,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_fileset_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -6884,8 +6999,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -6912,7 +7034,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_release_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_release_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateReleaseBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -7791,7 +7916,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_release_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -8151,11 +8281,25 @@ where
let param_pmid = query_params.get("pmid").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
let param_pmcid = query_params.get("pmcid").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
let param_core_id = query_params.get("core_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_arxiv_id = query_params.get("arxiv_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_jstor_id = query_params.get("jstor_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
let param_expand = query_params.get("expand").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
let param_hide = query_params.get("hide").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
match api
- .lookup_release(param_doi, param_wikidata_qid, param_isbn13, param_pmid, param_pmcid, param_core_id, param_expand, param_hide, context)
+ .lookup_release(
+ param_doi,
+ param_wikidata_qid,
+ param_isbn13,
+ param_pmid,
+ param_pmcid,
+ param_core_id,
+ param_arxiv_id,
+ param_jstor_id,
+ param_expand,
+ param_hide,
+ context,
+ )
.wait()
{
Ok(rsp) => match rsp {
@@ -8542,8 +8686,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -8570,7 +8721,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_webcapture_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_webcapture_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateWebcaptureBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -9117,7 +9271,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_webcapture_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
@@ -9541,8 +9700,15 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok());
+ let param_autoaccept = query_params
+ .get("autoaccept")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.to_lowercase().parse::<bool>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected boolean)".to_string())))?;
let param_editgroup_id = query_params.get("editgroup_id").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_description = query_params.get("description").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
+ let param_extra = query_params.get("extra").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok());
// Body parameters (note that non-required body parameters will ignore garbage
// values, rather than causing a 400 response). Produce warning header and logs for
@@ -9569,7 +9735,10 @@ where
};
let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?;
- match api.create_work_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, context).wait() {
+ match api
+ .create_work_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup_id, param_description, param_extra, context)
+ .wait()
+ {
Ok(rsp) => match rsp {
CreateWorkBatchResponse::CreatedEntities(body) => {
let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize");
@@ -10116,7 +10285,12 @@ where
// Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response)
let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default();
- let param_limit = query_params.get("limit").and_then(|list| list.first()).and_then(|x| x.parse::<i64>().ok());
+ let param_limit = query_params
+ .get("limit")
+ .and_then(|list| list.first())
+ .and_then(|x| Some(x.parse::<i64>()))
+ .map_or_else(|| Ok(None), |x| x.map(|v| Some(v)))
+ .map_err(|x| Response::with((status::BadRequest, "unparsable query parameter (expected integer)".to_string())))?;
match api.get_work_history(param_ident, param_limit, context).wait() {
Ok(rsp) => match rsp {
diff --git a/rust/migrations/2019-01-01-000000_init/down.sql b/rust/migrations/2019-01-01-000000_init/down.sql
index 30e712e3..e238a690 100644
--- a/rust/migrations/2019-01-01-000000_init/down.sql
+++ b/rust/migrations/2019-01-01-000000_init/down.sql
@@ -2,6 +2,7 @@
-- in the opposite order from up.sql
DROP TABLE IF EXISTS release_contrib CASCADE;
+DROP TABLE IF EXISTS refs_blob CASCADE;
DROP TABLE IF EXISTS release_ref CASCADE;
DROP TABLE IF EXISTS file_rev_release CASCADE;
DROP TABLE IF EXISTS fileset_rev_release CASCADE;
diff --git a/rust/migrations/2019-01-01-000000_init/up.sql b/rust/migrations/2019-01-01-000000_init/up.sql
index b4c7a684..2bb3f4ec 100644
--- a/rust/migrations/2019-01-01-000000_init/up.sql
+++ b/rust/migrations/2019-01-01-000000_init/up.sql
@@ -1,4 +1,5 @@
--- written for Postgres 9.6 with OSSP extension for UUIDs -- ... but actually runs on Postgres 10 in qa/production
+-- written for Postgres 9.6 with OSSP extension for UUIDs
+-- ... but actually runs on Postgres 11 in qa/production/tests
-- Previously VARCHAR and fixed-size CHAR was used in this schema for specific
-- columns (especially fixed-size external identifiers, and hashes). This was
@@ -47,7 +48,7 @@ CREATE TABLE editgroup (
created TIMESTAMP WITH TIME ZONE DEFAULT now() NOT NULL,
submitted TIMESTAMP WITH TIME ZONE,
is_accepted BOOLEAN DEFAULT false NOT NULL,
- description TEXT,
+ description TEXT CHECK (octet_length(description) >= 1),
extra_json JSONB
);
@@ -60,7 +61,7 @@ CREATE TABLE editgroup_annotation (
editgroup_id UUID REFERENCES editgroup(id) NOT NULL,
editor_id UUID REFERENCES editor(id) NOT NULL,
created TIMESTAMP WITH TIME ZONE DEFAULT now() NOT NULL,
- comment_markdown TEXT,
+ comment_markdown TEXT CHECK (octet_length(comment_markdown) >= 1),
extra_json JSONB
);
@@ -79,7 +80,13 @@ CREATE INDEX changelog_editgroup_idx ON changelog(editgroup_id);
CREATE TABLE abstracts (
-- fixed size hash (in hex). TODO: switch to bytes
sha1 TEXT PRIMARY KEY CHECK (octet_length(sha1) = 40),
- content TEXT NOT NULL
+ content TEXT NOT NULL CHECK (octet_length(content) >= 8)
+);
+
+CREATE TABLE refs_blob (
+ -- fixed size hash (in hex). TODO: switch to bytes
+ sha1 TEXT PRIMARY KEY CHECK (octet_length(sha1) = 40),
+ refs_json JSONB NOT NULL
);
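
refs_blob is content-addressed: rows are keyed by the SHA-1 (in hex) of the refs JSON, so
identical reference lists intern to a single row. A sketch of how such a key might be computed
with the sha1 crate already in Cargo.toml (the exact canonicalization fatcatd applies before
hashing is not part of this diff):

    extern crate serde_json;
    extern crate sha1;

    use sha1::Sha1;

    // hypothetical helper: the interning key for a refs_blob row
    fn refs_blob_key(refs_json: &serde_json::Value) -> String {
        Sha1::from(refs_json.to_string().as_bytes()).hexdigest()
    }

    fn main() {
        let refs: serde_json::Value =
            serde_json::from_str(r#"[{"extra": {"unstructured": "citation note"}}]"#).unwrap();
        println!("refs_blob.sha1 = {}", refs_blob_key(&refs));
    }
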
-------------------- Creators -----------------------------------------------
@@ -87,9 +94,9 @@ CREATE TABLE creator_rev (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
extra_json JSONB,
- display_name TEXT NOT NULL,
- given_name TEXT,
- surname TEXT,
+ display_name TEXT NOT NULL CHECK (octet_length(display_name) >= 1),
+ given_name TEXT CHECK (octet_length(given_name) >= 1),
+ surname TEXT CHECK (octet_length(surname) >= 1),
-- fixed size identifier
orcid TEXT CHECK(octet_length(orcid) = 19),
-- limited size for data quality
@@ -132,15 +139,13 @@ CREATE TABLE container_rev (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
extra_json JSONB,
- name TEXT NOT NULL,
- publisher TEXT,
+ name TEXT NOT NULL CHECK (octet_length(name) >= 1),
+ container_type TEXT,
+ publisher TEXT CHECK (octet_length(publisher) >= 1),
-- fixed size identifier
issnl TEXT CHECK(octet_length(issnl) = 9),
-- limited size for data quality
- wikidata_qid TEXT CHECK(octet_length(wikidata_qid) <= 12),
- abbrev TEXT,
- -- limited size for data quality
- coden TEXT CHECK(octet_length(coden) <= 6)
+ wikidata_qid TEXT CHECK(octet_length(wikidata_qid) <= 12)
);
CREATE INDEX container_rev_issnl_idx ON container_rev(issnl);
@@ -175,10 +180,10 @@ CREATE TABLE file_rev (
size_bytes BIGINT,
-- fixed size hashes (in hex). TODO: switch to binary type
- sha1 TEXT CHECK(octet_length(sha1) = 40),
- sha256 TEXT CHECK(octet_length(sha256) = 64),
- md5 TEXT CHECK(octet_length(md5) = 32),
- mimetype TEXT
+ sha1 TEXT CHECK (octet_length(sha1) = 40),
+ sha256 TEXT CHECK (octet_length(sha256) = 64),
+ md5 TEXT CHECK (octet_length(md5) = 32),
+ mimetype TEXT CHECK (octet_length(mimetype) >= 3)
);
CREATE INDEX file_rev_sha1_idx ON file_rev(sha1);
@@ -188,8 +193,8 @@ CREATE INDEX file_rev_sha256_idx ON file_rev(sha256);
CREATE TABLE file_rev_url (
id BIGSERIAL PRIMARY KEY,
file_rev UUID REFERENCES file_rev(id) NOT NULL,
- rel TEXT NOT NULL, -- TODO: enum? web, webarchive, repo, etc TODO: default web?
- url TEXT NOT NULL
+ rel TEXT NOT NULL CHECK (octet_length(rel) >= 1), -- TODO: enum? web, webarchive, repo, etc
+ url TEXT NOT NULL CHECK (octet_length(url) >= 1)
);
CREATE INDEX file_rev_url_rev_idx ON file_rev_url(file_rev);
@@ -225,8 +230,8 @@ CREATE TABLE fileset_rev (
CREATE TABLE fileset_rev_url (
id BIGSERIAL PRIMARY KEY,
fileset_rev UUID REFERENCES fileset_rev(id) NOT NULL,
- rel TEXT NOT NULL, -- TODO: enum? web, webarchive, repo, etc TODO: default web?
- url TEXT NOT NULL
+ rel TEXT NOT NULL CHECK (octet_length(rel) >= 1), -- TODO: enum? web, webarchive, repo, etc
+ url TEXT NOT NULL CHECK (octet_length(url) >= 1)
);
CREATE INDEX fileset_rev_url_rev_idx ON fileset_rev_url(fileset_rev);
@@ -234,7 +239,7 @@ CREATE INDEX fileset_rev_url_rev_idx ON fileset_rev_url(fileset_rev);
CREATE TABLE fileset_rev_file (
id BIGSERIAL PRIMARY KEY,
fileset_rev UUID REFERENCES fileset_rev(id) NOT NULL,
- path_name TEXT NOT NULL,
+ path_name TEXT NOT NULL CHECK (octet_length(path_name) >= 1),
size_bytes BIGINT NOT NULL,
md5 TEXT CHECK(octet_length(md5) = 32),
sha1 TEXT CHECK(octet_length(sha1) = 40),
@@ -270,15 +275,15 @@ CREATE TABLE webcapture_rev (
id UUID PRIMARY KEY DEFAULT uuid_generate_v4(),
extra_json JSONB,
- original_url TEXT NOT NULL,
+ original_url TEXT NOT NULL CHECK (octet_length(original_url) >= 1),
timestamp TIMESTAMP WITH TIME ZONE NOT NULL
);
CREATE TABLE webcapture_rev_url (
id BIGSERIAL PRIMARY KEY,
webcapture_rev UUID REFERENCES webcapture_rev(id) NOT NULL,
- rel TEXT NOT NULL, -- TODO: enum? web, webarchive, repo, etc TODO: default web?
- url TEXT NOT NULL
+ rel TEXT NOT NULL CHECK (octet_length(rel) >= 1), -- TODO: enum? web, webarchive, repo, etc
+ url TEXT NOT NULL CHECK (octet_length(url) >= 1)
);
CREATE INDEX webcapture_rev_url_rev_idx ON webcapture_rev_url(webcapture_rev);
@@ -286,10 +291,10 @@ CREATE INDEX webcapture_rev_url_rev_idx ON webcapture_rev_url(webcapture_rev);
CREATE TABLE webcapture_rev_cdx (
id BIGSERIAL PRIMARY KEY,
webcapture_rev UUID REFERENCES webcapture_rev(id) NOT NULL,
- surt TEXT NOT NULL,
- timestamp TEXT NOT NULL, -- TODO: timestamp type?
- url TEXT NOT NULL,
- mimetype TEXT,
+ surt TEXT NOT NULL CHECK (octet_length(surt) >= 1),
+ timestamp TIMESTAMP WITH TIME ZONE NOT NULL,
+ url TEXT NOT NULL CHECK (octet_length(url) >= 1),
+ mimetype TEXT CHECK (octet_length(mimetype) >= 1),
status_code BIGINT,
sha1 TEXT CHECK(octet_length(sha1) = 40) NOT NULL,
sha256 TEXT CHECK(octet_length(sha256) = 64)
@@ -326,23 +331,28 @@ CREATE TABLE release_rev (
work_ident_id UUID NOT NULL, -- FOREIGN KEY; see ALTER below
container_ident_id UUID REFERENCES container_ident(id),
- title TEXT NOT NULL,
+ refs_blob_sha1 TEXT REFERENCES refs_blob(sha1),
+ title TEXT NOT NULL CHECK (octet_length(title) >= 1),
+ original_title TEXT CHECK (octet_length(original_title) >= 1),
release_type TEXT, -- TODO: enum
release_status TEXT, -- TODO: enum
release_date DATE,
release_year BIGINT,
- doi TEXT,
+ doi TEXT CHECK (octet_length(doi) >= 7),
-- CHECK for length limit for data quality
- pmid TEXT CHECK(octet_length(pmid) <= 12),
- pmcid TEXT CHECK(octet_length(pmcid) <= 12),
- wikidata_qid TEXT CHECK(octet_length(wikidata_qid) <= 12),
- isbn13 TEXT CHECK(octet_length(isbn13) = 17),
- core_id TEXT CHECK(octet_length(core_id) <= 12),
- volume TEXT,
- issue TEXT,
- pages TEXT,
- publisher TEXT, -- for books, NOT if container exists
- language TEXT -- primary language of the work's fulltext; RFC1766/ISO639-1
+ pmid TEXT CHECK (octet_length(pmid) <= 12),
+ pmcid TEXT CHECK (octet_length(pmcid) <= 12),
+ wikidata_qid TEXT CHECK (octet_length(wikidata_qid) <= 12),
+ isbn13 TEXT CHECK (octet_length(isbn13) = 17),
+ core_id TEXT CHECK (octet_length(core_id) <= 12),
+ arxiv_id TEXT CHECK (octet_length(arxiv_id) <= 12),
+ jstor_id TEXT CHECK (octet_length(jstor_id) <= 12),
+ volume TEXT CHECK (octet_length(volume) >= 1),
+ issue TEXT CHECK (octet_length(issue) >= 1),
+ pages TEXT CHECK (octet_length(pages) >= 1),
+ publisher TEXT CHECK (octet_length(publisher) >= 1), -- for books, NOT if container exists
+ language TEXT CHECK (octet_length(language) >= 1), -- primary language of the work's fulltext; RFC1766/ISO639-1
+ license_slug TEXT CHECK (octet_length(license_slug) >= 1)
-- TODO: oclc_ocn (TEXT or BIGINT)
-- TODO: identifier table?
);
@@ -353,14 +363,16 @@ CREATE INDEX release_rev_pmcid_idx ON release_rev(pmcid);
CREATE INDEX release_rev_wikidata_idx ON release_rev(wikidata_qid);
CREATE INDEX release_rev_isbn13_idx ON release_rev(isbn13);
CREATE INDEX release_rev_core_idx ON release_rev(core_id);
+CREATE INDEX release_rev_arxiv_idx ON release_rev(arxiv_id);
+CREATE INDEX release_rev_jstor_idx ON release_rev(jstor_id);
CREATE INDEX release_rev_work_idx ON release_rev(work_ident_id);
CREATE TABLE release_rev_abstract (
id BIGSERIAL PRIMARY KEY,
release_rev UUID REFERENCES release_rev(id) NOT NULL,
abstract_sha1 TEXT REFERENCES abstracts(sha1) NOT NULL,
- mimetype TEXT,
- lang TEXT
+ mimetype TEXT CHECK (octet_length(mimetype) >= 1),
+ lang TEXT CHECK (octet_length(lang) >= 1)
);
CREATE INDEX release_rev_abstract_rev_idx ON release_rev_abstract(release_rev);
@@ -426,8 +438,9 @@ CREATE TABLE release_contrib (
id BIGSERIAL PRIMARY KEY,
release_rev UUID REFERENCES release_rev(id) NOT NULL,
creator_ident_id UUID REFERENCES creator_ident(id),
- raw_name TEXT,
+ raw_name TEXT CHECK (octet_length(raw_name) >= 1),
role TEXT, -- TODO: enum?
+ raw_affiliation TEXT CHECK (octet_length(raw_affiliation) >= 1),
index_val INTEGER,
extra_json JSONB
);
@@ -436,20 +449,19 @@ CREATE INDEX release_contrib_rev_idx ON release_contrib(release_rev);
CREATE INDEX release_contrib_creator_idx ON release_contrib(creator_ident_id);
CREATE TABLE release_ref (
- id BIGSERIAL PRIMARY KEY,
release_rev UUID REFERENCES release_rev(id) NOT NULL,
- target_release_ident_id UUID REFERENCES release_ident(id), -- or work?
- index_val INTEGER,
- key TEXT,
- extra_json JSONB, -- title, year, container_title, locator (aka, page), oci_id
- container_name TEXT,
- year INTEGER,
- title TEXT,
- locator TEXT
- -- TODO: oci_id (TEXT)
-);
-
-CREATE INDEX release_ref_rev_idx ON release_ref(release_rev);
+ index_val INTEGER NOT NULL,
+ target_release_ident_id UUID REFERENCES release_ident(id) NOT NULL,
+ -- all other fields are interned in refs_blob as JSONB
+ -- key TEXT,
+ -- extra_json JSONB, -- title, year, container_title, locator (aka, page), oci_id
+ -- container_name TEXT,
+ -- year INTEGER,
+ -- title TEXT,
+ -- locator TEXT
+ PRIMARY KEY(release_rev, index_val)
+);
+
CREATE INDEX release_ref_target_release_idx ON release_ref(target_release_ident_id);
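
The rebuilt release_ref table keeps only position-keyed links to target releases; all other
per-reference fields now live once in refs_blob. A sketch of the split using hypothetical
local types (the real logic lives in entity_crud.rs, not shown in this section):

    extern crate serde_json;

    // hypothetical stand-in for the compact release_ref row
    struct RefRow {
        index_val: i32,
        target_release_ident_id: String, // UUID in the real schema
    }

    // one compact row per ref that names a target release; the full array is
    // stored once in refs_blob either way
    fn split_refs(refs: &[serde_json::Value]) -> (Vec<RefRow>, serde_json::Value) {
        let rows = refs
            .iter()
            .enumerate()
            .filter_map(|(i, r)| {
                r.get("target_release_id")
                    .and_then(|v| v.as_str())
                    .map(|ident| RefRow {
                        index_val: i as i32,
                        target_release_ident_id: ident.to_string(),
                    })
            })
            .collect();
        (rows, serde_json::Value::Array(refs.to_vec()))
    }

    fn main() {
        let refs: Vec<serde_json::Value> = serde_json::from_str(
            r#"[{"extra": {"unstructured": "one"}},
                {"target_release_id": "aaaaaaaaaaaaarceaaaaaaaaae"}]"#,
        )
        .unwrap();
        let (rows, blob) = split_refs(&refs);
        assert_eq!(rows.len(), 1);
        assert_eq!(rows[0].index_val, 1);
        println!("{} linked row(s); blob = {}", rows.len(), blob);
    }
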
CREATE TABLE file_rev_release (
@@ -516,10 +528,10 @@ INSERT INTO abstracts (sha1, content) VALUES
('1ba86bf8c2979a62d29b18b537e50b2b093be27e', 'some long abstract in plain text'),
('0da908ab584b5e445a06beb172e3fab8cb5169e3', '<jats>A longer, more correct abstract should in theory go here</jats>');
-INSERT INTO container_rev (id, name, publisher, issnl, abbrev, coden, extra_json) VALUES
- ('00000000-0000-0000-1111-FFF000000001', 'MySpace Blog', null, null, null, null, null),
- ('00000000-0000-0000-1111-FFF000000002', 'Journal of Trivial Results', 'bogus publishing group', '1234-5678', 'Triv. Res.', 'CDNXYZ', '{"is_oa": false, "in_doaj": false}'),
- ('00000000-0000-0000-1111-FFF000000003', 'PLOS Medicine', 'Public Library of Science', '1549-1277', 'PLoS med.', null, '{"is_oa": true, "in_doaj": true}');
+INSERT INTO container_rev (id, name, publisher, issnl, extra_json) VALUES
+ ('00000000-0000-0000-1111-FFF000000001', 'MySpace Blog', null, null, null),
+ ('00000000-0000-0000-1111-FFF000000002', 'Journal of Trivial Results', 'bogus publishing group', '1234-5678', '{"is_oa": false, "in_doaj": false}'),
+ ('00000000-0000-0000-1111-FFF000000003', 'PLOS Medicine', 'Public Library of Science', '1549-1277', '{"is_oa": true, "in_doaj": true}');
INSERT INTO container_ident (id, is_live, rev_id, redirect_id) VALUES
('00000000-0000-0000-1111-000000000001', true, '00000000-0000-0000-1111-FFF000000001', null), -- aaaaaaaaaaaaaeiraaaaaaaaae
@@ -598,9 +610,9 @@ INSERT INTO webcapture_rev (id, original_url, timestamp) VALUES
('00000000-0000-0000-7777-FFF000000003', 'https://asheesh.org', '2003-02-17T04:47:21Z');
INSERT INTO webcapture_rev_cdx (webcapture_rev, surt, timestamp, url, mimetype, status_code, sha1, sha256) VALUES
- ('00000000-0000-0000-7777-FFF000000002', 'org,example)/', 19960102123456, 'http://example.org', null, 200, '5886903ba5aeaf7446fe9f77bd03adfc029cedf0', null),
- ('00000000-0000-0000-7777-FFF000000003', 'org,asheesh)/', 20030217044721, 'http://asheesh.org:80/', 'text/html', 200, '5886903ba5aeaf7446fe9f77bd03adfc029cedf0', 'ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362'),
- ('00000000-0000-0000-7777-FFF000000003', 'org,asheesh)/robots.txt', 20030217044719, 'http://asheesh.org:80/robots.txt', 'text/html', 404, 'a637f1d27d9bcb237310ed29f19c07e1c8cf0aa5', 'ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362');
+ ('00000000-0000-0000-7777-FFF000000002', 'org,example)/', '1996-01-02T12:34:56Z', 'http://example.org', null, 200, '5886903ba5aeaf7446fe9f77bd03adfc029cedf0', null),
+ ('00000000-0000-0000-7777-FFF000000003', 'org,asheesh)/', '2003-02-17T04:47:21Z', 'http://asheesh.org:80/', 'text/html', 200, '5886903ba5aeaf7446fe9f77bd03adfc029cedf0', 'ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362'),
+ ('00000000-0000-0000-7777-FFF000000003', 'org,asheesh)/robots.txt', '2003-02-17T04:47:19Z', 'http://asheesh.org:80/robots.txt', 'text/html', 404, 'a637f1d27d9bcb237310ed29f19c07e1c8cf0aa5', 'ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362');
INSERT INTO webcapture_rev_url (webcapture_rev, rel, url) VALUES
('00000000-0000-0000-7777-FFF000000002', 'wayback', 'http://web.archive.org/201801010001/http://example.org'),
@@ -632,10 +644,14 @@ INSERT INTO work_edit (ident_id, rev_id, redirect_id, editgroup_id, prev_rev) VA
('00000000-0000-0000-5555-000000000002', '00000000-0000-0000-5555-FFF000000002', null, '00000000-0000-0000-BBBB-000000000004', null),
('00000000-0000-0000-5555-000000000002', '00000000-0000-0000-5555-FFF000000003', null, '00000000-0000-0000-BBBB-000000000005', '00000000-0000-0000-5555-FFF000000002');
-INSERT INTO release_rev (id, work_ident_id, container_ident_id, title, release_type, release_status, release_date, release_year, doi, wikidata_qid, pmid, pmcid, isbn13, core_id, volume, issue, pages, publisher, language) VALUES
- ('00000000-0000-0000-4444-FFF000000001', '00000000-0000-0000-5555-000000000001', null, 'example title', null, null, null, null, null, null, null, null, null, null, null, null, null, null, null),
- ('00000000-0000-0000-4444-FFF000000002', '00000000-0000-0000-5555-000000000002', '00000000-0000-0000-1111-000000000001', 'bigger example', 'article-journal', null, '2018-01-01', 2018, '10.123/abc', 'Q55555', '54321', 'PMC555','978-3-16-148410-0', '42022773', '12', 'IV', '5-9', 'bogus publishing group', 'cn'),
- ('00000000-0000-0000-4444-FFF000000003', '00000000-0000-0000-5555-000000000003', '00000000-0000-0000-1111-000000000003', 'Why Most Published Research Findings Are False', 'article-journal', 'published', '2005-08-30', 2005, '10.1371/journal.pmed.0020124', null, null, null, null, null, '2', '8', 'e124', 'Public Library of Science', 'en');
+INSERT INTO refs_blob (sha1, refs_json) VALUES
+ ('22222222c2979a62d29b18b537e50b2b093be27e', '[{}, {}, {}, {}, {"extra": {"unstructured":"citation note"}}]'),
+ ('33333333c2979a62d29b18b537e50b2b093be27e', '[{"extra": {"unstructured": "Ioannidis JP, Haidich AB, Lau J. Any casualties in the clash of randomised and observational evidence? BMJ. 2001;322:879–880"}}, {"extra": {"unstructured":"Lawlor DA, Davey Smith G, Kundu D, Bruckdorfer KR, Ebrahim S. Those confounded vitamins: What can we learn from the differences between observational versus randomised trial evidence? Lancet. 2004;363:1724–1727."}}, {"extra": {"unstructured":"Vandenbroucke JP. When are observational studies as credible as randomised trials? Lancet. 2004;363:1728–1731."}}, {"extra": {"unstructured":"Michiels S, Koscielny S, Hill C. Prediction of cancer outcome with microarrays: A multiple random validation strategy. Lancet. 2005;365:488–492."}}, {"extra": {"unstructured":"Ioannidis JPA, Ntzani EE, Trikalinos TA, Contopoulos-Ioannidis DG. Replication validity of genetic association studies. Nat Genet. 2001;29:306–309."}}, {"extra": {"unstructured":"Colhoun HM, McKeigue PM, Davey Smith G. Problems of reporting genetic associations with complex outcomes. Lancet. 2003;361:865–872."}}]');
+
+INSERT INTO release_rev (id, work_ident_id, container_ident_id, title, release_type, release_status, release_date, release_year, doi, wikidata_qid, pmid, pmcid, isbn13, core_id, volume, issue, pages, publisher, language, refs_blob_sha1) VALUES
+ ('00000000-0000-0000-4444-FFF000000001', '00000000-0000-0000-5555-000000000001', null, 'example title', null, null, null, null, null, null, null, null, null, null, null, null, null, null, null, null),
+ ('00000000-0000-0000-4444-FFF000000002', '00000000-0000-0000-5555-000000000002', '00000000-0000-0000-1111-000000000001', 'bigger example', 'article-journal', null, '2018-01-01', 2018, '10.123/abc', 'Q55555', '54321', 'PMC555','978-3-16-148410-0', '42022773', '12', 'IV', '5-9', 'bogus publishing group', 'cn', '22222222c2979a62d29b18b537e50b2b093be27e'),
+ ('00000000-0000-0000-4444-FFF000000003', '00000000-0000-0000-5555-000000000003', '00000000-0000-0000-1111-000000000003', 'Why Most Published Research Findings Are False', 'article-journal', 'published', '2005-08-30', 2005, '10.1371/journal.pmed.0020124', null, null, null, null, null, '2', '8', 'e124', 'Public Library of Science', 'en', '33333333c2979a62d29b18b537e50b2b093be27e');
INSERT INTO release_ident (id, is_live, rev_id, redirect_id) VALUES
('00000000-0000-0000-4444-000000000001', true, '00000000-0000-0000-4444-FFF000000001', null), -- aaaaaaaaaaaaarceaaaaaaaaae
@@ -656,15 +672,14 @@ INSERT INTO release_contrib (release_rev, creator_ident_id, raw_name, role, inde
('00000000-0000-0000-4444-FFF000000002', '00000000-0000-0000-2222-000000000002', 'some contrib', 'editor', 4),
('00000000-0000-0000-4444-FFF000000003', '00000000-0000-0000-2222-000000000003', 'John P. A. Ioannidis', 'author', 0);
-INSERT INTO release_ref (release_rev, target_release_ident_id, index_val, extra_json) VALUES
- ('00000000-0000-0000-4444-FFF000000002', null, null, null),
- ('00000000-0000-0000-4444-FFF000000002', '00000000-0000-0000-4444-000000000001', 4, '{"unstructured":"citation note"}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 0, '{"unstructured": "Ioannidis JP, Haidich AB, Lau J. Any casualties in the clash of randomised and observational evidence? BMJ. 2001;322:879–880"}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 1, '{"unstructured":"Lawlor DA, Davey Smith G, Kundu D, Bruckdorfer KR, Ebrahim S. Those confounded vitamins: What can we learn from the differences between observational versus randomised trial evidence? Lancet. 2004;363:1724–1727."}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 2, '{"unstructured":"Vandenbroucke JP. When are observational studies as credible as randomised trials? Lancet. 2004;363:1728–1731."}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 3, '{"unstructured":"Michiels S, Koscielny S, Hill C. Prediction of cancer outcome with microarrays: A multiple random validation strategy. Lancet. 2005;365:488–492."}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 4, '{"unstructured":"Ioannidis JPA, Ntzani EE, Trikalinos TA, Contopoulos-Ioannidis DG. Replication validity of genetic association studies. Nat Genet. 2001;29:306–309."}'),
- ('00000000-0000-0000-4444-FFF000000003', null, 5, '{"unstructured":"Colhoun HM, McKeigue PM, Davey Smith G. Problems of reporting genetic associations with complex outcomes. Lancet. 2003;361:865–872."}');
+INSERT INTO release_ref (release_rev, index_val, target_release_ident_id) VALUES
+ ('00000000-0000-0000-4444-FFF000000002', 4, '00000000-0000-0000-4444-000000000001'), -- '{"unstructured":"citation note"}'),
+ ('00000000-0000-0000-4444-FFF000000003', 0, '00000000-0000-0000-4444-000000000001'), --'{"unstructured": "Ioannidis JP, Haidich AB, Lau J. Any casualties in the clash of randomised and observational evidence? BMJ. 2001;322:879–880"}'),
+ ('00000000-0000-0000-4444-FFF000000003', 1, '00000000-0000-0000-4444-000000000001'), --'{"unstructured":"Lawlor DA, Davey Smith G, Kundu D, Bruckdorfer KR, Ebrahim S. Those confounded vitamins: What can we learn from the differences between observational versus randomised trial evidence? Lancet. 2004;363:1724–1727."}'),
+ ('00000000-0000-0000-4444-FFF000000003', 2, '00000000-0000-0000-4444-000000000001'), --'{"unstructured":"Vandenbroucke JP. When are observational studies as credible as randomised trials? Lancet. 2004;363:1728–1731."}'),
+ ('00000000-0000-0000-4444-FFF000000003', 3, '00000000-0000-0000-4444-000000000001'), --'{"unstructured":"Michiels S, Koscielny S, Hill C. Prediction of cancer outcome with microarrays: A multiple random validation strategy. Lancet. 2005;365:488–492."}'),
+ ('00000000-0000-0000-4444-FFF000000003', 4, '00000000-0000-0000-4444-000000000001'), --'{"unstructured":"Ioannidis JPA, Ntzani EE, Trikalinos TA, Contopoulos-Ioannidis DG. Replication validity of genetic association studies. Nat Genet. 2001;29:306–309."}'),
+ ('00000000-0000-0000-4444-FFF000000003', 5, '00000000-0000-0000-4444-000000000001'); --'{"unstructured":"Colhoun HM, McKeigue PM, Davey Smith G. Problems of reporting genetic associations with complex outcomes. Lancet. 2003;361:865–872."}');
INSERT INTO file_rev_release (file_rev, target_release_ident_id) VALUES
('00000000-0000-0000-3333-FFF000000002', '00000000-0000-0000-4444-000000000002'),
diff --git a/rust/src/bin/fatcatd.rs b/rust/src/bin/fatcatd.rs
index 75a6f000..ccce6725 100644
--- a/rust/src/bin/fatcatd.rs
+++ b/rust/src/bin/fatcatd.rs
@@ -88,7 +88,6 @@ fn main() -> Result<()> {
server.metrics.incr("restart").unwrap();
}
};
- info!(logger, "{:#?}", server.metrics);
info!(
logger,
diff --git a/rust/src/database_models.rs b/rust/src/database_models.rs
index 63fbcb29..adb38bda 100644
--- a/rust/src/database_models.rs
+++ b/rust/src/database_models.rs
@@ -3,8 +3,10 @@
use crate::database_schema::*;
use crate::errors::*;
use crate::identifiers::uuid2fcid;
-use chrono;
-use fatcat_api_spec::models::{ChangelogEntry, Editgroup, EditgroupAnnotation, Editor, EntityEdit};
+use chrono::Utc;
+use fatcat_api_spec::models::{
+ ChangelogEntry, Editgroup, EditgroupAnnotation, Editor, EntityEdit, ReleaseRef,
+};
use serde_json;
use uuid::Uuid;
@@ -127,11 +129,10 @@ pub struct ContainerRevRow {
pub id: Uuid,
pub extra_json: Option<serde_json::Value>,
pub name: String,
+ pub container_type: Option<String>,
pub publisher: Option<String>,
pub issnl: Option<String>,
pub wikidata_qid: Option<String>,
- pub abbrev: Option<String>,
- pub coden: Option<String>,
}
#[derive(Debug, Associations, AsChangeset, Insertable)]
@@ -139,11 +140,10 @@ pub struct ContainerRevRow {
pub struct ContainerRevNewRow {
pub extra_json: Option<serde_json::Value>,
pub name: String,
+ pub container_type: Option<String>,
pub publisher: Option<String>,
pub issnl: Option<String>,
pub wikidata_qid: Option<String>,
- pub abbrev: Option<String>,
- pub coden: Option<String>,
}
entity_structs!(
@@ -305,7 +305,7 @@ pub struct WebcaptureRevCdxRow {
pub id: i64,
pub webcapture_rev: Uuid,
pub surt: String,
- pub timestamp: String,
+ pub timestamp: chrono::DateTime<Utc>,
pub url: String,
pub mimetype: Option<String>,
pub status_code: Option<i64>,
@@ -318,7 +318,7 @@ pub struct WebcaptureRevCdxRow {
pub struct WebcaptureRevCdxNewRow {
pub webcapture_rev: Uuid,
pub surt: String,
- pub timestamp: String,
+ pub timestamp: chrono::DateTime<Utc>,
pub url: String,
pub mimetype: Option<String>,
pub status_code: Option<i64>,
@@ -376,7 +376,9 @@ pub struct ReleaseRevRow {
pub extra_json: Option<serde_json::Value>,
pub work_ident_id: Uuid,
pub container_ident_id: Option<Uuid>,
+ pub refs_blob_sha1: Option<String>,
pub title: String,
+ pub original_title: Option<String>,
pub release_type: Option<String>,
pub release_status: Option<String>,
pub release_date: Option<chrono::NaiveDate>,
@@ -387,11 +389,14 @@ pub struct ReleaseRevRow {
pub wikidata_qid: Option<String>,
pub isbn13: Option<String>,
pub core_id: Option<String>,
+ pub arxiv_id: Option<String>,
+ pub jstor_id: Option<String>,
pub volume: Option<String>,
pub issue: Option<String>,
pub pages: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
+ pub license_slug: Option<String>,
}
#[derive(Debug, Associations, AsChangeset, Insertable)]
@@ -400,7 +405,9 @@ pub struct ReleaseRevNewRow {
pub extra_json: Option<serde_json::Value>,
pub work_ident_id: Uuid,
pub container_ident_id: Option<Uuid>,
+ pub refs_blob_sha1: Option<String>,
pub title: String,
+ pub original_title: Option<String>,
pub release_type: Option<String>,
pub release_status: Option<String>,
pub release_date: Option<chrono::NaiveDate>,
@@ -411,11 +418,14 @@ pub struct ReleaseRevNewRow {
pub wikidata_qid: Option<String>,
pub isbn13: Option<String>,
pub core_id: Option<String>,
+ pub arxiv_id: Option<String>,
+ pub jstor_id: Option<String>,
pub volume: Option<String>,
pub issue: Option<String>,
pub pages: Option<String>,
pub publisher: Option<String>,
pub language: Option<String>,
+ pub license_slug: Option<String>,
}
entity_structs!(
@@ -476,6 +486,7 @@ pub struct ReleaseContribRow {
pub creator_ident_id: Option<Uuid>,
pub raw_name: Option<String>,
pub role: Option<String>,
+ pub raw_affiliation: Option<String>,
pub index_val: Option<i32>,
pub extra_json: Option<serde_json::Value>,
}
@@ -487,39 +498,107 @@ pub struct ReleaseContribNewRow {
pub creator_ident_id: Option<Uuid>,
pub raw_name: Option<String>,
pub role: Option<String>,
+ pub raw_affiliation: Option<String>,
pub index_val: Option<i32>,
pub extra_json: Option<serde_json::Value>,
}
-#[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
+#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "release_ref"]
pub struct ReleaseRefRow {
- pub id: i64,
pub release_rev: Uuid,
- pub target_release_ident_id: Option<Uuid>,
- pub index_val: Option<i32>,
- pub key: Option<String>,
- pub extra_json: Option<serde_json::Value>,
- pub container_name: Option<String>,
- pub year: Option<i32>,
- pub title: Option<String>,
- pub locator: Option<String>,
+ pub index_val: i32,
+ pub target_release_ident_id: Uuid,
}
-#[derive(Debug, Insertable, AsChangeset)]
-#[table_name = "release_ref"]
-pub struct ReleaseRefNewRow {
- pub release_rev: Uuid,
- pub target_release_ident_id: Option<Uuid>,
- pub index_val: Option<i32>,
+#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
+#[table_name = "refs_blob"]
+pub struct RefsBlobRow {
+ pub sha1: String,
+ pub refs_json: serde_json::Value,
+}
+
+/// This model is a stable representation of what goes in a RefsBlobRow `refs_json` field (an array
+/// of this model). We could rely on the `ReleaseRef` API spec model directly, but that would
+/// couple the database contents rigidly to the API spec; by defining this struct independently,
+/// the two schemas can be migrated separately. To start, this is a direct copy of the `ReleaseRef`
+/// model.
+#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
+pub struct RefsBlobJson {
+ #[serde(rename = "index")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub index: Option<i64>,
+
+ /// base32-encoded unique identifier
+ #[serde(rename = "target_release_id")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub target_release_id: Option<String>,
+
+ #[serde(rename = "extra")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub extra: Option<serde_json::Value>,
+
+ #[serde(rename = "key")]
+ #[serde(skip_serializing_if = "Option::is_none")]
pub key: Option<String>,
- pub extra_json: Option<serde_json::Value>,
+
+ #[serde(rename = "year")]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub year: Option<i64>,
+
+ #[serde(rename = "container_name")]
+ #[serde(skip_serializing_if = "Option::is_none")]
pub container_name: Option<String>,
- pub year: Option<i32>,
+
+ #[serde(rename = "title")]
+ #[serde(skip_serializing_if = "Option::is_none")]
pub title: Option<String>,
+
+ #[serde(rename = "locator")]
+ #[serde(skip_serializing_if = "Option::is_none")]
pub locator: Option<String>,
}
+impl RefsBlobJson {
+ pub fn into_model(self) -> ReleaseRef {
+ ReleaseRef {
+ index: self.index,
+ target_release_id: self.target_release_id,
+ extra: self.extra,
+ key: self.key,
+ year: self.year,
+ container_name: self.container_name,
+ title: self.title,
+ locator: self.locator,
+ }
+ }
+
+ pub fn to_model(&self) -> ReleaseRef {
+ ReleaseRef {
+ index: self.index,
+ target_release_id: self.target_release_id.clone(),
+ extra: self.extra.clone(),
+ key: self.key.clone(),
+ year: self.year,
+ container_name: self.container_name.clone(),
+ title: self.title.clone(),
+ locator: self.locator.clone(),
+ }
+ }
+
+ pub fn from_model(model: &ReleaseRef) -> RefsBlobJson {
+ RefsBlobJson {
+ index: model.index,
+ target_release_id: model.target_release_id.clone(),
+ extra: model.extra.clone(),
+ key: model.key.clone(),
+ year: model.year,
+ container_name: model.container_name.clone(),
+ title: model.title.clone(),
+ locator: model.locator.clone(),
+ }
+ }
+}
+
#[derive(Debug, Queryable, Insertable, Associations, AsChangeset)]
#[table_name = "file_rev_release"]
pub struct FileRevReleaseRow {
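
A quick aside on the serde attributes on `RefsBlobJson` above: `skip_serializing_if = "Option::is_none"` keeps absent fields out of the stored JSON entirely, which matters because the blob's SHA-1 key is computed over the serialized string. A minimal standalone round-trip sketch (a trimmed, hypothetical three-field copy of the struct, not the crate's actual type):

    use serde::{Deserialize, Serialize}; // assumes serde with the "derive" feature

    #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
    struct RefJsonDemo {
        #[serde(skip_serializing_if = "Option::is_none")]
        key: Option<String>,
        #[serde(skip_serializing_if = "Option::is_none")]
        year: Option<i64>,
        #[serde(skip_serializing_if = "Option::is_none")]
        title: Option<String>,
    }

    fn main() -> Result<(), serde_json::Error> {
        let refs = vec![RefJsonDemo {
            key: Some("one".to_string()),
            year: Some(1932),
            title: None,
        }];
        let value = serde_json::to_value(&refs)?;
        // `title` is omitted entirely rather than serialized as null
        assert_eq!(value.to_string(), r#"[{"key":"one","year":1932}]"#);
        let back: Vec<RefJsonDemo> = serde_json::from_value(value)?;
        assert_eq!(refs, back);
        Ok(())
    }
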
diff --git a/rust/src/database_schema.rs b/rust/src/database_schema.rs
index 3bc57d95..ea184226 100644
--- a/rust/src/database_schema.rs
+++ b/rust/src/database_schema.rs
@@ -51,11 +51,10 @@ table! {
id -> Uuid,
extra_json -> Nullable<Jsonb>,
name -> Text,
+ container_type -> Nullable<Text>,
publisher -> Nullable<Text>,
issnl -> Nullable<Text>,
wikidata_qid -> Nullable<Text>,
- abbrev -> Nullable<Text>,
- coden -> Nullable<Text>,
}
}
@@ -239,12 +238,20 @@ table! {
}
table! {
+ refs_blob (sha1) {
+ sha1 -> Text,
+ refs_json -> Jsonb,
+ }
+}
+
+table! {
release_contrib (id) {
id -> Int8,
release_rev -> Uuid,
creator_ident_id -> Nullable<Uuid>,
raw_name -> Nullable<Text>,
role -> Nullable<Text>,
+ raw_affiliation -> Nullable<Text>,
index_val -> Nullable<Int4>,
extra_json -> Nullable<Jsonb>,
}
@@ -273,17 +280,10 @@ table! {
}
table! {
- release_ref (id) {
- id -> Int8,
+ release_ref (release_rev, index_val) {
release_rev -> Uuid,
- target_release_ident_id -> Nullable<Uuid>,
- index_val -> Nullable<Int4>,
- key -> Nullable<Text>,
- extra_json -> Nullable<Jsonb>,
- container_name -> Nullable<Text>,
- year -> Nullable<Int4>,
- title -> Nullable<Text>,
- locator -> Nullable<Text>,
+ index_val -> Int4,
+ target_release_ident_id -> Uuid,
}
}
@@ -293,7 +293,9 @@ table! {
extra_json -> Nullable<Jsonb>,
work_ident_id -> Uuid,
container_ident_id -> Nullable<Uuid>,
+ refs_blob_sha1 -> Nullable<Text>,
title -> Text,
+ original_title -> Nullable<Text>,
release_type -> Nullable<Text>,
release_status -> Nullable<Text>,
release_date -> Nullable<Date>,
@@ -304,11 +306,14 @@ table! {
wikidata_qid -> Nullable<Text>,
isbn13 -> Nullable<Text>,
core_id -> Nullable<Text>,
+ arxiv_id -> Nullable<Text>,
+ jstor_id -> Nullable<Text>,
volume -> Nullable<Text>,
issue -> Nullable<Text>,
pages -> Nullable<Text>,
publisher -> Nullable<Text>,
language -> Nullable<Text>,
+ license_slug -> Nullable<Text>,
}
}
@@ -358,7 +363,7 @@ table! {
id -> Int8,
webcapture_rev -> Uuid,
surt -> Text,
- timestamp -> Text,
+ timestamp -> Timestamptz,
url -> Text,
mimetype -> Nullable<Text>,
status_code -> Nullable<Int8>,
@@ -439,6 +444,7 @@ joinable!(release_ident -> release_rev (rev_id));
joinable!(release_ref -> release_ident (target_release_ident_id));
joinable!(release_ref -> release_rev (release_rev));
joinable!(release_rev -> container_ident (container_ident_id));
+joinable!(release_rev -> refs_blob (refs_blob_sha1));
joinable!(release_rev -> work_ident (work_ident_id));
joinable!(release_rev_abstract -> abstracts (abstract_sha1));
joinable!(release_rev_abstract -> release_rev (release_rev));
@@ -475,6 +481,7 @@ allow_tables_to_appear_in_same_query!(
fileset_rev_file,
fileset_rev_release,
fileset_rev_url,
+ refs_blob,
release_contrib,
release_edit,
release_ident,
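
With the new `refs_blob (sha1)` table declared above, fetching a revision's reference blob is a single keyed lookup. A sketch against the schema and models in this commit (a sketch only; connection setup elided, with Diesel's `PgConnection` as used elsewhere in the crate):

    use diesel::pg::PgConnection;
    use diesel::prelude::*;
    use fatcat::database_models::RefsBlobRow;
    use fatcat::database_schema::refs_blob;

    // The primary key is the SHA-1 hex string itself, so .find() suffices.
    fn fetch_refs_blob(conn: &PgConnection, sha1: &str) -> QueryResult<RefsBlobRow> {
        refs_blob::table.find(sha1.to_string()).get_result(conn)
    }
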
diff --git a/rust/src/editing.rs b/rust/src/editing.rs
index e181e8a7..c17e5964 100644
--- a/rust/src/editing.rs
+++ b/rust/src/editing.rs
@@ -42,6 +42,8 @@ pub fn make_edit_context(
editor_id: FatcatId,
editgroup_id: Option<FatcatId>,
autoaccept: bool,
+ description: Option<String>,
+ extra: Option<serde_json::Value>,
) -> Result<EditContext> {
// *either* autoaccept is false and editgroup_id is Some, *or* autoaccept is true and
// editgroup_id is None
@@ -54,8 +56,8 @@ pub fn make_edit_context(
editor: None,
changelog_index: None,
submitted: None,
- description: None,
- extra: None,
+            description,
+            extra,
annotations: None,
edits: None,
};
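
The two new `make_edit_context()` arguments let batch callers attach a description and arbitrary extra metadata to the editgroup being created. A hedged sketch of a call that actually supplies them (the description and agent name are made-up example values; types as exercised in tests/test_refs.rs below):

    use fatcat::editing::{make_edit_context, EditContext};
    use fatcat::errors::Result;
    use fatcat::identifiers::FatcatId;
    use fatcat::server::DbConn;

    fn annotated_edit_context(
        conn: &DbConn,
        editor_id: FatcatId,
        editgroup_id: FatcatId,
    ) -> Result<EditContext> {
        make_edit_context(
            conn,
            editor_id,
            Some(editgroup_id),
            false, // autoaccept=false, so an editgroup_id must be supplied
            Some("example batch import".to_string()),
            Some(serde_json::json!({"agent": "example_importer"})),
        )
    }
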
diff --git a/rust/src/endpoint_handlers.rs b/rust/src/endpoint_handlers.rs
index bc606af9..d9bd3403 100644
--- a/rust/src/endpoint_handlers.rs
+++ b/rust/src/endpoint_handlers.rs
@@ -26,9 +26,11 @@ macro_rules! entity_batch_handler {
autoaccept: bool,
editor_id: FatcatId,
editgroup_id: Option<FatcatId>,
+ description: Option<String>,
+ extra: Option<serde_json::Value>,
) -> Result<Vec<EntityEdit>> {
- let edit_context = make_edit_context(conn, editor_id, editgroup_id, autoaccept)?;
+ let edit_context = make_edit_context(conn, editor_id, editgroup_id, autoaccept, description, extra)?;
edit_context.check(&conn)?;
let model_list: Vec<&models::$model> = entity_list.iter().map(|e| e).collect();
let edits = $model::db_create_batch(conn, &edit_context, model_list.as_slice())?;
@@ -259,71 +261,99 @@ impl Server {
pmid: &Option<String>,
pmcid: &Option<String>,
core_id: &Option<String>,
+ arxiv_id: &Option<String>,
+ jstor_id: &Option<String>,
expand_flags: ExpandFlags,
hide_flags: HideFlags,
) -> Result<ReleaseEntity> {
- let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) =
- match (doi, wikidata_qid, isbn13, pmid, pmcid, core_id) {
- (Some(doi), None, None, None, None, None) => {
- check_doi(doi)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::doi.eq(doi))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- (None, Some(wikidata_qid), None, None, None, None) => {
- check_wikidata_qid(wikidata_qid)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::wikidata_qid.eq(wikidata_qid))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- (None, None, Some(isbn13), None, None, None) => {
- // TODO: check_isbn13(isbn13)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::isbn13.eq(isbn13))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- (None, None, None, Some(pmid), None, None) => {
- check_pmid(pmid)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::pmid.eq(pmid))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- (None, None, None, None, Some(pmcid), None) => {
- check_pmcid(pmcid)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::pmcid.eq(pmcid))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- (None, None, None, None, None, Some(core_id)) => {
- // TODO: check_core_id(core_id)?;
- release_ident::table
- .inner_join(release_rev::table)
- .filter(release_rev::core_id.eq(core_id))
- .filter(release_ident::is_live.eq(true))
- .filter(release_ident::redirect_id.is_null())
- .first(conn)?
- }
- _ => {
- return Err(
- FatcatError::MissingOrMultipleExternalId("in lookup".to_string()).into(),
- );
- }
- };
+ let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = match (
+ doi,
+ wikidata_qid,
+ isbn13,
+ pmid,
+ pmcid,
+ core_id,
+ arxiv_id,
+ jstor_id,
+ ) {
+ (Some(doi), None, None, None, None, None, None, None) => {
+ check_doi(doi)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::doi.eq(doi))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, Some(wikidata_qid), None, None, None, None, None, None) => {
+ check_wikidata_qid(wikidata_qid)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::wikidata_qid.eq(wikidata_qid))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, Some(isbn13), None, None, None, None, None) => {
+ // TODO: check_isbn13(isbn13)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::isbn13.eq(isbn13))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, None, Some(pmid), None, None, None, None) => {
+ check_pmid(pmid)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::pmid.eq(pmid))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, None, None, Some(pmcid), None, None, None) => {
+ check_pmcid(pmcid)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::pmcid.eq(pmcid))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, None, None, None, Some(core_id), None, None) => {
+ // TODO: check_core_id(core_id)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::core_id.eq(core_id))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, None, None, None, None, Some(arxiv_id), None) => {
+ // TODO: check_arxiv_id(arxiv_id)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::arxiv_id.eq(arxiv_id))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ (None, None, None, None, None, None, None, Some(jstor_id)) => {
+ // TODO: check_jstor_id(jstor_id)?;
+ release_ident::table
+ .inner_join(release_rev::table)
+ .filter(release_rev::jstor_id.eq(jstor_id))
+ .filter(release_ident::is_live.eq(true))
+ .filter(release_ident::redirect_id.is_null())
+ .first(conn)?
+ }
+ _ => {
+ return Err(
+ FatcatError::MissingOrMultipleExternalId("in lookup".to_string()).into(),
+ );
+ }
+ };
let mut entity = ReleaseEntity::db_from_row(conn, rev, Some(ident), hide_flags)?;
    entity.db_expand(&conn, expand_flags)?;
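
The widened match above encodes the rule that exactly one of the eight external identifiers may be supplied per lookup; anything else falls through to `MissingOrMultipleExternalId`. The invariant itself is small enough to state standalone (a sketch, not crate code):

    // Exactly one Some() among the lookup parameters, else the request is ambiguous.
    fn exactly_one_set(ids: &[Option<&str>]) -> bool {
        ids.iter().filter(|id| id.is_some()).count() == 1
    }

    fn main() {
        let doi = Some("10.123/abc");
        let arxiv_id: Option<&str> = None;
        let jstor_id: Option<&str> = None;
        assert!(exactly_one_set(&[doi, arxiv_id, jstor_id]));
        assert!(!exactly_one_set(&[doi, Some("1811.01514"), jstor_id]));
        assert!(!exactly_one_set(&[None, None, None]));
    }
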
diff --git a/rust/src/endpoints.rs b/rust/src/endpoints.rs
index f7e93448..2e467957 100644
--- a/rust/src/endpoints.rs
+++ b/rust/src/endpoints.rs
@@ -120,7 +120,7 @@ macro_rules! wrap_entity_handlers {
let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_fn)))?;
auth_context.require_role(FatcatRole::Editor)?;
auth_context.require_editgroup(&conn, editgroup_id)?;
- let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false)?;
+ let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false, None, None)?;
edit_context.check(&conn)?;
entity.db_create(&conn, &edit_context)?.into_model()
}).map_err(|e| FatcatError::from(e)) {
@@ -138,18 +138,30 @@ macro_rules! wrap_entity_handlers {
entity_list: &Vec<models::$model>,
autoaccept: Option<bool>,
editgroup_id: Option<String>,
+ description: Option<String>,
+ extra: Option<String>,
context: &Context,
) -> Box<Future<Item = $post_batch_resp, Error = ApiError> + Send> {
let conn = self.db_pool.get().expect("db_pool error");
let ret = match conn.transaction(|| {
let auth_context = self.auth_confectionary.require_auth(&conn, &context.auth_data, Some(stringify!($post_batch_fn)))?;
- auth_context.require_role(FatcatRole::Editor)?;
+ let autoaccept = autoaccept.unwrap_or(false);
+ if autoaccept {
+ auth_context.require_role(FatcatRole::Admin)?;
+ } else {
+ auth_context.require_role(FatcatRole::Editor)?;
+ };
let editgroup_id = if let Some(s) = editgroup_id {
+            // make_edit_context() checks for the "both editgroup_id and autoaccept" error case
let eg_id = FatcatId::from_str(&s)?;
auth_context.require_editgroup(&conn, eg_id)?;
Some(eg_id)
} else { None };
- self.$post_batch_handler(&conn, entity_list, autoaccept.unwrap_or(false), auth_context.editor_id, editgroup_id)
+ let extra: Option<serde_json::Value> = match extra {
+ Some(v) => serde_json::from_str(&v)?,
+ None => None,
+ };
+ self.$post_batch_handler(&conn, entity_list, autoaccept, auth_context.editor_id, editgroup_id, description, extra)
}).map_err(|e| FatcatError::from(e)) {
Ok(edits) => {
self.metrics.count("entities.created", edits.len() as i64).ok();
@@ -178,7 +190,7 @@ macro_rules! wrap_entity_handlers {
auth_context.require_role(FatcatRole::Editor)?;
let entity_id = FatcatId::from_str(&ident)?;
auth_context.require_editgroup(&conn, editgroup_id)?;
- let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false)?;
+ let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false, None, None)?;
edit_context.check(&conn)?;
entity.db_update(&conn, &edit_context, entity_id)?.into_model()
}).map_err(|e| FatcatError::from(e)) {
@@ -204,7 +216,7 @@ macro_rules! wrap_entity_handlers {
auth_context.require_role(FatcatRole::Editor)?;
let entity_id = FatcatId::from_str(&ident)?;
auth_context.require_editgroup(&conn, editgroup_id)?;
- let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false)?;
+ let edit_context = make_edit_context(&conn, auth_context.editor_id, Some(editgroup_id), false, None, None)?;
edit_context.check(&conn)?;
$model::db_delete(&conn, &edit_context, entity_id)?.into_model()
}).map_err(|e| FatcatError::from(e)) {
@@ -659,6 +671,8 @@ impl Api for Server {
pmid: Option<String>,
pmcid: Option<String>,
core_id: Option<String>,
+ arxiv_id: Option<String>,
+ jstor_id: Option<String>,
expand: Option<String>,
hide: Option<String>,
_context: &Context,
@@ -682,6 +696,8 @@ impl Api for Server {
&pmid,
&pmcid,
&core_id,
+ &arxiv_id,
+ &jstor_id,
expand_flags,
hide_flags,
)
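
Note that the batch endpoints accept `extra` as a raw JSON string query parameter and parse it server-side, so malformed JSON fails the whole transaction. A standalone sketch of that conversion:

    // Option<String> -> Option<serde_json::Value>, as in the batch handlers above;
    // a parse failure propagates as Err (presumably surfacing as a client error).
    fn parse_extra(extra: Option<String>) -> Result<Option<serde_json::Value>, serde_json::Error> {
        match extra {
            Some(raw) => Ok(Some(serde_json::from_str(&raw)?)),
            None => Ok(None),
        }
    }

    fn main() {
        assert!(parse_extra(Some(r#"{"agent": "example"}"#.to_string())).unwrap().is_some());
        assert!(parse_extra(Some("not-json".to_string())).is_err());
        assert!(parse_extra(None).unwrap().is_none());
    }
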
diff --git a/rust/src/entity_crud.rs b/rust/src/entity_crud.rs
index ce1c1ed7..a92c45a6 100644
--- a/rust/src/entity_crud.rs
+++ b/rust/src/entity_crud.rs
@@ -8,7 +8,7 @@
use crate::database_models::*;
use crate::database_schema::*;
use crate::editing::EditContext;
-use crate::endpoint_handlers::get_release_files;
+use crate::endpoint_handlers::{get_release_files, get_release_filesets, get_release_webcaptures};
use crate::errors::*;
use crate::identifiers::*;
use crate::server::DbConn;
@@ -798,8 +798,7 @@ impl EntityCrud for ContainerEntity {
wikidata_qid: None,
publisher: None,
name: None,
- abbrev: None,
- coden: None,
+ container_type: None,
state: Some(ident_row.state().unwrap().shortname()),
ident: Some(FatcatId::from_uuid(&ident_row.id).to_string()),
revision: ident_row.rev_id.map(|u| u.to_string()),
@@ -831,8 +830,7 @@ impl EntityCrud for ContainerEntity {
wikidata_qid: rev_row.wikidata_qid,
publisher: rev_row.publisher,
name: Some(rev_row.name),
- abbrev: rev_row.abbrev,
- coden: rev_row.coden,
+ container_type: rev_row.container_type,
state,
ident: ident_id,
revision: Some(rev_row.id.to_string()),
@@ -869,8 +867,7 @@ impl EntityCrud for ContainerEntity {
publisher: model.publisher.clone(),
issnl: model.issnl.clone(),
wikidata_qid: model.wikidata_qid.clone(),
- abbrev: model.abbrev.clone(),
- coden: model.coden.clone(),
+ container_type: model.container_type.clone(),
extra_json: model.extra.clone(),
})
.collect::<Vec<ContainerRevNewRow>>(),
@@ -1619,6 +1616,7 @@ impl EntityCrud for ReleaseEntity {
Ok(ReleaseEntity {
title: None,
+ original_title: None,
release_type: None,
release_status: None,
release_date: None,
@@ -1627,8 +1625,10 @@ impl EntityCrud for ReleaseEntity {
pmid: None,
pmcid: None,
isbn13: None,
- core_id: None,
wikidata_qid: None,
+ core_id: None,
+ arxiv_id: None,
+ jstor_id: None,
volume: None,
issue: None,
pages: None,
@@ -1639,6 +1639,7 @@ impl EntityCrud for ReleaseEntity {
container_id: None,
publisher: None,
language: None,
+ license_slug: None,
work_id: None,
refs: None,
contribs: None,
@@ -1675,6 +1676,26 @@ impl EntityCrud for ReleaseEntity {
};
self.files = Some(get_release_files(conn, ident, HideFlags::none())?);
}
+ if expand.filesets && self.ident.is_some() {
+ let ident = match &self.ident {
+ None => bail!("Can't expand filesets on a non-concrete entity"), // redundant with above is_some()
+ Some(ident) => match &self.redirect {
+ None => FatcatId::from_str(&ident)?,
+ Some(redir) => FatcatId::from_str(&redir)?,
+ },
+ };
+ self.filesets = Some(get_release_filesets(conn, ident, HideFlags::none())?);
+ }
+ if expand.webcaptures && self.ident.is_some() {
+ let ident = match &self.ident {
+ None => bail!("Can't expand webcaptures on a non-concrete entity"), // redundant with above is_some()
+ Some(ident) => match &self.redirect {
+ None => FatcatId::from_str(&ident)?,
+ Some(redir) => FatcatId::from_str(&redir)?,
+ },
+ };
+ self.webcaptures = Some(get_release_webcaptures(conn, ident, HideFlags::none())?);
+ }
if expand.container {
if let Some(ref cid) = self.container_id {
self.container = Some(ContainerEntity::db_get(
@@ -1812,28 +1833,28 @@ impl EntityCrud for ReleaseEntity {
None => (None, None, None),
};
- let refs: Option<Vec<ReleaseRef>> = match hide.refs {
- true => None,
- false => Some(
- release_ref::table
+ let refs: Option<Vec<ReleaseRef>> = match (hide.refs, rev_row.refs_blob_sha1) {
+ (true, _) => None,
+ (false, None) => Some(vec![]),
+ (false, Some(sha1)) => Some({
+ let refs_blob: RefsBlobRow = refs_blob::table
+ .find(sha1) // checked in match
+ .get_result(conn)?;
+ let refs: Vec<RefsBlobJson> = serde_json::from_value(refs_blob.refs_json)?;
+ let mut refs: Vec<ReleaseRef> = refs.into_iter().map(|j| j.into_model()).collect();
+ let ref_rows: Vec<ReleaseRefRow> = release_ref::table
.filter(release_ref::release_rev.eq(rev_row.id))
.order(release_ref::index_val.asc())
- .get_results(conn)?
- .into_iter()
- .map(|r: ReleaseRefRow| ReleaseRef {
- index: r.index_val.map(|v| v as i64),
- key: r.key,
- extra: r.extra_json,
- container_name: r.container_name,
- year: r.year.map(|v| v as i64),
- title: r.title,
- locator: r.locator,
- target_release_id: r
- .target_release_ident_id
- .map(|v| FatcatId::from_uuid(&v).to_string()),
- })
- .collect(),
- ),
+ .get_results(conn)?;
+            for (index, r) in refs.iter_mut().enumerate() {
+                r.index = Some(index as i64);
+            }
+ for row in ref_rows {
+ refs[row.index_val as usize].target_release_id =
+ Some(FatcatId::from_uuid(&row.target_release_ident_id).to_string());
+ }
+ refs
+ }),
};
let contribs: Option<Vec<ReleaseContrib>> = match hide.contribs {
@@ -1851,6 +1872,7 @@ impl EntityCrud for ReleaseEntity {
index: c.index_val.map(|v| v as i64),
raw_name: c.raw_name,
role: c.role,
+ raw_affiliation: c.raw_affiliation,
extra: c.extra_json,
creator_id: c
.creator_ident_id
@@ -1884,6 +1906,7 @@ impl EntityCrud for ReleaseEntity {
Ok(ReleaseEntity {
title: Some(rev_row.title),
+ original_title: rev_row.original_title,
release_type: rev_row.release_type,
release_status: rev_row.release_status,
release_date: rev_row.release_date,
@@ -1892,8 +1915,10 @@ impl EntityCrud for ReleaseEntity {
pmid: rev_row.pmid,
pmcid: rev_row.pmcid,
isbn13: rev_row.isbn13,
- core_id: rev_row.core_id,
wikidata_qid: rev_row.wikidata_qid,
+ core_id: rev_row.core_id,
+ arxiv_id: rev_row.arxiv_id,
+ jstor_id: rev_row.jstor_id,
volume: rev_row.volume,
issue: rev_row.issue,
pages: rev_row.pages,
@@ -1906,6 +1931,7 @@ impl EntityCrud for ReleaseEntity {
.map(|u| FatcatId::from_uuid(&u).to_string()),
publisher: rev_row.publisher,
language: rev_row.language,
+ license_slug: rev_row.license_slug,
work_id: Some(FatcatId::from_uuid(&rev_row.work_ident_id).to_string()),
refs,
contribs,
@@ -1934,6 +1960,7 @@ impl EntityCrud for ReleaseEntity {
if let Some(ref extid) = entity.wikidata_qid {
check_wikidata_qid(extid)?;
}
+ // TODO: JSTOR and arxiv IDs
if let Some(ref release_type) = entity.release_type {
check_release_type(release_type)?;
}
@@ -1953,13 +1980,65 @@ impl EntityCrud for ReleaseEntity {
.into());
}
+ // First, calculate and upsert any refs JSON blobs and record the SHA1 keys, so they can be
+ // included in the release_rev row itself
+ let mut refs_blob_rows: Vec<RefsBlobRow> = vec![];
+ let mut refs_blob_sha1: Vec<Option<String>> = vec![];
+ for model in models.iter() {
+ match &model.refs {
+ None => {
+ refs_blob_sha1.push(None);
+ }
+ Some(ref_list) => {
+ if ref_list.is_empty() {
+ refs_blob_sha1.push(None);
+ continue;
+ }
+                    // Have to strip out target release ids and indexes, or the hash wouldn't be
+                    // stable when only those fields change
+ let ref_list: Vec<RefsBlobJson> = ref_list
+ .iter()
+ .map(|r: &ReleaseRef| {
+ let mut r = RefsBlobJson::from_model(r);
+ r.target_release_id = None;
+ r.index = None;
+ r
+ })
+ .collect();
+ // TODO: maybe `canonical_json` crate?
+ let refs_json = serde_json::to_value(ref_list)?;
+ let refs_str = refs_json.to_string();
+ let sha1 = Sha1::from(refs_str).hexdigest();
+ let blob = RefsBlobRow {
+ sha1: sha1.clone(),
+ refs_json,
+ };
+ refs_blob_rows.push(blob);
+ refs_blob_sha1.push(Some(sha1));
+ }
+ };
+ }
+
+ if !refs_blob_rows.is_empty() {
+        // Sort of an "upsert"; only inserts new refs blob rows if they don't already exist
+ insert_into(refs_blob::table)
+ .values(&refs_blob_rows)
+ .on_conflict(refs_blob::sha1)
+ .do_nothing()
+ .execute(conn)?;
+ }
+
+ // Then the main release_revs themselves
let rev_ids: Vec<Uuid> = insert_into(release_rev::table)
.values(
models
.iter()
- .map(|model| {
+ .zip(refs_blob_sha1.into_iter())
+ .map(|(model, refs_sha1)| {
Ok(ReleaseRevNewRow {
+ refs_blob_sha1: refs_sha1,
title: model.title.clone().unwrap(), // titles checked above
+ original_title: model.original_title.clone(),
release_type: model.release_type.clone(),
release_status: model.release_status.clone(),
release_date: model.release_date,
@@ -1970,6 +2049,8 @@ impl EntityCrud for ReleaseEntity {
wikidata_qid: model.wikidata_qid.clone(),
isbn13: model.isbn13.clone(),
core_id: model.core_id.clone(),
+ arxiv_id: model.arxiv_id.clone(),
+ jstor_id: model.jstor_id.clone(),
volume: model.volume.clone(),
issue: model.issue.clone(),
pages: model.pages.clone(),
@@ -1983,6 +2064,7 @@ impl EntityCrud for ReleaseEntity {
},
publisher: model.publisher.clone(),
language: model.language.clone(),
+ license_slug: model.license_slug.clone(),
extra_json: model.extra.clone()
})
})
@@ -1991,34 +2073,32 @@ impl EntityCrud for ReleaseEntity {
.returning(release_rev::id)
.get_results(conn)?;
- let mut release_ref_rows: Vec<ReleaseRefNewRow> = vec![];
+ let mut release_ref_rows: Vec<ReleaseRefRow> = vec![];
let mut release_contrib_rows: Vec<ReleaseContribNewRow> = vec![];
let mut abstract_rows: Vec<AbstractsRow> = vec![];
let mut release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = vec![];
for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
+        // The release_rev ids weren't known when the refs blobs were built above, so re-iterate
+        // over the refs here
match &model.refs {
None => (),
Some(ref_list) => {
- let these_ref_rows: Vec<ReleaseRefNewRow> = ref_list
+ let these_ref_rows: Vec<ReleaseRefRow> = ref_list
.iter()
- .map(|r| {
- Ok(ReleaseRefNewRow {
+ .enumerate()
+ .filter(|(_, r)| r.target_release_id.is_some())
+ .map(|(index, r)| {
+ Ok(ReleaseRefRow {
release_rev: *rev_id,
- target_release_ident_id: match r.target_release_id.clone() {
- None => None,
- Some(v) => Some(FatcatId::from_str(&v)?.to_uuid()),
- },
- index_val: r.index.map(|v| v as i32),
- key: r.key.clone(),
- container_name: r.container_name.clone(),
- year: r.year.map(|v| v as i32),
- title: r.title.clone(),
- locator: r.locator.clone(),
- extra_json: r.extra.clone(),
+ // unwrap() checked by is_some() filter
+ target_release_ident_id: FatcatId::from_str(
+ &r.target_release_id.clone().unwrap(),
+ )?
+ .to_uuid(),
+ index_val: index as i32,
})
})
- .collect::<Result<Vec<ReleaseRefNewRow>>>()?;
+ .collect::<Result<Vec<ReleaseRefRow>>>()?;
release_ref_rows.extend(these_ref_rows);
}
};
@@ -2038,6 +2118,7 @@ impl EntityCrud for ReleaseEntity {
raw_name: c.raw_name.clone(),
index_val: c.index.map(|v| v as i32),
role: c.role.clone(),
+ raw_affiliation: c.raw_affiliation.clone(),
extra_json: c.extra.clone(),
})
})
@@ -2053,7 +2134,7 @@ impl EntityCrud for ReleaseEntity {
.iter()
.filter(|ea| ea.content.is_some())
.map(|c| AbstractsRow {
- sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(),
+ sha1: Sha1::from(c.content.as_ref().unwrap()).hexdigest(),
content: c.content.clone().unwrap(),
})
.collect();
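
The normalization in db_create above (dropping `index` and `target_release_id` before hashing) is what lets reference lists differing only in those fields share a single refs_blob row. A standalone sketch of the hash computation, using the same `sha1` crate calls as the diff (raw JSON values stand in for the RefsBlobJson structs):

    use serde_json::json;
    use sha1::Sha1;

    // Strip the per-revision fields, then hash the serialized JSON string.
    fn normalized_sha1(refs: &mut serde_json::Value) -> String {
        for r in refs.as_array_mut().unwrap() {
            let obj = r.as_object_mut().unwrap();
            obj.remove("index");
            obj.remove("target_release_id");
        }
        Sha1::from(refs.to_string()).hexdigest()
    }

    fn main() {
        let mut a = json!([{"key": "one", "year": 1932, "index": 0}]);
        let mut b = json!([{"key": "one", "year": 1932, "index": 99,
                            "target_release_id": "aaaaaaaaaaaaarceaaaaaaaaai"}]);
        // Same content after normalization, so the same blob SHA-1.
        assert_eq!(normalized_sha1(&mut a), normalized_sha1(&mut b));
    }
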
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index b7661334..d089adf8 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -9,6 +9,8 @@ extern crate log;
extern crate lazy_static;
#[macro_use]
extern crate failure;
+#[macro_use]
+extern crate serde_derive;
pub mod auth;
pub mod database_models;
diff --git a/rust/tests/test_api_server_http.rs b/rust/tests/test_api_server_http.rs
index 0ec2650a..66f36a14 100644
--- a/rust/tests/test_api_server_http.rs
+++ b/rust/tests/test_api_server_http.rs
@@ -694,7 +694,7 @@ fn test_post_webcapture() {
"timestamp": "2018-12-28T05:06:07Z",
"cdx": [
{"surt": "org,asheesh,)/robots.txt",
- "timestamp": "20181228050607",
+ "timestamp": "2018-12-28T05:06:07Z",
"url": "https://asheesh.org/robots.txt",
"status_code": 200,
"mimetype": "text/html",
@@ -1409,7 +1409,7 @@ fn test_post_batch_autoaccept() {
None,
);
- // "n"
+ // "n" (TODO)
let editgroup_id = helpers::quick_editgroup(&conn);
helpers::check_http_response(
request::post(
@@ -1421,7 +1421,7 @@ fn test_post_batch_autoaccept() {
r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
&router,
),
- status::Created,
+ status::BadRequest, // TODO
None,
);
@@ -1813,3 +1813,86 @@ fn test_editgroup_annotations() {
Some("special test annotation"),
);
}
+
+#[test]
+fn test_query_params() {
+ let (headers, router, _conn) = helpers::setup_http();
+
+ helpers::check_http_response(
+ request::get(
+ "http://localhost:9411/v0/changelog?limit=true",
+ headers.clone(),
+ &router,
+ ),
+ status::BadRequest,
+ Some("integer"),
+ );
+
+ helpers::check_http_response(
+ request::get(
+            "http://localhost:9411/v0/editgroup/reviewable?since=asdf",
+ headers.clone(),
+ &router,
+ ),
+ status::BadRequest,
+ Some("datetime"),
+ );
+
+ helpers::check_http_response(
+ request::get(
+            "http://localhost:9411/v0/editgroup/reviewable?since=1999-06-05T12:34:00Z",
+ headers.clone(),
+ &router,
+ ),
+ status::Ok,
+ None,
+ );
+
+ // Python3: datetime.datetime.utcnow().isoformat() + "Z"
+ helpers::check_http_response(
+ request::get(
+            "http://localhost:9411/v0/editgroup/reviewable?since=2019-01-17T23:32:03.269010Z",
+ headers.clone(),
+ &router,
+ ),
+ status::Ok,
+ None,
+ );
+
+ // Python3: datetime.datetime.now(datetime.timezone.utc).isoformat()
+ /* TODO: this doesn't work currently :(
+ helpers::check_http_response(
+ request::get(
+ &format!("http://localhost:9411/v0/editgroup/reviewable?since=2019-01-17T23:30:45.799289+00:00"),
+ headers.clone(),
+ &router,
+ ),
+ status::Ok,
+ None,
+ );
+ */
+
+ helpers::check_http_response(
+ request::post(
+ "http://localhost:9411/v0/container/batch?autoaccept=asdf",
+ headers.clone(),
+ r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
+ &router,
+ ),
+ status::BadRequest,
+ Some("boolean"),
+ );
+
+ helpers::check_http_response(
+ request::post(
+ "http://localhost:9411/v0/container/batch?autoaccept=True",
+ headers.clone(),
+ r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
+ &router,
+ ),
+ status::Created,
+ None,
+ );
+}
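
The accepted and rejected `since` values above track chrono's RFC 3339 parsing. A quick standalone check (note that chrono itself accepts an explicit +00:00 offset, so the commented-out TODO case likely fails in the generated parameter-parsing layer rather than in chrono):

    use chrono::{DateTime, Utc};

    fn main() {
        assert!("1999-06-05T12:34:00Z".parse::<DateTime<Utc>>().is_ok());
        assert!("2019-01-17T23:32:03.269010Z".parse::<DateTime<Utc>>().is_ok());
        // chrono parses this form; the server-side rejection happens upstream of it
        assert!("2019-01-17T23:30:45.799289+00:00".parse::<DateTime<Utc>>().is_ok());
        assert!("asdf".parse::<DateTime<Utc>>().is_err());
    }
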
diff --git a/rust/tests/test_refs.rs b/rust/tests/test_refs.rs
new file mode 100644
index 00000000..ae4be4b5
--- /dev/null
+++ b/rust/tests/test_refs.rs
@@ -0,0 +1,169 @@
+use diesel::prelude::*;
+use fatcat::database_models::*;
+use fatcat::database_schema::*;
+use fatcat::editing::{accept_editgroup, make_edit_context};
+use fatcat::entity_crud::{EntityCrud, HideFlags};
+use fatcat::identifiers::FatcatId;
+use fatcat::server;
+use fatcat_api_spec::models::*;
+use std::str::FromStr;
+use uuid::Uuid;
+
+mod helpers;
+
+#[test]
+fn test_refs_blob() {
+ let server = server::create_test_server().unwrap();
+ let conn = server.db_pool.get().expect("db_pool error");
+ let editor_id = FatcatId::from_str(helpers::TEST_ADMIN_EDITOR_ID).unwrap();
+ let editgroup_id = helpers::quick_editgroup(&conn);
+ let edit_context =
+ make_edit_context(&conn, editor_id, Some(editgroup_id), false, None, None).unwrap();
+
+ // this release entity should be unchanged after being inserted/fetched
+ let mut r1 = ReleaseEntity::new();
+ r1.title = Some("release-test hashes".to_string());
+ r1.refs = Some(vec![
+ ReleaseRef {
+ index: Some(0),
+ target_release_id: None,
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(1932),
+ container_name: Some("bogus container".to_string()),
+ title: Some("first bogus paper".to_string()),
+ locator: Some("p100".to_string()),
+ },
+ ReleaseRef {
+ index: Some(1),
+ target_release_id: Some("aaaaaaaaaaaaarceaaaaaaaaai".to_string()),
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(2032),
+ container_name: Some("bogus other container".to_string()),
+ title: Some("second bogus paper".to_string()),
+ locator: Some("p200".to_string()),
+ },
+ ]);
+
+ // this release entity should have the same hash as r1. the indexes will change after fetching,
+ // but otherwise the fetched refs should be the same as the r1 fetched results.
+ let mut r2 = r1.clone();
+ r2.refs = Some(vec![
+ ReleaseRef {
+ index: None,
+ target_release_id: None,
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(1932),
+ container_name: Some("bogus container".to_string()),
+ title: Some("first bogus paper".to_string()),
+ locator: Some("p100".to_string()),
+ },
+ ReleaseRef {
+ index: Some(99),
+ target_release_id: Some("aaaaaaaaaaaaarceaaaaaaaaai".to_string()),
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(2032),
+ container_name: Some("bogus other container".to_string()),
+ title: Some("second bogus paper".to_string()),
+ locator: Some("p200".to_string()),
+ },
+ ]);
+
+ // this release entity has different ref *targets* and indexes, but should still have the same
+ // refs_blob hashes as r1/r2.
+ let mut r3 = r1.clone();
+ r3.refs = Some(vec![
+ ReleaseRef {
+ index: Some(1),
+ target_release_id: Some("aaaaaaaaaaaaarceaaaaaaaaae".to_string()),
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(1932),
+ container_name: Some("bogus container".to_string()),
+ title: Some("first bogus paper".to_string()),
+ locator: Some("p100".to_string()),
+ },
+ ReleaseRef {
+ index: Some(1),
+ target_release_id: Some("aaaaaaaaaaaaarceaaaaaaaaam".to_string()),
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(2032),
+ container_name: Some("bogus other container".to_string()),
+ title: Some("second bogus paper".to_string()),
+ locator: Some("p200".to_string()),
+ },
+ ]);
+
+ // this one is obviously just plain different (hashes shouldn't match)
+ let mut r4 = r1.clone();
+ r4.refs = Some(vec![ReleaseRef {
+ index: Some(1),
+ target_release_id: Some("aaaaaaaaaaaaarceaaaaaaaaae".to_string()),
+ extra: None,
+ key: Some("one".to_string()),
+ year: Some(1932),
+ container_name: Some("bogus container".to_string()),
+ title: Some("first bogus paper".to_string()),
+ locator: Some("p100".to_string()),
+ }]);
+
+ let edit1 = r1.db_create(&conn, &edit_context).unwrap();
+ let edit2 = r2.db_create(&conn, &edit_context).unwrap();
+ let edit3 = r3.db_create(&conn, &edit_context).unwrap();
+ let edit4 = r4.db_create(&conn, &edit_context).unwrap();
+
+ let r1b = ReleaseEntity::db_get(&conn, edit1.ident_id.into(), HideFlags::none()).unwrap();
+ let r2b = ReleaseEntity::db_get(&conn, edit2.ident_id.into(), HideFlags::none()).unwrap();
+ let r3b = ReleaseEntity::db_get(&conn, edit3.ident_id.into(), HideFlags::none()).unwrap();
+ let r4b = ReleaseEntity::db_get(&conn, edit4.ident_id.into(), HideFlags::none()).unwrap();
+ assert_eq!(r1b.refs, r1.refs);
+ assert_eq!(r1b.refs, r2b.refs);
+ assert_ne!(r1b.refs, r3b.refs);
+ assert_ne!(r1b.refs, r4b.refs);
+
+ let r1_row: ReleaseRevRow = release_rev::table
+ .find(Uuid::from_str(&r1b.revision.clone().unwrap()).unwrap())
+ .get_result(&conn)
+ .unwrap();
+ let r2_row: ReleaseRevRow = release_rev::table
+ .find(Uuid::from_str(&r2b.revision.unwrap()).unwrap())
+ .get_result(&conn)
+ .unwrap();
+ let r3_row: ReleaseRevRow = release_rev::table
+ .find(Uuid::from_str(&r3b.revision.clone().unwrap()).unwrap())
+ .get_result(&conn)
+ .unwrap();
+ let r4_row: ReleaseRevRow = release_rev::table
+ .find(Uuid::from_str(&r4b.revision.unwrap()).unwrap())
+ .get_result(&conn)
+ .unwrap();
+ assert_eq!(r1_row.refs_blob_sha1, r2_row.refs_blob_sha1);
+ assert_eq!(r1_row.refs_blob_sha1, r3_row.refs_blob_sha1);
+ assert_ne!(r1_row.refs_blob_sha1, r4_row.refs_blob_sha1);
+
+ // ensure that SHA1 hashing is stable over time (as much as possible!)
+ assert_eq!(
+ r1_row.refs_blob_sha1,
+ Some("4e38812fbf99e00e0cb648896e9f7a9d58c5ab23".to_string())
+ );
+
+    // update r1 with the new ref targets from r3; the stored refs_blob_sha1 still shouldn't change
+ accept_editgroup(&conn, editgroup_id).unwrap();
+ let editgroup_id = helpers::quick_editgroup(&conn);
+ let edit_context =
+ make_edit_context(&conn, editor_id, Some(editgroup_id), false, None, None).unwrap();
+
+ let _edit4 = r3b
+ .db_update(&conn, &edit_context, edit1.ident_id.into())
+ .unwrap();
+ let r1c = ReleaseEntity::db_get(&conn, edit1.ident_id.into(), HideFlags::none()).unwrap();
+ let r1c_row: ReleaseRevRow = release_rev::table
+ .find(Uuid::from_str(&r1c.revision.unwrap()).unwrap())
+ .get_result(&conn)
+ .unwrap();
+ assert_eq!(r1_row.refs_blob_sha1, r1c_row.refs_blob_sha1);
+}