author    | Bryan Newbold <bnewbold@robocracy.org> | 2018-09-09 10:10:42 -0700
committer | Bryan Newbold <bnewbold@robocracy.org> | 2018-09-09 10:10:42 -0700
commit    | b15eff77fdb7974ce2bf3c2e44c8edc354f9f452 (patch)
tree      | 5bccb9ff2633eb35dc00babc0b2dd1842f02e49b
parent    | 419bddcb0377e82e7177356350d35bf84b3e80d8 (diff)
parent    | a29beab0683d77086cc1b431779d0540dc5a9b49 (diff)
Merge branch 'http-verbs' into cockroach
Manually merged conflicts:
rust/migrations/2018-05-12-001226_init/up.sql
rust/src/api_server.rs
rust/src/database_schema.rs
48 files changed, 7243 insertions, 896 deletions
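The headline change in this merge is the autoaccept batch-creation path. A
minimal sketch of how an importer drives it, assuming a configured
fatcat_client; the entity literal is illustrative, while the two API calls
(and the placeholder editor id) are the ones visible in the diff below:

    import fatcat_client

    api = fatcat_client.DefaultApi()
    eg = api.create_editgroup(
        fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
    batch = [fatcat_client.ReleaseEntity(title="An Example Release")]  # illustrative
    # with autoaccept, batch creation and editgroup accept happen in one
    # request, replacing the old create_batch + accept_editgroup two-step
    edits = api.create_release_batch(batch, autoaccept="true", editgroup=eg.id)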
@@ -31,7 +31,7 @@ released, while the API server and web interface are strong copyleft (AGPLv3).
 ## Status
 
 - HTTP API
-    - [ ] base32 encoding of UUID identifiers
+    - [x] base32 encoding of UUID identifiers
     - [x] inverse many-to-many helpers (files-by-release, release-by-creator)
 - SQL Schema
     - [x] Basic entities
@@ -53,7 +53,9 @@ released, while the API server and web interface are strong copyleft (AGPLv3).
 
 ## Identifiers
 
-Fatcat entity identifiers are in "boring"
+Fatcat entity identifiers are 128-bit UUIDs encoded in base32 format. Revision
+ids are also UUIDs, and encoded in normal UUID fashion, to disambiguate from
+entity identifiers.
 
 Python helpers for conversion:
@@ -2,18 +2,34 @@
 ## Next Up
 
 - some significant slow-down has happened? transactions, or regexes?
 
+summer roadmap:
+- PUT/UPDATE, DELETE, and merge code paths
+- faster UPDATE-free bulk import code path
+- container import (extra?): lang, region, subject
+- basic API+webface creation, editing, merging, editgroup approval
+- elastic schema/transform for releases; bulk and continuous scripts
+
 features:
 - fast database dump command: both changelog-based and entity-based (rust)
   => lighter, more complete dumps for each entity type?
+- guide skeleton (mdbook; guide.fatcat.wiki)
 
 importers:
+- CORE
+- wikidata cross-ref (if they have a dump)
 - manifest: multiple URLs per SHA1
-- pubmed (medline)
+- pubmed (medline), if not in CORE
   => and/or, use pubmed ID lookups on crossref import
 - core
 - semantic scholar (up to 39 million; author de-dupe)
 - wikidata (if they have a dump)
+- crossref: relations ("is-preprint-of")
+- crossref: filter works
+  => content-type whitelist
+  => title length and title/slug blacklist
+  => at least one author (?)
+  => make this a method on Release object
+  => or just set release_stub as "stub"?
 
 bugs:
 - test: release pointing to a collection that has been deleted/redirected
@@ -29,10 +45,16 @@ july roadmap:
 ## Schema / Alignment / Scope
 
 - "container" -> "venue"?
-- release_type, release_status, url.rel enums (and others?)
+- release_type, release_status, url.rel write-time schema (and others?)
 
 name ref: https://www.w3.org/International/questions/qa-personal-names
 
+## API
+
+- how to send edit "extra" metadata?
+- hydrate entities in API
+  ? "expand" query param
+
 ## High-Level Priorities
 
 - full database dump (export)
diff --git a/fatcat-openapi2.yml b/fatcat-openapi2.yml
index 7ec91bad..a8919216 100644
--- a/fatcat-openapi2.yml
+++ b/fatcat-openapi2.yml
@@ -37,7 +37,7 @@ x-issn: &FATCATISSN
 x-orcid: &FATCATORCID
   type: string
   example: "0000-0002-1825-0097"
-  pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{4}"
+  pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]"
   minLength: 19
   maxLength: 19
 
@@ -57,6 +57,10 @@ x-entity-props: &ENTITYPROPS
   extra:
     type: object
     additionalProperties: {}
+# TODO:
+#   edit_extra:
+#     type: object
+#     additionalProperties: {}
 
 definitions:
   error_response:
@@ -438,6 +442,16 @@ paths:
     post:
      operationId: "create_container_batch"
      parameters:
+        - name: autoaccept
+          in: query
+          type: boolean
+          required: false
+          description: "If true, and editor is authorized, batch is accepted all at once"
+        - name: editgroup
+          in: query
+          type: string
+          required: false
+          description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
        - name: entity_list
          in: body
          required: true
@@ -459,19 +473,47 @@ paths:
        in: path
        type: string
        required: true
-      - name: expand
-        in: query
-        type: string
-        required: false
-        description: "List of sub-entities to expand in response. For now, only 'all' accepted."
get: operationId: "get_container" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." responses: 200: description: Found Entity schema: $ref: "#/definitions/container_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_container" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/container_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_container" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /container/{id}/history: parameters: - name: id @@ -526,6 +568,16 @@ paths: post: operationId: "create_creator_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -547,19 +599,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_creator" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." responses: 200: description: Found Entity schema: $ref: "#/definitions/creator_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_creator" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/creator_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_creator" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /creator/{id}/history: parameters: - name: id @@ -630,6 +710,16 @@ paths: post: operationId: "create_file_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -651,19 +741,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_file" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." 
      responses:
        200:
          description: Found Entity
          schema:
            $ref: "#/definitions/file_entity"
        <<: *ENTITYRESPONSES
+    put:
+      operationId: "update_file"
+      parameters:
+        - name: entity
+          in: body
+          required: true
+          schema:
+            $ref: "#/definitions/file_entity"
+      responses:
+        200:
+          description: Updated Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
+    delete:
+      operationId: "delete_file"
+      parameters:
+        - name: editgroup
+          in: query
+          required: false
+          type: string
+      responses:
+        200:
+          description: Deleted Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
   /file/{id}/history:
    parameters:
      - name: id
@@ -718,6 +836,16 @@ paths:
     post:
      operationId: "create_release_batch"
      parameters:
+        - name: autoaccept
+          in: query
+          type: boolean
+          required: false
+          description: "If true, and editor is authorized, batch is accepted all at once"
+        - name: editgroup
+          in: query
+          type: string
+          required: false
+          description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
        - name: entity_list
          in: body
          required: true
@@ -739,19 +867,47 @@ paths:
        in: path
        type: string
        required: true
-      - name: expand
-        in: query
-        type: string
-        required: false
-        description: "List of sub-entities to expand in response. For now, only 'all' accepted."
     get:
      operationId: "get_release"
+      parameters:
+        - name: expand
+          in: query
+          type: string
+          required: false
+          description: "List of sub-entities to expand in response. For now, only 'all' accepted."
      responses:
        200:
          description: Found Entity
          schema:
            $ref: "#/definitions/release_entity"
        <<: *ENTITYRESPONSES
+    put:
+      operationId: "update_release"
+      parameters:
+        - name: entity
+          in: body
+          required: true
+          schema:
+            $ref: "#/definitions/release_entity"
+      responses:
+        200:
+          description: Updated Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
+    delete:
+      operationId: "delete_release"
+      parameters:
+        - name: editgroup
+          in: query
+          required: false
+          type: string
+      responses:
+        200:
+          description: Deleted Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
   /release/{id}/history:
    parameters:
      - name: id
@@ -822,6 +978,16 @@ paths:
     post:
      operationId: "create_work_batch"
      parameters:
+        - name: autoaccept
+          in: query
+          type: boolean
+          required: false
+          description: "If true, and editor is authorized, batch is accepted all at once"
+        - name: editgroup
+          in: query
+          type: string
+          required: false
+          description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)"
        - name: entity_list
          in: body
          required: true
@@ -843,19 +1009,47 @@ paths:
        in: path
        type: string
        required: true
-      - name: expand
-        in: query
-        type: string
-        required: false
-        description: "List of sub-entities to expand in response. For now, only 'all' accepted."
     get:
      operationId: "get_work"
+      parameters:
+        - name: expand
+          in: query
+          type: string
+          required: false
+          description: "List of sub-entities to expand in response. For now, only 'all' accepted."
      responses:
        200:
          description: Found Entity
          schema:
            $ref: "#/definitions/work_entity"
        <<: *ENTITYRESPONSES
+    put:
+      operationId: "update_work"
+      parameters:
+        - name: entity
+          in: body
+          required: true
+          schema:
+            $ref: "#/definitions/work_entity"
+      responses:
+        200:
+          description: Updated Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
+    delete:
+      operationId: "delete_work"
+      parameters:
+        - name: editgroup
+          in: query
+          required: false
+          type: string
+      responses:
+        200:
+          description: Deleted Entity
+          schema:
+            $ref: "#/definitions/entity_edit"
+        <<: *ENTITYRESPONSES
   /work/{id}/history:
    parameters:
      - name: id
@@ -1001,10 +1195,18 @@ paths:
          description: Unmergable
          schema:
            $ref: "#/definitions/error_response"
+        400:
+          description: Bad Request
+          schema:
+            $ref: "#/definitions/error_response"
        404:
          description: Not Found
          schema:
            $ref: "#/definitions/error_response"
+        409:
+          description: Edit Conflict
+          schema:
+            $ref: "#/definitions/error_response"
        500:
          description: Generic Error
          schema:
diff --git a/notes/auth_thoughts.txt b/notes/auth_thoughts.txt
index 3ccaf668..4782dd0f 100644
--- a/notes/auth_thoughts.txt
+++ b/notes/auth_thoughts.txt
@@ -10,3 +10,45 @@ haven't been revoked.
 
 Could use portier with openid connect as an email-based option. Otherwise,
 orcid, github, google.
+
+---------
+
+Use macaroons!
+
+editor/user table has an "auth_epoch" timestamp; only macaroons generated
+after this timestamp are valid. revocation is done by incrementing this
+timestamp ("touch").
+
+Rust CLI tool for managing users:
+- create editor
+
+Special users/editors that can create editor accounts via API; eg, one for
+fatcat-web.
+
+Associate one oauth2 id per domain per editor/user.
+
+Users come to fatcat-web and do oauth2 to login or create an account. All
+oauth2 internal to fatcat-web. If successful, fatcat-web does an
+(authenticated) lookup to API for that identifier. If found, requests a
+new macaroon to use as a cookie for auth. All future requests pass this
+cookie through as bearer auth. fatcat-web remains stateless! macaroon
+contains username (for display); no lookup per page. Need to logout/login for
+this to update?
+
+Later, can do an "add additional account" feature.
+
+Backend:
+- oauth2 account table, foreign key to editor table
+  => this is the only private table
+- auth_epoch timestamp column on editor table
+- lock editor by setting auth_epoch to deep future
+
+TODO: privacy policy
+
+fatcat API doesn't *require* auth, but if auth is provided, it will check
+macaroon, and validate against editor table's timestamp.
+
+support oauth2 against:
+- orcid
+- git.archive.org
+- github
+? google
diff --git a/notes/autoaccept_api.txt b/notes/autoaccept_api.txt
new file mode 100644
index 00000000..b7e0a824
--- /dev/null
+++ b/notes/autoaccept_api.txt
@@ -0,0 +1,31 @@

Currently only on batch creation (POST) for entities.

For all bulk operations, optional 'editgroup' query parameter overrides
individual editgroup parameters.

If autoaccept flag is set and editgroup is not, a new editgroup is
automatically created and used for all entities inserted. Note
that this is different behavior from the "use current or create new"
default behavior for regular creation.

Unfortunately, "true" and "false" are the only values acceptable for boolean
rust/openapi2 query parameters.

THOUGHT: doing an UPDATE in a transaction is probably not expensive

Intent:
- check can_autoaccept flag on editor table

---------

Crude benchmarking...
cat /data/crossref/crossref-works.2018-01-21.badsample_5k.json | time ./fatcat_import.py import-crossref - /data/issn/20180216.ISSN-to-ISSN-L.txt

autoaccept: 7.47user 0.48system 0:30.64elapsed 25%CPU
master:     5.70user 0.34system 0:25.61elapsed 23%CPU
    batch creation: ~153ms+
    accept:         ~5ms

uh...
diff --git a/notes/cloud_instances.txt b/notes/cloud_instances.txt
new file mode 100644
index 00000000..b7071758
--- /dev/null
+++ b/notes/cloud_instances.txt
@@ -0,0 +1,10 @@

digital ocean
    48 GB RAM, 12 cores, 960 GB     $240/month (or more)

aws
    i3.2xlarge   61 GB RAM, 8 cores, 1900 GB NVMe, $455/month

OVH
    MG-128   128 GB RAM, 16 cores, 2880 GB SSD (RAID), 500mbps unlimited b/w, $315/month
diff --git a/notes/database_dumps_backups.txt b/notes/database_dumps_backups.txt
new file mode 100644
index 00000000..60d4bba0
--- /dev/null
+++ b/notes/database_dumps_backups.txt
@@ -0,0 +1,53 @@

## Dumps and Backups

There are a few different database dump formats folks might want:

- raw native database backups, for disaster recovery (would include
  volatile/unsupported schema details, user API credentials, full history,
  in-process edits, comments, etc)
- a sanitized version of the above: roughly per-table dumps of the full state
  of the database. Could use per-table SQL expressions with sub-queries to pull
  in small tables ("partial transform") and export JSON for each table; would
  be extra work to maintain, so not pursuing for now.
- full history, full public schema exports, in a form that might be used to
  mirror or entirely fork the project. Propose supplying the full "changelog"
  in API schema format, in a single file to capture all entity history, without
  "hydrating" any inter-entity references. Rely on separate dumps of
  non-entity, non-versioned tables (editors, abstracts, etc). Note that a
  variant of this could use the public interface, in particular to do
  incremental updates (though that wouldn't capture schema changes).
- transformed exports of the current state of the database (aka, without
  history). Useful for data analysis, search engines, etc. Propose supplying
  just the Release table in a fully "hydrated" state to start. Unclear if
  should be on a work or release basis; will go with release for now. Harder to
  do using public interface because of the need for transaction locking.

## Full Postgres Backup

Backing up the entire database using `pg_dump`, with parallelism 1 (use more
on a larger machine with fast disks; try 4 or 8?), assuming the database name
is 'fatcat', and the current user has access:

    pg_dump -j1 -Fd -f test-dump fatcat

## Identifier Dumps

The `extras/quick_dump.sql` script will dump abstracts and identifiers as TSV
files to `/tmp/`. Pretty quick; takes about 15 GB of disk space (uncompressed).
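A rough Python equivalent of one of these identifier dumps (a sketch, assuming
psycopg2 and local access to the 'fatcat' database; the release_ident column
names match the debug queries later in these notes, everything else is
illustrative):

    import psycopg2

    conn = psycopg2.connect("dbname=fatcat")
    with conn.cursor() as cur, open("/tmp/fatcat_ident_releases.tsv", "w") as f:
        # COPY ... TO STDOUT streams rows straight to the file, without
        # materializing them client-side
        cur.copy_expert(
            "COPY (SELECT id, rev_id, redirect_id FROM release_ident "
            "WHERE is_live = true) TO STDOUT WITH (FORMAT text)", f)
    conn.close()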
## Releases Export

    # simple command
    ./fatcat_export.py releases /tmp/fatcat_ident_releases.tsv /tmp/releases-dump.json

    # usual command
    time ./fatcat_export.py releases /tmp/fatcat_ident_releases.tsv - | pv -l | wc

## Changelog Export

    # simple command
    ./fatcat_export.py changelog /tmp/changelog-dump.json

    # usual command
    time ./fatcat_export.py changelog - | pv -l | wc
diff --git a/notes/import_timing_20180815.txt b/notes/import_timing_20180815.txt
new file mode 100644
index 00000000..1206cc41
--- /dev/null
+++ b/notes/import_timing_20180815.txt
@@ -0,0 +1,292 @@

Schema changes since previous imports:
- more fields (identifiers+indexes)
- timestamps
- UUIDs more places
- fixed some crossref import bugs?
- abstracts
- file_urls as table (not single value)
- TEXT -> CHAR in a few places
- removed many work fields

## Containers

(python)webcrawl@wbgrp-svc500:/srv/fatcat/src/python$ time ./fatcat_import.py import-issn /srv/datasets/journal_extra_metadata.csv

    real    1m25.292s
    user    0m12.640s
    sys     0m0.412s

## Creators

time parallel --bar --pipepart -j8 -a /srv/datasets/public_profiles_1_2_json.all.json ./fatcat_import.py import-orcid -

(times very invalid due to hangs; got 3537837 creators, which is most of the
way, so *shrug*)

    real    22m2.465s
    user    26m41.924s
    sys     1m33.844s

## Releases

xzcat /srv/datasets/crossref-works.2018-01-21.json.xz | time parallel -j20 --round-robin --pipe ./fatcat_import.py import-crossref - /srv/datasets/20180216.ISSN-to-ISSN-L.txt

    128516.30 user
    3905.14 system
    44:17:05 elapsed
    83% CPU

Almost 44 hours... I think I remember more like 36 hours last time? Things
slowed down a lot towards the end, many more ORCID cross-references?

Looking in htop, postgres seems to be the primary bottleneck. At something
like 12 hours in, had 44 million release_ident rows, which is 1000/second.

Note: seems like the more frequently `count(*)` is run, the more performant.
Because in-memory?

    2018-08-16 16:54:16.977 UTC [17996] postgres@fatcat_prod LOG:  duration: 42949.549 ms  statement: select count(id) from release_ident;

    fatcat_prod=# select count(*) from release_ident;
      count
    ----------
     44185608
    (1 row)
    Time: 2753.916 ms (00:02.754)

    fatcat_prod=# select count(*) from release_ident;
      count
    ----------
     44187937
    (1 row)
    Time: 2711.670 ms (00:02.712)

As expected, autovacuum very busy. Only ~150 TPS; but that includes batch
writes? 75061172 rows.

## Files

    time ./fatcat_import.py import-manifest /srv/datasets/idents_files_urls.sqlite

    Done! Inserted 6607075

    real    2152m28.822s => 36 hours (!)
    user    401m46.464s
    sys     21m45.724s

Going pretty slow, < 100 transactions/sec. Lots of SELECTs, which seem slow,
on the abstract table?

    SELECT "release_rev_abstract"."id", "release_rev_abstract"."release_rev", "release_rev_abstract"."abstract_sha1", "release_rev_abstract"."mimetype", "release_rev_abstract"."lang", "abstracts"."sha1", "abstracts"."content" FROM ("release_rev_abstract" INNER JOIN "abstracts" ON "release_rev_abstract"."abstract_sha1" = "abstracts"."sha1") WHERE "release_rev_abstract"."release_rev" = 'ffffffc0-4dd2-47ce-a51d-44051f3699ce';

Created index:

    CREATE INDEX release_rev_abstract_rev_idx ON release_rev_abstract(release_rev);

... and things sped way up. Re-ran some crossref imports to EXPLAIN and didn't
see non-indexed queries. Maybe an ANALYZE does need to happen?
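A quick way to double-check the planner from Python (a sketch; same local
'fatcat' database assumed, and the UUID literal is the one from the slow query
above):

    import psycopg2

    conn = psycopg2.connect("dbname=fatcat")
    with conn.cursor() as cur:
        cur.execute("ANALYZE release_rev_abstract")  # refresh planner stats
        cur.execute(
            "EXPLAIN SELECT * FROM release_rev_abstract WHERE release_rev = %s",
            ("ffffffc0-4dd2-47ce-a51d-44051f3699ce",))
        for (line,) in cur.fetchall():
            # expect an Index Scan using release_rev_abstract_rev_idx
            print(line)
    conn.close()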
This being single-threaded is going to be a problem in the future. ~50 million
files would be ~2 weeks.

## Post-Import Status

    Size:  358.89G (postgres self-reported)
    Mem.:  57.10% - 16.85G/49.14G

Was 184G last time in late June; doubled in size (!).

    bnewbold@wbgrp-svc500$ df -h /
    Filesystem      Size  Used Avail Use% Mounted on
    /dev/vda1       858G  529G  286G  65% /

    bnewbold@wbgrp-svc500$ sudo du -sh /var/lib/postgresql/ /srv/datasets/ /srv/elastic-blah/
    361G    /var/lib/postgresql/
    83G     /srv/datasets/
    77G     /srv/elastic-blah/

    fatcat_prod=# select count(*) from changelog; => 2,085,067

    SELECT
        table_name,
        pg_size_pretty(table_size) AS table_size,
        pg_size_pretty(indexes_size) AS indexes_size,
        pg_size_pretty(total_size) AS total_size
    FROM (
        SELECT
            table_name,
            pg_table_size(table_name) AS table_size,
            pg_indexes_size(table_name) AS indexes_size,
            pg_total_relation_size(table_name) AS total_size
        FROM (
            SELECT ('"' || table_schema || '"."' || table_name || '"') AS table_name
            FROM information_schema.tables
        ) AS all_tables
        ORDER BY total_size DESC
    ) AS pretty_sizes;

     table_name                             | table_size | indexes_size | total_size
    ----------------------------------------+------------+--------------+------------
     "public"."release_ref"                 | 159 GB     | 47 GB        | 206 GB
     "public"."release_rev"                 | 40 GB      | 10 GB        | 51 GB
     "public"."release_contrib"             | 19 GB      | 20 GB        | 39 GB
     "public"."release_ident"               | 5797 MB    | 6597 MB      | 12 GB
     "public"."work_ident"                  | 5787 MB    | 6394 MB      | 12 GB
     "public"."release_edit"                | 6674 MB    | 4646 MB      | 11 GB
     "public"."work_edit"                   | 6674 MB    | 4646 MB      | 11 GB
     "public"."work_rev"                    | 3175 MB    | 2939 MB      | 6114 MB
     "public"."file_rev_url"                | 1995 MB    | 275 MB       | 2270 MB
     "public"."abstracts"                   | 1665 MB    | 135 MB       | 1800 MB
     "public"."file_rev"                    | 829 MB     | 954 MB       | 1783 MB
     "public"."file_ident"                  | 498 MB     | 532 MB       | 1030 MB
     "public"."file_release"                | 369 MB     | 642 MB       | 1011 MB
     "public"."file_edit"                   | 591 MB     | 410 MB       | 1002 MB
     "public"."creator_rev"                 | 337 MB     | 318 MB       | 655 MB
     "public"."creator_ident"               | 280 MB     | 297 MB       | 577 MB
     "public"."creator_edit"                | 316 MB     | 220 MB       | 536 MB
     "public"."release_rev_abstract"        | 183 MB     | 84 MB        | 267 MB
     "public"."changelog"                   | 123 MB     | 125 MB       | 249 MB
     "public"."editgroup"                   | 139 MB     | 81 MB        | 220 MB
     "public"."container_rev"               | 19 MB      | 6912 kB      | 26 MB
     "public"."container_ident"             | 6896 kB    | 7016 kB      | 14 MB
     "public"."container_edit"              | 8056 kB    | 5240 kB      | 13 MB

In context, the full uncompressed crossref 2018-01-21 dump is about 285 GB.

For many of these indexes, and the _ident tables, switching from UUID to
BIGSERIAL would halve the size.

## Exports

    time ./fatcat_export.py changelog - | pv -l | wc

    As of:

    159k 1:17:35 [34.3 /s]
    159,740 lines
    2,427,277,881 chars (bytes; 2.4 GB)

    real    77m35.183s
    user    15m36.208s
    sys     0m31.484s

Running at about 100/sec; estimate 6 hours for completion. Could shard using
start/end flags, but am not doing that here.

Running `quick_dump.sql` (identifier tables, in a transaction):

    251M  Aug 19 23:08  fatcat_ident_creators.tsv
    5.9M  Aug 19 23:08  fatcat_ident_containers.tsv
    467M  Aug 19 23:08  fatcat_ident_files.tsv
    5.2G  Aug 19 23:10  fatcat_ident_releases.tsv
    5.2G  Aug 19 23:11  fatcat_ident_works.tsv
    12K   Aug 19 23:11  .
    1.8G  Aug 19 23:12  fatcat_abstracts.json

Work and Release tables in under 2 minutes each; say 5 minutes total.
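Related: for mapping the UUIDs in these ident dumps to the 26-character ids
the API and webface use, the new helpers in python/fatcat/fcid.py (added later
in this diff) round-trip cleanly:

    from fatcat.fcid import fcid2uuid, uuid2fcid

    u = '00000000-0000-0000-3333-000000000001'
    assert fcid2uuid(uuid2fcid(u)) == u  # same round-trip as fcid.py's own test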
    time ./fatcat_export.py releases /tmp/fatcat_ident_releases.tsv - | pv -l | wc

    172k 1:07:08 [42.7 /s]
    172181 lines
    1,118,166,293 chars (bytes; 1.1 GB)

    real    67m8.340s
    user    10m21.988s
    sys     0m34.612s

Running at only 10/sec or so, this would take forever even if sharded. :(

Both exports/dumps are running in parallel. "Expand" queries might help with
speed?

## Postgres Analysis

    SELECT *
    FROM
        pg_stat_statements
    ORDER BY
        total_time DESC LIMIT 5;

Summary:

    SELECT "creator_ident" by ORCID
        1,295,864 calls
        930,305,208 total time
        717.9 mean time     <= this should be less than a ms!

    INSERT INTO release_rev
        75,144,055 calls
        111,470,961 total time
        1.483 mean time

    INSERT INTO work_rev
        75,144,055 calls
        82,693,994 total time
        1.1 mean time

    INSERT INTO release_contrib (creator_ident_id = DEFAULT) RETURNING *
        26,008,280 calls    <= why so few? different query depending on
                               number of rows inserted
        18,955,782 total time
        0.728 mean time

    SELECT container_ident
        784,143 calls
        17,683,156 total time
        22.55 mean time     <= why so slow?

    INSERT INTO release_contrib
        15,072,820 calls

     relname              | too_much_seq | case           | rel_size     | seq_scan | idx_scan
    ----------------------+--------------+----------------+--------------+----------+-----------
     file_rev_url         | 2391         | Missing Index? | 2091147264   | 2391     | 0
     file_release         | -30670       | OK             | 386899968    | 2        | 30672
     container_rev        | -979948      | OK             | 20242432     | 784146   | 1764094
     file_edit            | -2206807     | OK             | 619896832    | 6        | 2206813
     creator_edit         | -2206810     | OK             | 331079680    | 11       | 2206821
     work_edit            | -2206811     | OK             | 6996566016   | 14       | 2206825
     release_edit         | -2206811     | OK             | 6996582400   | 14       | 2206825
     container_edit       | -2206816     | OK             | 8216576      | 5        | 2206821
     changelog            | -2209659     | OK             | 129286144    | 10       | 2209669
     abstracts            | -3486466     | OK             | 1706237952   | 8        | 3486474
     release_rev_abstract | -4975493     | OK             | 191602688    | 42919    | 5018412
     release_ref          | -5032717     | OK             | 170494861312 | 3        | 5032720
     release_contrib      | -5032744     | OK             | 20370251776  | 3        | 5032747
     creator_rev          | -8400410     | OK             | 353583104    | 1296507  | 9696917
     file_ident           | -13483224    | OK             | 522190848    | 7        | 13483231
     creator_ident        | -16686744    | OK             | 293625856    | 3        | 16686747
     file_rev             | -32405557    | OK             | 868515840    | 4        | 32405561
     container_ident      | -69162337    | OK             | 7028736      | 3        | 69162340
     work_rev             | -150288161   | OK             | 3328589824   | 1        | 150288162
     editgroup            | -162783807   | OK             | 146112512    | 9        | 162783816
     release_ident        | -165676917   | OK             | 6076841984   | 52       | 165676969
     work_ident           | -229439828   | OK             | 6066814976   | 3        | 229439831
     release_rev          | -930140217   | OK             | 43360542720  | 9        | 930140226

TODO changes:
- don't return all as often; in particular, inserting release_contrib, release_ref
x missing an index somewhere on file_rev_url, release_rev_abstract
x why so many seq_scan on container_rev, creator_rev
  => running/EXPLAIN same query on psql hits index, not seq_scan
  => seemed to be an issue with VALUE params getting sent separately; query
     planner only looked at the query and wasn't using the index on
     ORCID/ISSN-L because it didn't know those values were not-NULL?
  => adding NOT NULL to the query seems to have sped up the case of there
     being a "hit", but no hit still slow.
     Might need to change indices or something for the (perhaps common in
     future) case of DOI lookups with invalid DOIs (eg, CORE import).

random DEBUG queries:

    EXPLAIN ANALYSE SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-1663' AND "creator_ident"."is_live" = true AND "creator_ident"."redirect_id" IS NULL LIMIT 1;

    EXPLAIN VERBOSE SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = $1 AND "creator_ident"."is_live" = true AND "creator_ident"."redirect_id" IS NULL VALUES ('0000-0002-8867-1669') LIMIT 1;

    EXPLAIN SELECT "container_ident"."id", "container_ident"."is_live", "container_ident"."rev_id", "container_ident"."redirect_id", "container_rev"."id", "container_rev"."extra_json", "container_rev"."name", "container_rev"."publisher", "container_rev"."issnl", "container_rev"."wikidata_qid", "container_rev"."abbrev", "container_rev"."coden" FROM ("container_ident" INNER JOIN "container_rev" ON "container_ident"."rev_id" = "container_rev"."id") WHERE "container_rev"."issnl" = '0001-0782' AND "container_ident"."is_live" = true AND "container_ident"."redirect_id" IS NULL LIMIT 1;

    SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-1663' AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1;
diff --git a/rust/NOTES.txt b/notes/rust_libraries.txt
index 7e6f33eb..7e6f33eb 100644
--- a/rust/NOTES.txt
+++ b/notes/rust_libraries.txt
diff --git a/python/README_codegen.md b/python/README_codegen.md
index 393fae32..0d072dde 100644
--- a/python/README_codegen.md
+++ b/python/README_codegen.md
@@ -80,6 +80,11 @@ Class | Method | HTTP request | Description
 *DefaultApi* | [**create_release_batch**](docs/DefaultApi.md#create_release_batch) | **POST** /release/batch |
 *DefaultApi* | [**create_work**](docs/DefaultApi.md#create_work) | **POST** /work |
 *DefaultApi* | [**create_work_batch**](docs/DefaultApi.md#create_work_batch) | **POST** /work/batch |
+*DefaultApi* | [**delete_container**](docs/DefaultApi.md#delete_container) | **DELETE** /container/{id} |
+*DefaultApi* | [**delete_creator**](docs/DefaultApi.md#delete_creator) | **DELETE** /creator/{id} |
+*DefaultApi* | [**delete_file**](docs/DefaultApi.md#delete_file) | **DELETE** /file/{id} |
+*DefaultApi* | [**delete_release**](docs/DefaultApi.md#delete_release) | **DELETE** /release/{id} |
+*DefaultApi* | [**delete_work**](docs/DefaultApi.md#delete_work) | **DELETE** /work/{id} |
 *DefaultApi* | [**get_changelog**](docs/DefaultApi.md#get_changelog) | **GET** /changelog |
 *DefaultApi* | [**get_changelog_entry**](docs/DefaultApi.md#get_changelog_entry) | **GET** /changelog/{id} |
 *DefaultApi* | [**get_container**](docs/DefaultApi.md#get_container) | **GET** /container/{id} |
@@ -103,6 +108,11 @@ Class | Method | HTTP request | Description
 *DefaultApi* | [**lookup_creator**](docs/DefaultApi.md#lookup_creator) | **GET** /creator/lookup |
 *DefaultApi* | [**lookup_file**](docs/DefaultApi.md#lookup_file) | **GET** /file/lookup |
 *DefaultApi* | [**lookup_release**](docs/DefaultApi.md#lookup_release) | **GET** /release/lookup |
+*DefaultApi* | [**update_container**](docs/DefaultApi.md#update_container) | **PUT** /container/{id} |
+*DefaultApi* | [**update_creator**](docs/DefaultApi.md#update_creator) | **PUT** /creator/{id} |
+*DefaultApi* | [**update_file**](docs/DefaultApi.md#update_file) | **PUT** /file/{id} |
+*DefaultApi* | [**update_release**](docs/DefaultApi.md#update_release) | **PUT** /release/{id} |
+*DefaultApi* | [**update_work**](docs/DefaultApi.md#update_work) | **PUT** /work/{id} |
 
 ## Documentation For Models
diff --git a/python/fatcat/crossref_importer.py b/python/fatcat/crossref_importer.py
index d3e525a4..54a3e84f 100644
--- a/python/fatcat/crossref_importer.py
+++ b/python/fatcat/crossref_importer.py
@@ -152,4 +152,4 @@ class FatcatCrossrefImporter(FatcatImporter):
             re.container_id = container.ident
             self._issnl_id_map[ce.issnl] = container.ident
             release_batch.append(re)
-        self.api.create_release_batch(release_batch)
+        self.api.create_release_batch(release_batch, autoaccept="true", editgroup=editgroup_id)
diff --git a/python/fatcat/fcid.py b/python/fatcat/fcid.py
new file mode 100644
index 00000000..dd72b242
--- /dev/null
+++ b/python/fatcat/fcid.py
@@ -0,0 +1,17 @@

import base64
import uuid

def fcid2uuid(s):
    s = s.split('_')[-1].upper().encode('utf-8')
    assert len(s) == 26
    raw = base64.b32decode(s + b"======")
    return str(uuid.UUID(bytes=raw)).lower()

def uuid2fcid(s):
    raw = uuid.UUID(s).bytes
    return base64.b32encode(raw)[:26].lower().decode('utf-8')

def test_fcid():
    test_uuid = '00000000-0000-0000-3333-000000000001'
    assert test_uuid == fcid2uuid(uuid2fcid(test_uuid))
diff --git a/python/fatcat/importer_common.py b/python/fatcat/importer_common.py
index 9d495aa7..0b02d175 100644
--- a/python/fatcat/importer_common.py
+++ b/python/fatcat/importer_common.py
@@ -1,4 +1,5 @@
 
+import re
 import sys
 import csv
 import json
@@ -22,6 +23,7 @@ class FatcatImporter:
         self._orcid_id_map = dict()
         self._doi_id_map = dict()
         self._issn_issnl_map = None
+        self._orcid_regex = re.compile("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$")
         if issn_map_file:
             self.read_issn_map_file(issn_map_file)
@@ -43,8 +45,7 @@ class FatcatImporter:
         for rows in grouper(source, size):
             eg = self.api.create_editgroup(
                 fatcat_client.Editgroup(editor_id='aaaaaaaaaaaabkvkaaaaaaaaae'))
-            self.create_batch(rows, eg.id)
-            self.api.accept_editgroup(eg.id)
+            self.create_batch(rows, editgroup_id=eg.id)
 
     def process_csv_source(self, source, group_size=100, delimiter=','):
         reader = csv.DictReader(source, delimiter=delimiter)
@@ -54,9 +55,11 @@ class FatcatImporter:
         reader = csv.DictReader(source, delimiter=delimiter)
         self.process_batch(reader, size)
 
+    def is_issnl(self, issnl):
+        return len(issnl) == 9 and issnl[4] == '-'
+
     def lookup_issnl(self, issnl):
         """Caches calls to the ISSN-L lookup API endpoint in a local dict"""
-        assert len(issnl) == 9 and issnl[4] == '-'
         if issnl in self._issnl_id_map:
             return self._issnl_id_map[issnl]
         container_id = None
@@ -69,9 +72,13 @@ class FatcatImporter:
         self._issnl_id_map[issnl] = container_id # might be None
         return container_id
 
+    def is_orcid(self, orcid):
+        return self._orcid_regex.match(orcid) != None
+
     def lookup_orcid(self, orcid):
         """Caches calls to the Orcid lookup API endpoint in a local dict"""
-        assert len(orcid) == 19 and orcid[4] == '-'
+        if not self.is_orcid(orcid):
+            return None
         if orcid in self._orcid_id_map:
             return self._orcid_id_map[orcid]
         creator_id = None
@@ -84,9 +91,12 @@ class FatcatImporter:
         self._orcid_id_map[orcid] = creator_id # might be None
         return creator_id
 
+    def is_doi(self, doi):
+        return doi.startswith("10.") and doi.count("/") >= 1
+
     def lookup_doi(self, doi):
         """Caches calls to the doi lookup API endpoint in a local dict"""
-        assert doi.startswith('10.')
+        assert self.is_doi(doi)
         doi = doi.lower()
         if doi in self._doi_id_map:
             return self._doi_id_map[doi]
diff --git a/python/fatcat/issn_importer.py b/python/fatcat/issn_importer.py
index 181137ac..eb8a50ba 100644
--- a/python/fatcat/issn_importer.py
+++ b/python/fatcat/issn_importer.py
@@ -70,4 +70,4 @@ class FatcatIssnImporter(FatcatImporter):
         objects = [o for o in objects if o != None]
         for o in objects:
             o.editgroup_id = editgroup_id
-        self.api.create_container_batch(objects)
+        self.api.create_container_batch(objects, autoaccept="true", editgroup=editgroup_id)
diff --git a/python/fatcat/manifest_importer.py b/python/fatcat/manifest_importer.py
index 7762d132..2965d0ef 100644
--- a/python/fatcat/manifest_importer.py
+++ b/python/fatcat/manifest_importer.py
@@ -66,7 +66,7 @@ class FatcatManifestImporter(FatcatImporter):
         total_count = int(list(db.execute("SELECT COUNT(*) FROM files_metadata;"))[0][0])
         print("{} rows to process".format(total_count))
-        eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id=1))
+        eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
         i = 0
         j = -1
         for row in db.execute(QUERY):
@@ -81,7 +81,7 @@ class FatcatManifestImporter(FatcatImporter):
             self.create_entity(fe, editgroup_id=eg.id)
             if i > 0 and (i % size) == 0:
                 self.api.accept_editgroup(eg.id)
-                eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id=1))
+                eg = self.api.create_editgroup(fatcat_client.Editgroup(editor_id="aaaaaaaaaaaabkvkaaaaaaaaae"))
                 print("Finished a batch; row {} of {} ({:.2f}%).\tTotal inserted: {}".format(
                     j, total_count, 100.0*j/total_count, i))
             i = i + 1
diff --git a/python/fatcat/orcid_importer.py b/python/fatcat/orcid_importer.py
index 69b184d5..fe76b02c 100644
--- a/python/fatcat/orcid_importer.py
+++ b/python/fatcat/orcid_importer.py
@@ -5,7 +5,6 @@ import itertools
 import fatcat_client
 from fatcat.importer_common import FatcatImporter
 
-
 def value_or_none(e):
     if type(e) == dict:
         e = e.get('value')
@@ -46,8 +45,12 @@ class FatcatOrcidImporter(FatcatImporter):
         else:
             # must have *some* name
             return None
+        orcid = obj['orcid-identifier']['path']
+        if not self.is_orcid(orcid):
+            sys.stderr.write("Bad ORCID: {}\n".format(orcid))
+            return None
         ce = fatcat_client.CreatorEntity(
-            orcid=obj['orcid-identifier']['path'],
+            orcid=orcid,
             given_name=given,
             surname=sur,
             display_name=display,
@@ -68,4 +71,4 @@ class FatcatOrcidImporter(FatcatImporter):
         objects = [o for o in objects if o != None]
         for o in objects:
             o.editgroup_id = editgroup_id
-        self.api.create_creator_batch(objects)
+        self.api.create_creator_batch(objects, autoaccept="true", editgroup=editgroup_id)
diff --git a/python/fatcat/templates/file_view.html b/python/fatcat/templates/file_view.html
index 2934224d..febc2b19 100644
--- a/python/fatcat/templates/file_view.html
+++ b/python/fatcat/templates/file_view.html
@@ -45,7 +45,7 @@ No known public URL, mirror, or archive for this file.
 {% endif %}
 
 <h3>Checksums</h3>
-<table class="ui table">
+<table class="ui compact table">
 <thead>
 <tr><th>Algorithm
     <th>Value
diff --git a/python/fatcat/templates/release_view.html b/python/fatcat/templates/release_view.html
index dd92f611..9be312e1 100644
--- a/python/fatcat/templates/release_view.html
+++ b/python/fatcat/templates/release_view.html
@@ -147,11 +147,11 @@ Believed to represent this release...
 This release citing other releases.
 <ol>
 {% for ref in release.refs %}
-  <li>{% if ref.extra != None %}{{ ref.extra }}{% else %}<i>unknown</i>{% endif %}
+  <li>{% if ref.extra != None and ref.extra.unstructured != None %}{{ ref.extra.unstructured }}{% else %}<i>unknown</i>{% endif %}
   {% if ref.target_release_id != None %}
     (<a href="/release/{{ ref.target_release_id }}">fatcat release</a>)
-  {% elif ref.extra != None and ref.extra.doi != None %}
-    (DOI: <a href="/release/lookup?doi={{ ref.exta.doi }}">{{ ref.extra.doi }}</a>)
+{#  {% elif ref.extra != None and ref.extra.doi != None %}
+    (DOI: <a href="/release/lookup?doi={{ ref.exta.get('doi') }}">{{ ref.extra.get('doi') }}</a>) #}
   {% endif %}
 {% endfor %}
 </ol>
diff --git a/python/fatcat_client/api/default_api.py b/python/fatcat_client/api/default_api.py
index b7f23e25..23c8d7ca 100644
--- a/python/fatcat_client/api/default_api.py
+++ b/python/fatcat_client/api/default_api.py
@@ -245,6 +245,8 @@ class DefaultApi(object):
 
         :param async bool
         :param list[ContainerEntity] entity_list: (required)
+        :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+        :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
         :return: list[EntityEdit]
                  If the method is called asynchronously,
                  returns the request thread.
@@ -266,12 +268,14 @@ class DefaultApi(object):
 
         :param async bool
         :param list[ContainerEntity] entity_list: (required)
+        :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+        :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
         :return: list[EntityEdit]
                  If the method is called asynchronously,
                  returns the request thread.
         """
 
-        all_params = ['entity_list']  # noqa: E501
+        all_params = ['entity_list', 'autoaccept', 'editgroup']  # noqa: E501
         all_params.append('async')
         all_params.append('_return_http_data_only')
         all_params.append('_preload_content')
@@ -296,6 +300,10 @@ class DefaultApi(object):
         path_params = {}
 
         query_params = []
+        if 'autoaccept' in params:
+            query_params.append(('autoaccept', params['autoaccept']))  # noqa: E501
+        if 'editgroup' in params:
+            query_params.append(('editgroup', params['editgroup']))  # noqa: E501
 
         header_params = {}
 
@@ -439,6 +447,8 @@ class DefaultApi(object):
 
         :param async bool
         :param list[CreatorEntity] entity_list: (required)
+        :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once
+        :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)
         :return: list[EntityEdit]
                  If the method is called asynchronously,
                  returns the request thread.
@@ -460,12 +470,14 @@ class DefaultApi(object): :param async bool :param list[CreatorEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. """ - all_params = ['entity_list'] # noqa: E501 + all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501 all_params.append('async') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -490,6 +502,10 @@ class DefaultApi(object): path_params = {} query_params = [] + if 'autoaccept' in params: + query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501 + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 header_params = {} @@ -730,6 +746,8 @@ class DefaultApi(object): :param async bool :param list[FileEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. @@ -751,12 +769,14 @@ class DefaultApi(object): :param async bool :param list[FileEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. """ - all_params = ['entity_list'] # noqa: E501 + all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501 all_params.append('async') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -781,6 +801,10 @@ class DefaultApi(object): path_params = {} query_params = [] + if 'autoaccept' in params: + query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501 + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 header_params = {} @@ -924,6 +948,8 @@ class DefaultApi(object): :param async bool :param list[ReleaseEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. @@ -945,12 +971,14 @@ class DefaultApi(object): :param async bool :param list[ReleaseEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. 
""" - all_params = ['entity_list'] # noqa: E501 + all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501 all_params.append('async') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -975,6 +1003,10 @@ class DefaultApi(object): path_params = {} query_params = [] + if 'autoaccept' in params: + query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501 + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 header_params = {} @@ -1118,6 +1150,8 @@ class DefaultApi(object): :param async bool :param list[WorkEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. @@ -1139,12 +1173,14 @@ class DefaultApi(object): :param async bool :param list[WorkEntity] entity_list: (required) + :param bool autoaccept: If true, and editor is authorized, batch is accepted all at once + :param str editgroup: Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True) :return: list[EntityEdit] If the method is called asynchronously, returns the request thread. """ - all_params = ['entity_list'] # noqa: E501 + all_params = ['entity_list', 'autoaccept', 'editgroup'] # noqa: E501 all_params.append('async') all_params.append('_return_http_data_only') all_params.append('_preload_content') @@ -1169,6 +1205,10 @@ class DefaultApi(object): path_params = {} query_params = [] + if 'autoaccept' in params: + query_params.append(('autoaccept', params['autoaccept'])) # noqa: E501 + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 header_params = {} @@ -1205,6 +1245,511 @@ class DefaultApi(object): _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) + def delete_container(self, id, **kwargs): # noqa: E501 + """delete_container # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_container(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.delete_container_with_http_info(id, **kwargs) # noqa: E501 + else: + (data) = self.delete_container_with_http_info(id, **kwargs) # noqa: E501 + return data + + def delete_container_with_http_info(self, id, **kwargs): # noqa: E501 + """delete_container # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_container_with_http_info(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id', 'editgroup'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_container" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `delete_container`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/container/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_creator(self, id, **kwargs): # noqa: E501 + """delete_creator # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_creator(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.delete_creator_with_http_info(id, **kwargs) # noqa: E501 + else: + (data) = self.delete_creator_with_http_info(id, **kwargs) # noqa: E501 + return data + + def delete_creator_with_http_info(self, id, **kwargs): # noqa: E501 + """delete_creator # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_creator_with_http_info(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id', 'editgroup'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_creator" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `delete_creator`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/creator/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_file(self, id, **kwargs): # noqa: E501 + """delete_file # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_file(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.delete_file_with_http_info(id, **kwargs) # noqa: E501 + else: + (data) = self.delete_file_with_http_info(id, **kwargs) # noqa: E501 + return data + + def delete_file_with_http_info(self, id, **kwargs): # noqa: E501 + """delete_file # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_file_with_http_info(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id', 'editgroup'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_file" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `delete_file`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/file/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_release(self, id, **kwargs): # noqa: E501 + """delete_release # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_release(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.delete_release_with_http_info(id, **kwargs) # noqa: E501 + else: + (data) = self.delete_release_with_http_info(id, **kwargs) # noqa: E501 + return data + + def delete_release_with_http_info(self, id, **kwargs): # noqa: E501 + """delete_release # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_release_with_http_info(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id', 'editgroup'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_release" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `delete_release`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/release/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def delete_work(self, id, **kwargs): # noqa: E501 + """delete_work # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_work(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.delete_work_with_http_info(id, **kwargs) # noqa: E501 + else: + (data) = self.delete_work_with_http_info(id, **kwargs) # noqa: E501 + return data + + def delete_work_with_http_info(self, id, **kwargs): # noqa: E501 + """delete_work # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.delete_work_with_http_info(id, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param str editgroup: + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + + all_params = ['id', 'editgroup'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method delete_work" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `delete_work`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + if 'editgroup' in params: + query_params.append(('editgroup', params['editgroup'])) # noqa: E501 + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/work/{id}', 'DELETE', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + def get_changelog(self, **kwargs): # noqa: E501 """get_changelog # noqa: E501 @@ -3255,8 +3800,8 @@ class DefaultApi(object): if ('orcid' in params and len(params['orcid']) < 19): raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, length must be greater than or equal to `19`") # noqa: E501 - if 'orcid' in params and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{4}', params['orcid']): # noqa: E501 - raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, must conform to the pattern `/\\d{4}-\\d{4}-\\d{4}-\\d{4}/`") # noqa: E501 + if 'orcid' in params and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]', params['orcid']): # noqa: E501 + raise ValueError("Invalid value for parameter `orcid` when calling `lookup_creator`, must conform to the pattern `/\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]/`") # noqa: E501 collection_formats = {} path_params = {} @@ -3491,3 +4036,528 @@ class DefaultApi(object): _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats) + + def update_container(self, id, entity, **kwargs): # noqa: E501 + """update_container # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_container(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param ContainerEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.update_container_with_http_info(id, entity, **kwargs) # noqa: E501 + else: + (data) = self.update_container_with_http_info(id, entity, **kwargs) # noqa: E501 + return data + + def update_container_with_http_info(self, id, entity, **kwargs): # noqa: E501 + """update_container # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_container_with_http_info(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param ContainerEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id', 'entity'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method update_container" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_container`") # noqa: E501 + # verify the required parameter 'entity' is set + if ('entity' not in params or + params['entity'] is None): + raise ValueError("Missing the required parameter `entity` when calling `update_container`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'entity' in params: + body_params = params['entity'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/container/{id}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def update_creator(self, id, entity, **kwargs): # noqa: E501 + """update_creator # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_creator(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param CreatorEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.update_creator_with_http_info(id, entity, **kwargs) # noqa: E501 + else: + (data) = self.update_creator_with_http_info(id, entity, **kwargs) # noqa: E501 + return data + + def update_creator_with_http_info(self, id, entity, **kwargs): # noqa: E501 + """update_creator # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_creator_with_http_info(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param CreatorEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id', 'entity'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method update_creator" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_creator`") # noqa: E501 + # verify the required parameter 'entity' is set + if ('entity' not in params or + params['entity'] is None): + raise ValueError("Missing the required parameter `entity` when calling `update_creator`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'entity' in params: + body_params = params['entity'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/creator/{id}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def update_file(self, id, entity, **kwargs): # noqa: E501 + """update_file # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_file(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param FileEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.update_file_with_http_info(id, entity, **kwargs) # noqa: E501 + else: + (data) = self.update_file_with_http_info(id, entity, **kwargs) # noqa: E501 + return data + + def update_file_with_http_info(self, id, entity, **kwargs): # noqa: E501 + """update_file # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_file_with_http_info(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param FileEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id', 'entity'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method update_file" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_file`") # noqa: E501 + # verify the required parameter 'entity' is set + if ('entity' not in params or + params['entity'] is None): + raise ValueError("Missing the required parameter `entity` when calling `update_file`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'entity' in params: + body_params = params['entity'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/file/{id}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def update_release(self, id, entity, **kwargs): # noqa: E501 + """update_release # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_release(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param ReleaseEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.update_release_with_http_info(id, entity, **kwargs) # noqa: E501 + else: + (data) = self.update_release_with_http_info(id, entity, **kwargs) # noqa: E501 + return data + + def update_release_with_http_info(self, id, entity, **kwargs): # noqa: E501 + """update_release # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_release_with_http_info(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param ReleaseEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id', 'entity'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method update_release" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_release`") # noqa: E501 + # verify the required parameter 'entity' is set + if ('entity' not in params or + params['entity'] is None): + raise ValueError("Missing the required parameter `entity` when calling `update_release`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'entity' in params: + body_params = params['entity'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/release/{id}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) + + def update_work(self, id, entity, **kwargs): # noqa: E501 + """update_work # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_work(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param WorkEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. 
+ """ + kwargs['_return_http_data_only'] = True + if kwargs.get('async'): + return self.update_work_with_http_info(id, entity, **kwargs) # noqa: E501 + else: + (data) = self.update_work_with_http_info(id, entity, **kwargs) # noqa: E501 + return data + + def update_work_with_http_info(self, id, entity, **kwargs): # noqa: E501 + """update_work # noqa: E501 + + This method makes a synchronous HTTP request by default. To make an + asynchronous HTTP request, please pass async=True + >>> thread = api.update_work_with_http_info(id, entity, async=True) + >>> result = thread.get() + + :param async bool + :param str id: (required) + :param WorkEntity entity: (required) + :return: EntityEdit + If the method is called asynchronously, + returns the request thread. + """ + + all_params = ['id', 'entity'] # noqa: E501 + all_params.append('async') + all_params.append('_return_http_data_only') + all_params.append('_preload_content') + all_params.append('_request_timeout') + + params = locals() + for key, val in six.iteritems(params['kwargs']): + if key not in all_params: + raise TypeError( + "Got an unexpected keyword argument '%s'" + " to method update_work" % key + ) + params[key] = val + del params['kwargs'] + # verify the required parameter 'id' is set + if ('id' not in params or + params['id'] is None): + raise ValueError("Missing the required parameter `id` when calling `update_work`") # noqa: E501 + # verify the required parameter 'entity' is set + if ('entity' not in params or + params['entity'] is None): + raise ValueError("Missing the required parameter `entity` when calling `update_work`") # noqa: E501 + + collection_formats = {} + + path_params = {} + if 'id' in params: + path_params['id'] = params['id'] # noqa: E501 + + query_params = [] + + header_params = {} + + form_params = [] + local_var_files = {} + + body_params = None + if 'entity' in params: + body_params = params['entity'] + # HTTP header `Accept` + header_params['Accept'] = self.api_client.select_header_accept( + ['application/json']) # noqa: E501 + + # HTTP header `Content-Type` + header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501 + ['application/json']) # noqa: E501 + + # Authentication setting + auth_settings = [] # noqa: E501 + + return self.api_client.call_api( + '/work/{id}', 'PUT', + path_params, + query_params, + header_params, + body=body_params, + post_params=form_params, + files=local_var_files, + response_type='EntityEdit', # noqa: E501 + auth_settings=auth_settings, + async=params.get('async'), + _return_http_data_only=params.get('_return_http_data_only'), + _preload_content=params.get('_preload_content', True), + _request_timeout=params.get('_request_timeout'), + collection_formats=collection_formats) diff --git a/python/fatcat_client/models/creator_entity.py b/python/fatcat_client/models/creator_entity.py index a9e459ee..de096281 100644 --- a/python/fatcat_client/models/creator_entity.py +++ b/python/fatcat_client/models/creator_entity.py @@ -139,8 +139,8 @@ class CreatorEntity(object): raise ValueError("Invalid value for `orcid`, length must be less than or equal to `19`") # noqa: E501 if orcid is not None and len(orcid) < 19: raise ValueError("Invalid value for `orcid`, length must be greater than or equal to `19`") # noqa: E501 - if orcid is not None and not re.search('\\d{4}-\\d{4}-\\d{4}-\\d{4}', orcid): # noqa: E501 - raise ValueError("Invalid value for `orcid`, must be a follow pattern or equal to `/\\d{4}-\\d{4}-\\d{4}-\\d{4}/`") # noqa: E501 + if orcid is not None and not 
re.search('\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]', orcid): # noqa: E501 + raise ValueError("Invalid value for `orcid`, must be a follow pattern or equal to `/\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]/`") # noqa: E501 self._orcid = orcid diff --git a/python/fatcat_client/models/release_entity.py b/python/fatcat_client/models/release_entity.py index 3412ca92..79e2c64f 100644 --- a/python/fatcat_client/models/release_entity.py +++ b/python/fatcat_client/models/release_entity.py @@ -48,6 +48,7 @@ class ReleaseEntity(object): 'wikidata_qid': 'str', 'pmcid': 'str', 'pmid': 'str', + 'core_id': 'str', 'isbn13': 'str', 'doi': 'str', 'release_date': 'date', @@ -78,6 +79,7 @@ class ReleaseEntity(object): 'wikidata_qid': 'wikidata_qid', 'pmcid': 'pmcid', 'pmid': 'pmid', + 'core_id': 'core_id', 'isbn13': 'isbn13', 'doi': 'doi', 'release_date': 'release_date', @@ -96,7 +98,7 @@ class ReleaseEntity(object): 'extra': 'extra' } - def __init__(self, abstracts=None, refs=None, contribs=None, language=None, publisher=None, pages=None, issue=None, volume=None, wikidata_qid=None, pmcid=None, pmid=None, isbn13=None, doi=None, release_date=None, release_status=None, release_type=None, container_id=None, files=None, container=None, work_id=None, title=None, state=None, ident=None, revision=None, redirect=None, editgroup_id=None, extra=None): # noqa: E501 + def __init__(self, abstracts=None, refs=None, contribs=None, language=None, publisher=None, pages=None, issue=None, volume=None, wikidata_qid=None, pmcid=None, pmid=None, core_id=None, isbn13=None, doi=None, release_date=None, release_status=None, release_type=None, container_id=None, files=None, container=None, work_id=None, title=None, state=None, ident=None, revision=None, redirect=None, editgroup_id=None, extra=None): # noqa: E501 """ReleaseEntity - a model defined in Swagger""" # noqa: E501 self._abstracts = None @@ -110,6 +112,7 @@ class ReleaseEntity(object): self._wikidata_qid = None self._pmcid = None self._pmid = None + self._core_id = None self._isbn13 = None self._doi = None self._release_date = None @@ -150,6 +153,8 @@ class ReleaseEntity(object): self.pmcid = pmcid if pmid is not None: self.pmid = pmid + if core_id is not None: + self.core_id = core_id if isbn13 is not None: self.isbn13 = isbn13 if doi is not None: @@ -416,6 +421,27 @@ class ReleaseEntity(object): self._pmid = pmid @property + def core_id(self): + """Gets the core_id of this ReleaseEntity. # noqa: E501 + + + :return: The core_id of this ReleaseEntity. # noqa: E501 + :rtype: str + """ + return self._core_id + + @core_id.setter + def core_id(self, core_id): + """Sets the core_id of this ReleaseEntity. + + + :param core_id: The core_id of this ReleaseEntity. # noqa: E501 + :type: str + """ + + self._core_id = core_id + + @property def isbn13(self): """Gets the isbn13 of this ReleaseEntity. 
# noqa: E501 diff --git a/python/fatcat_export.py b/python/fatcat_export.py new file mode 100755 index 00000000..6c4502af --- /dev/null +++ b/python/fatcat_export.py @@ -0,0 +1,72 @@ +#!/usr/bin/env python3 + +import sys +import json +import argparse +import fatcat_client +from fatcat_client.rest import ApiException +from fatcat.fcid import uuid2fcid + +def run_export_releases(args): + conf = fatcat_client.Configuration() + conf.host = args.host_url + api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf)) + + for line in args.ident_file: + ident = uuid2fcid(line.split()[0]) + release = api.get_release(id=ident, expand="all") + args.json_output.write(json.dumps(release.to_dict()) + "\n") + +def run_export_changelog(args): + conf = fatcat_client.Configuration() + conf.host = args.host_url + api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf)) + + end = args.end + if end is None: + latest = api.get_changelog(limit=1)[0] + end = latest.index + + for i in range(args.start, end): + entry = api.get_changelog_entry(id=i) + args.json_output.write(json.dumps(entry.to_dict()) + "\n") + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('--debug', + action='store_true', + help="enable debugging interface") + parser.add_argument('--host-url', + default="http://localhost:9411/v0", + help="connect to this host/port") + subparsers = parser.add_subparsers() + + sub_releases = subparsers.add_parser('releases') + sub_releases.set_defaults(func=run_export_releases) + sub_releases.add_argument('ident_file', + help="TSV list of fatcat release idents to dump", + default=sys.stdin, type=argparse.FileType('r')) + sub_releases.add_argument('json_output', + help="where to send output", + default=sys.stdout, type=argparse.FileType('w')) + + sub_changelog = subparsers.add_parser('changelog') + sub_changelog.set_defaults(func=run_export_changelog) + sub_changelog.add_argument('--start', + help="index to start dumping at", + default=1, type=int) + sub_changelog.add_argument('--end', + help="index to stop dumping at (else detect most recent)", + default=None, type=int) + sub_changelog.add_argument('json_output', + help="where to send output", + default=sys.stdout, type=argparse.FileType('w')) + + args = parser.parse_args() + if not args.__dict__.get("func"): + print("tell me what to do!") + sys.exit(-1) + args.func(args) + +if __name__ == '__main__': + main() diff --git a/python/tests/codegen_tests/test_default_api.py b/python/tests/codegen_tests/test_default_api.py index e3008898..ec36887b 100644 --- a/python/tests/codegen_tests/test_default_api.py +++ b/python/tests/codegen_tests/test_default_api.py @@ -101,6 +101,36 @@ class TestDefaultApi(unittest.TestCase): """ pass + def test_delete_container(self): + """Test case for delete_container + + """ + pass + + def test_delete_creator(self): + """Test case for delete_creator + + """ + pass + + def test_delete_file(self): + """Test case for delete_file + + """ + pass + + def test_delete_release(self): + """Test case for delete_release + + """ + pass + + def test_delete_work(self): + """Test case for delete_work + + """ + pass + def test_get_changelog(self): """Test case for get_changelog @@ -239,6 +269,36 @@ class TestDefaultApi(unittest.TestCase): """ pass + def test_update_container(self): + """Test case for update_container + + """ + pass + + def test_update_creator(self): + """Test case for update_creator + + """ + pass + + def test_update_file(self): + """Test case for update_file + + """ + pass + + def test_update_release(self): + 
"""Test case for update_release + + """ + pass + + def test_update_work(self): + """Test case for update_work + + """ + pass + if __name__ == '__main__': unittest.main() diff --git a/python/tests/files/0000-0001-8254-710X.json b/python/tests/files/0000-0001-8254-710X.json new file mode 100644 index 00000000..094cae67 --- /dev/null +++ b/python/tests/files/0000-0001-8254-710X.json @@ -0,0 +1 @@ +{"orcid-identifier":{"uri":"http://orcid.org/0000-0001-8254-710X","path":"0000-0001-8254-710X","host":"orcid.org"},"preferences":{"locale":"en"},"history":{"creation-method":"Member-referred","completion-date":null,"submission-date":{"value":1407501041999},"last-modified-date":{"value":1465949566770},"claimed":true,"source":null,"deactivation-date":null,"verified-email":true,"verified-primary-email":true},"person":{"last-modified-date":null,"name":{"created-date":{"value":1460755375159},"last-modified-date":{"value":1460755375159},"given-names":{"value":"Man-Hui"},"family-name":{"value":"Li"},"credit-name":null,"source":null,"visibility":"public","path":"0000-0001-8254-710X"},"other-names":{"last-modified-date":null,"other-name":null,"path":"/0000-0001-8254-710X/other-names"},"biography":{"created-date":{"value":1460755375161},"last-modified-date":{"value":1460755375161},"content":null,"visibility":"public","path":"/0000-0001-8254-710X/biography"},"researcher-urls":{"last-modified-date":null,"researcher-url":null,"path":"/0000-0001-8254-710X/researcher-urls"},"emails":{"last-modified-date":null,"email":null,"path":"/0000-0001-8254-710X/email"},"addresses":{"last-modified-date":null,"address":null,"path":"/0000-0001-8254-710X/address"},"keywords":{"last-modified-date":null,"keyword":null,"path":"/0000-0001-8254-710X/keywords"},"external-identifiers":{"last-modified-date":null,"external-identifier":null,"path":"/0000-0001-8254-710X/external-identifiers"},"path":"/0000-0001-8254-710X/person"},"activities-summary":{"last-modified-date":null,"educations":{"last-modified-date":null,"education-summary":null,"path":"/0000-0001-8254-710X/educations"},"employments":{"last-modified-date":null,"employment-summary":null,"path":"/0000-0001-8254-710X/employments"},"fundings":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/fundings"},"peer-reviews":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/peer-reviews"},"works":{"last-modified-date":null,"group":null,"path":"/0000-0001-8254-710X/works"},"path":"/0000-0001-8254-710X/activities"},"path":"/0000-0001-8254-710X"} diff --git a/python/tests/files/crossref-works.2018-01-21.badsample.json b/python/tests/files/crossref-works.2018-01-21.badsample.json index d0ce191f..931da7a7 100644 --- a/python/tests/files/crossref-works.2018-01-21.badsample.json +++ b/python/tests/files/crossref-works.2018-01-21.badsample.json @@ -9,5 +9,6 @@ { "_id" : { "$oid" : "5a55196988a035a45bda0cb1" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:41:48Z", "timestamp" : { "$numberLong" : "1508769708308" } }, "reference-count" : 44, "publisher" : "Elsevier BV", "issue" : "1", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 1998, 11, 1 ] ], "date-time" : "1998-11-01T00:00:00Z", "timestamp" : { "$numberLong" : "909878400000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Toxicology and Applied Pharmacology" ], "published-print" : { "date-parts" : [ [ 1998, 11 
] ] }, "DOI" : "10.1006/taap.1998.8543", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 18 ] ], "date-time" : "2002-09-18T22:01:25Z", "timestamp" : { "$numberLong" : "1032386485000" } }, "page" : "102-108", "source" : "Crossref", "is-referenced-by-count" : 44, "title" : [ "Role of CYP1A2 in the Hepatotoxicity of Acetaminophen: Investigations UsingCyp1a2Null Mice" ], "prefix" : "10.1006", "volume" : "153", "author" : [ { "given" : "Robert P.", "family" : "Tonge", "affiliation" : [] }, { "given" : "Edward J.", "family" : "Kelly", "affiliation" : [] }, { "given" : "Sam A.", "family" : "Bruschi", "affiliation" : [] }, { "given" : "Tom", "family" : "Kalhorn", "affiliation" : [] }, { "given" : "David L.", "family" : "Eaton", "affiliation" : [] }, { "given" : "Daniel W.", "family" : "Nebert", "affiliation" : [] }, { "given" : "Sidney D.", "family" : "Nelson", "affiliation" : [] } ], "member" : "78", "container-title" : [ "Toxicology and Applied Pharmacology" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T16:51:33Z", "timestamp" : { "$numberLong" : "1497459093000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 1998, 11 ] ] }, "references-count" : 44, "alternative-id" : [ "S0041008X9898543X" ], "URL" : "http://dx.doi.org/10.1006/taap.1998.8543", "ISSN" : [ "0041-008X" ], "issn-type" : [ { "value" : "0041-008X", "type" : "print" } ], "subject" : [ "Toxicology", "Pharmacology" ] } { "_id" : { "$oid" : "5a55170088a035a45bd8490d" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:30:12Z", "timestamp" : { "$numberLong" : "1508769012416" } }, "reference-count" : 37, "publisher" : "Wiley-Blackwell", "issue" : "2", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 5356, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Am. J. Ind. Med." ], "published-print" : { "date-parts" : [ [ 2001, 2 ] ] }, "DOI" : "10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T20:41:50Z", "timestamp" : { "$numberLong" : "1030308110000" } }, "page" : "218-226", "source" : "Crossref", "is-referenced-by-count" : 10, "title" : [ "The work environment impact assessment: A methodologic framework for evaluating health-based interventions" ], "prefix" : "10.1002", "volume" : "39", "author" : [ { "given" : "Beth J.", "family" : "Rosenberg", "affiliation" : [] }, { "given" : "Elizabeth M.", "family" : "Barbeau", "affiliation" : [] }, { "given" : "Rafael", "family" : "Moure-Eraso", "affiliation" : [] }, { "given" : "Charles", "family" : "Levenstein", "affiliation" : [] } ], "member" : "311", "published-online" : { "date-parts" : [ [ 2001 ] ] }, "reference" : [ { "key" : "BIB1", "author" : "Barbeau", "year" : "1998", "unstructured" : "1998. 
Displaced tobacco workers, public health, and tobacco policy: moving beyond jobs versus health. Doctoral thesis, Department of Work Environment, University of Massachusetts, Lowell." }, { "key" : "BIB2", "author" : "Berberian", "volume" : "37", "first-page" : "126", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB3", "author" : "Bignami", "volume" : "80", "first-page" : "265", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0027-5107(81)90099-3", "doi-asserted-by" : "crossref" }, { "key" : "BIB4", "author" : "Britton", "year" : "1989", "unstructured" : "1989. The post-Alar era dawns chilly for apple growers. Boston Globe. Oct. 25, p. 34." }, { "key" : "BIB5", "author" : "Brusick", "year" : "1976", "unstructured" : "1976. Mutagen and oncogen Study on 1,1-dimethylhydrazine. Prepared for the Aerospace Med. Res. Lab., Aeropsace Med. Div. Airforce Systems Command, Wright- Patterson A.F.B., Dayton OH Litton Bionetics, Inc., Kensington, MD. NTIS AD-A035475." }, { "key" : "BIB6", "author" : "Chemical Marketing Reporter", "year" : "1984", "unstructured" : "Chemical Marketing Reporter. 1984. Uniroyal pesticide to be reviewed by EPA: regulatory action prompted by its toxicity. July 23." }, { "key" : "BIB7", "author" : "Chemical Marketing Reporter", "year" : "1989", "unstructured" : "Chemical Marketing Reporter. 1989. Uniroyal pulls apple pesticide from market, citing controversy. June 5." }, { "key" : "BIB8", "year" : "1990", "unstructured" : "Du Pont Chemical Company. 1990. MSDS No. M0000057, p. 2." }, { "key" : "BIB9", "year" : "1993", "unstructured" : "Farm Chemicals Handbook '93. 1993. Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '93" }, { "key" : "BIB10", "year" : "1985", "unstructured" : "Farm Chemicals Handbook '85. 1985. Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '85" }, { "key" : "BIB11", "author" : "Federal Register", "year" : "1989", "unstructured" : "Federal Register. 1989. Daminozide: termination of special review of food uses. Vol. 54, No. 216, p. 47482, November 14." }, { "key" : "BIB12", "author" : "Fenske", "first-page" : "729", "year" : "2000", "unstructured" : "2000. Agricultural workers. In: editors. Occupational health: recognizing and preventing work-related disease and injury. 4th ed. Philadelphia: Lippincott Williams and Wilkins, p. 729-748.", "volume-title" : "Occupational health: recognizing and preventing work-related disease and injury" }, { "key" : "BIB13", "author" : "Gibson", "volume" : "5", "first-page" : "24", "year" : "1994", "journal-title" : "New Solutions", "DOI" : "10.2190/NS5.1.g", "doi-asserted-by" : "crossref" }, { "key" : "BIB14", "author" : "Goldenhar", "volume" : "29", "first-page" : "289", "year" : "1996", "journal-title" : "Am J Ind Med", "DOI" : "10.1002/(SICI)1097-0274(199604)29:4<289::AID-AJIM2>3.0.CO;2-K", "doi-asserted-by" : "crossref" }, { "key" : "BIB15", "author" : "Haun", "year" : "1984", "unstructured" : "1984. Inhalation studies of UDMH. Air Force Aerospace Medical Res Lab, TR-85-020." }, { "key" : "BIB16", "author" : "International Agency for Research on Cancer (IARC)", "year" : "1997", "unstructured" : "International Agency for Research on Cancer (IARC). 1997. Evaluation of carcinogen risks to humans: man-made mineral fibres and radon. Lyons, France." }, { "key" : "BIB17", "author" : "Lord", "year" : "1969", "unstructured" : "1969 (May-June). Thoughts on the apple harvest problem. Fruit Notes. U. S. 
Department of Agriculture, Massachusetts Extension Service." }, { "key" : "BIB18", "author" : "Manning", "first-page" : "34", "year" : "1989", "unstructured" : "Sales Agent for J. P. Sullivan and Co., of Ayer, MA, an apple commission house. In 1989. The post-Alar era dawns chilly for apple growers. Boston Globe Oct. 25 p. 34.", "volume-title" : "The post-Alar era dawns chilly for apple growers" }, { "key" : "BIB19", "author" : "National Cancer Institute", "year" : "1978", "unstructured" : "National Cancer Institute. 1978. Bioassay of daminozide for possible carcinogenicity. Washington, D.C., United State Department of Health, Education and Welfare, Public Health Service (NIC Carcinogenesis Technical Report Series No. 83; DHEW Publication No (NIH 78-1333)." }, { "key" : "BIB20", "author" : "Rogers", "volume" : "89", "first-page" : "321", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0165-1218(81)90113-0", "doi-asserted-by" : "crossref" }, { "key" : "BIB21", "author" : "Rosenberg", "year" : "1995", "unstructured" : "1995. The best laid bans: the impact of pesticide bans on workers. Doctoral thesis, Department of Work Environment, University of Massachusetts Lowell." }, { "key" : "BIB22", "author" : "Rosenberg", "volume" : "6", "first-page" : "34", "year" : "1996", "journal-title" : "New Solutions: A Journal of Environmental and Occupational Health Policy", "DOI" : "10.2190/NS6.2.d", "doi-asserted-by" : "crossref" }, { "key" : "BIB23", "author" : "Rosenberg", "volume" : "8", "first-page" : "365", "year" : "1998", "journal-title" : "New Solutions Environmental Health Policy", "DOI" : "10.2190/A2A1-CT1X-RY6D-RR3M", "doi-asserted-by" : "crossref" }, { "key" : "BIB24", "author" : "Saunders", "volume" : "29", "first-page" : "409", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB25", "author" : "Toth", "volume" : "50", "first-page" : "181", "year" : "1973", "journal-title" : "J Natl Cancer Inst", "DOI" : "10.1093/jnci/50.1.181", "doi-asserted-by" : "crossref" }, { "key" : "BIB26", "author" : "Toth", "volume" : "40", "first-page" : "2427", "year" : "1977a", "journal-title" : "Cancer", "DOI" : "10.1002/1097-0142(197711)40:5+<2427::AID-CNCR2820400906>3.0.CO;2-Y", "doi-asserted-by" : "crossref" }, { "key" : "BIB27", "author" : "Toth", "volume" : "37", "first-page" : "3497", "year" : "1977b", "journal-title" : "Cancer Res" }, { "key" : "BIB28", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Integrated Risk Information System (IRIS). Oxamyl. December 9." }, { "key" : "BIB29", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Chemical Fact Sheet Number 26: Daminozide. Office of Pesticides and Toxic Substances, Washington, DC. 10-169." }, { "key" : "BIB30", "author" : "U.S. Environmental Protection Agency", "year" : "1989", "unstructured" : "U.S. Environmental Protection Agency, Office of Pesticide Programs, Office of Pesticides and Toxic Substances. 1989. Daminozide special review technical support document: Preliminary determination to cancel the food uses of Daminozide. Washington, DC: May." }, { "key" : "BIB31", "author" : "U.S. Environmental Protection Agency", "volume" : "54", "first-page" : "10", "year" : "1989", "journal-title" : "Fed Regist." }, { "key" : "BIB32", "author" : "U.S. Environmental Protection Agency", "year" : "1990", "unstructured" : "U.S. Environmental Protection Agency. 1990. 
Integrated Risk Information System (IRIS). Propargite. May 1." }, { "key" : "BIB33", "author" : "U.S. Environmental Protection Agency", "volume" : "57", "first-page" : "10", "year" : "1992", "journal-title" : "Fed. Regist." }, { "key" : "BIB34", "author" : "U.S. Environmental Protection Agency", "year" : "1993", "unstructured" : "U.S. Environmental Protection Agency, Office of Prevention, Pesticides and Toxic Substances. 1993. R.E.D. Facts, Document number EPA-738-F-93-007. September." }, { "key" : "BIB35", "author" : "U.S. Department of Agriculture", "year" : "1993", "journal-title" : "New England Agricultural Statistics" }, { "key" : "BIB36", "author" : "Warren", "year" : "1992", "unstructured" : "1992. Unanticipated consequences of banning a chemical: the case of Alar. Unpublished manuscript, Department of Work Environment, University of Massachusetts Lowell." }, { "key" : "BIB37", "author" : "Wood", "year" : "1990", "unstructured" : "1990. Memo to Poverty Lane, West Lebanon, New Hampshire, to members of the Risk Assessment/Risk Management Work Group, Keystone National Policy Dialogue on Food Safety, Oct. 26, 1990, cited in Rosenberg, B. 1996." } ], "container-title" : [ "American Journal of Industrial Medicine" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F1097-0274(200102)39:2%3C218::AID-AJIM1009%3E3.0.CO;2-4", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 4 ] ], "date-time" : "2017-08-04T20:22:16Z", "timestamp" : { "$numberLong" : "1501878136000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001 ] ] }, "references-count" : 37, "URL" : "http://dx.doi.org/10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "relation" : { "cites" : [] }, "ISSN" : [ "0271-3586", "1097-0274" ], "issn-type" : [ { "value" : "0271-3586", "type" : "print" }, { "value" : "1097-0274", "type" : "electronic" } ], "subject" : [ "Public Health, Environmental and Occupational Health" ] } { "_id" : { "$oid" : "5a553b4388a035a45bf39150" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T17:10:15Z", "timestamp" : { "$numberLong" : "1508778615346" } }, "reference-count" : 22, "publisher" : "Elsevier BV", "issue" : "4", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 2001, 12, 1 ] ], "date-time" : "2001-12-01T00:00:00Z", "timestamp" : { "$numberLong" : "1007164800000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "International Journal of Hospitality Management" ], "published-print" : { "date-parts" : [ [ 2001, 12 ] ] }, "DOI" : "10.1016/s0278-4319(01)00020-2", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 7, 25 ] ], "date-time" : "2002-07-25T14:28:16Z", "timestamp" : { "$numberLong" : "1027607296000" } }, "page" : "325-338", "source" : "Crossref", "is-referenced-by-count" : 14, "title" : [ "Hotel management style: a study of employee perceptions and preferences" ], "prefix" : "10.1016", "volume" : "20", "author" : [ { "given" : "Margaret", "family" : "Deery", "affiliation" : [] }, { "given" : "Leo K", "family" : "Jago", "affiliation" : [] } ], "member" : "78", "container-title" : [ "International Journal of Hospitality Management" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0278431901000202?httpAccept=text/xml", "content-type" : 
"text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0278431901000202?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T21:24:09Z", "timestamp" : { "$numberLong" : "1497475449000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001, 12 ] ] }, "references-count" : 22, "alternative-id" : [ "S0278431901000202" ], "URL" : "http://dx.doi.org/10.1016/s0278-4319(01)00020-2", "ISSN" : [ "0278-4319" ], "issn-type" : [ { "value" : "0278-4319", "type" : "print" } ], "subject" : [ "Tourism, Leisure and Hospitality Management", "Strategy and Management" ] } +{ "_id" : { "$oid" : "5a55176088a035a45bd8802c" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:31:47Z", "timestamp" : { "$numberLong" : "1508769107897" } }, "reference-count" : 1, "publisher" : "Hindawi Limited", "issue" : "2", "license" : [ { "URL" : "http://creativecommons.org/licenses/by/3.0/", "start" : { "date-parts" : [ [ 2002, 1, 1 ] ], "date-time" : "2002-01-01T00:00:00Z", "timestamp" : { "$numberLong" : "1009843200000" } }, "delay-in-days" : 0, "content-version" : "vor" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Comparative and Functional Genomics" ], "published-print" : { "date-parts" : [ [ 2002 ] ] }, "abstract" : "<jats:p>This brief meeting review summarizes the recommendations of NSF and NPGI funded bioinformaticians concerning the future requirements for plant bioinformatics systems and databases.</jats:p>", "DOI" : "10.1002/cfg.158", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T23:45:33Z", "timestamp" : { "$numberLong" : "1030319133000" } }, "page" : "176-176", "source" : "Crossref", "is-referenced-by-count" : 4, "title" : [ "Meeting Review: Plant Bioinformatics at the NSF and NPGI (PAMGX Satellite) Meetings" ], "prefix" : "10.1155", "volume" : "3", "author" : [ { "ORCID" : "http://orcid.org/0000-0002-4447-597X", "authenticated-orcid" : true, "given" : "Richard", "family" : "Bruskiewich", "affiliation" : [ { "name" : "International Rice Research Institute (IRRI), Metro Manila DAPO 7777, Philippines" } ] } ], "member" : "98", "reference" : [ { "key" : "10.1002/cfg.158-BIB1", "author" : "Brazma", "volume" : "29", "first-page" : "365", "year" : "2001", "journal-title" : "Nature Genetics", "DOI" : "10.1038/ng1201-365", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Comparative and Functional Genomics" ], "link" : [ { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "application/pdf", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 5 ] ], "date-time" : "2017-08-05T06:32:00Z", "timestamp" : { "$numberLong" : "1501914720000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002 ] ] }, "references-count" : 1, "alternative-id" : [ "250628" ], "URL" : "http://dx.doi.org/10.1002/cfg.158", "relation" : { "cites" : [] }, "ISSN" : [ "1531-6912", "1532-6268" ], "issn-type" : [ { "value" : "1531-6912", "type" : "print" }, { "value" : "1532-6268", "type" : "electronic" 
} ], "subject" : [ "Biotechnology", "Genetics", "Molecular Biology" ] } { "_id" : { "$oid" : "5a55176088a035a45bd8802c" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:31:47Z", "timestamp" : { "$numberLong" : "1508769107897" } }, "reference-count" : 1, "publisher" : "Hindawi Limited", "issue" : "2", "license" : [ { "URL" : "http://creativecommons.org/licenses/by/3.0/", "start" : { "date-parts" : [ [ 2002, 1, 1 ] ], "date-time" : "2002-01-01T00:00:00Z", "timestamp" : { "$numberLong" : "1009843200000" } }, "delay-in-days" : 0, "content-version" : "vor" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Comparative and Functional Genomics" ], "published-print" : { "date-parts" : [ [ 2002 ] ] }, "abstract" : "<jats:p>This brief meeting review summarizes the recommendations of NSF and NPGI funded bioinformaticians concerning the future requirements for plant bioinformatics systems and databases.</jats:p>", "DOI" : "10.1002/cfg.158", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T23:45:33Z", "timestamp" : { "$numberLong" : "1030319133000" } }, "page" : "176-176", "source" : "Crossref", "is-referenced-by-count" : 4, "title" : [ "Meeting Review: Plant Bioinformatics at the NSF and NPGI (PAMGX Satellite) Meetings" ], "prefix" : "10.1155", "volume" : "3", "author" : [ { "ORCID" : "http://orcid.org/0000-0002-4447-5978", "authenticated-orcid" : true, "given" : "Richard", "family" : "Bruskiewich", "affiliation" : [ { "name" : "International Rice Research Institute (IRRI), Metro Manila DAPO 7777, Philippines" } ] } ], "member" : "98", "reference" : [ { "key" : "10.1002/cfg.158-BIB1", "author" : "Brazma", "volume" : "29", "first-page" : "365", "year" : "2001", "journal-title" : "Nature Genetics", "DOI" : "10.1038/ng1201-365", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Comparative and Functional Genomics" ], "link" : [ { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "application/pdf", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://downloads.hindawi.com/journals/ijg/2002/250628.pdf", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 5 ] ], "date-time" : "2017-08-05T06:32:00Z", "timestamp" : { "$numberLong" : "1501914720000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002 ] ] }, "references-count" : 1, "alternative-id" : [ "250628" ], "URL" : "http://dx.doi.org/10.1002/cfg.158", "relation" : { "cites" : [] }, "ISSN" : [ "1531-6912", "1532-6268" ], "issn-type" : [ { "value" : "1531-6912", "type" : "print" }, { "value" : "1532-6268", "type" : "electronic" } ], "subject" : [ "Biotechnology", "Genetics", "Molecular Biology" ] } { "_id" : { "$oid" : "5a551fbe88a035a45bdf19fd" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T15:12:12Z", "timestamp" : { "$numberLong" : "1508771532055" } }, "reference-count" : 0, "publisher" : "Springer Nature", "issue" : "11", "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Skeletal Radiol" ], "published-print" : { "date-parts" : [ [ 2001, 11 ] ] }, "DOI" : "10.1007/s002560100423", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 10, 6 ] ], "date-time" : "2002-10-06T13:44:04Z", "timestamp" : { "$numberLong" : "1033911844000" } }, 
"page" : "643-647", "source" : "Crossref", "is-referenced-by-count" : 2, "title" : [ "Unilateral osteonecrosis in a patient with bilateral os centrale carpi: imaging findings" ], "prefix" : "10.1007", "volume" : "30", "author" : [ { "given" : "F.", "family" : "Abascal", "affiliation" : [] }, { "given" : "L.", "family" : "Cerezal", "affiliation" : [] }, { "given" : "F.", "family" : "del Piñal", "affiliation" : [] }, { "given" : "R.", "family" : "GarcÃa-Valtuille", "affiliation" : [] }, { "given" : "A.", "family" : "GarcÃa-Valtuille", "affiliation" : [] }, { "given" : "A.", "family" : "Canga", "affiliation" : [] }, { "given" : "J.", "family" : "Torcida", "affiliation" : [] } ], "member" : "297", "published-online" : { "date-parts" : [ [ 2001, 9, 14 ] ] }, "container-title" : [ "Skeletal Radiology" ], "link" : [ { "URL" : "http://link.springer.com/content/pdf/10.1007/s002560100423", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "similarity-checking" } ], "deposited" : { "date-parts" : [ [ 2014, 4, 9 ] ], "date-time" : "2014-04-09T05:21:52Z", "timestamp" : { "$numberLong" : "1397020912000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001, 9, 14 ] ] }, "references-count" : 0, "alternative-id" : [ "423" ], "URL" : "http://dx.doi.org/10.1007/s002560100423", "ISSN" : [ "0364-2348", "1432-2161" ], "issn-type" : [ { "value" : "0364-2348", "type" : "print" }, { "value" : "1432-2161", "type" : "electronic" } ], "subject" : [ "Radiology Nuclear Medicine and imaging" ] } diff --git a/python/tests/importer.py b/python/tests/importer.py index 190acbed..4d49e794 100644 --- a/python/tests/importer.py +++ b/python/tests/importer.py @@ -13,3 +13,23 @@ def test_issnl_mapping_lookup(): assert fi.issn2issnl('9999-0027') == None assert fi.lookup_issnl('9999-9999') == None + +def test_identifiers(): + + with open('tests/files/ISSN-to-ISSN-L.snip.txt', 'r') as issn_file: + fi = FatcatImporter("http://localhost:9411/v0", issn_file) + + assert fi.is_issnl("1234-5678") == True + assert fi.is_issnl("1234-5678.") == False + assert fi.is_issnl("12345678") == False + assert fi.is_issnl("1-2345678") == False + + assert fi.is_doi("10.1234/56789") == True + assert fi.is_doi("101234/56789") == False + assert fi.is_doi("10.1234_56789") == False + + assert fi.is_orcid("0000-0003-3118-6591") == True + assert fi.is_orcid("0000-00x3-3118-659") == False + assert fi.is_orcid("0000-00033118-659") == False + assert fi.is_orcid("0000-0003-3118-659.") == False + diff --git a/python/tests/orcid.py b/python/tests/orcid.py index 00748972..e07583ac 100644 --- a/python/tests/orcid.py +++ b/python/tests/orcid.py @@ -13,6 +13,10 @@ def test_orcid_importer_batch(orcid_importer): with open('tests/files/0000-0001-8254-7103.json', 'r') as f: orcid_importer.process_batch(f) +def test_orcid_importer_badid(orcid_importer): + with open('tests/files/0000-0001-8254-710X.json', 'r') as f: + orcid_importer.process_batch(f) + def test_orcid_importer(orcid_importer): with open('tests/files/0000-0001-8254-7103.json', 'r') as f: orcid_importer.process_source(f) diff --git a/rust/HACKING.md b/rust/HACKING.md new file mode 100644 index 00000000..622a4b5a --- /dev/null +++ b/rust/HACKING.md @@ -0,0 +1,58 @@ + +## Code Structure + +Almost all of the rust code is either auto-generated or "glue" between the +swagger API spec on one side and the SQL schema on the other. 
diff --git a/rust/HACKING.md b/rust/HACKING.md new file mode 100644 index 00000000..622a4b5a --- /dev/null +++ b/rust/HACKING.md @@ -0,0 +1,58 @@
+
+## Code Structure
+
+Almost all of the rust code is either auto-generated or "glue" between the
+swagger API spec on one side and the SQL schema on the other.
+
+- `./migrations/*/up.sql`: SQL schema
+- `./src/database_schema.rs`: autogenerated per-table Diesel schemas
+- `./src/database_models.rs`: hand- and macro-generated Rust structs matching
+  Diesel schemas, and a small number of row-level helpers
+- `./src/database_entity_crud.rs`: "struct-relational-mapping"; trait
+  implementations of CRUD (create, read, update, delete) actions for each
+  entity model, hitting the database (and building on `database_model` structs)
+- `./src/api_server.rs`: one function for each API endpoint, with rust-style
+  arguments and return types; mostly calls in to `database_entity_crud`
+- `./src/api_wrappers.rs`: hand- and macro-generated wrapper functions, one per
+  API endpoint, that map between autogenerated API request/return types and
+  rust-idiomatic request/return types (plus API models)
+- `./fatcat-api`: autogenerated API models and endpoint types/signatures
+- `../fatcat-openapi2.yml`: the canonical API spec, in OpenAPI 2.0 (swagger) format
+
+When deciding to use this structure, it wasn't expected that either
+`api_wrappers.rs` or `database_models.rs` would need to be hand-maintained; both
+are verbose and implemented in a very mechanical fashion. The return type
+mapping in `api_wrappers` might be necessary, but `database_models.rs` in
+particular feels unnecessary; other projects have attempted to completely
+automate generation of this file, but it doesn't sound reliable. In particular,
+both regular "Row" (queryable) and "NewRow" (insertable) structs need to be
+defined.
+
+## Test Structure
+
+- `./tests/test_api_server.rs`: Iron (web framework) level raw HTTP JSON
+  request/response tests of API endpoints.
+
+## Updating Schemas
+
+Regenerate API schemas after editing the fatcat-openapi2 schema. This will, as
+a side-effect, also run `cargo fmt` on the whole project, so don't run it with
+your editor open!
+
+    cargo install cargo-swagger # uses docker
+    ./codegen_openapi2.sh
+
+Update the Rust database schema (after changing the raw SQL schema):
+
+    diesel database reset
+    diesel print-schema > src/database_schema.rs
+
+Debug SQL schema errors (if diesel commands fail):
+
+    psql fatcat_test < migrations/2018-05-12-001226_init/up.sql
+
+## Direct API Interaction
+
+Creating entities via API:
+
+    http --json post localhost:9411/v0/container name=asdf issn=1234-5678
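For comparison, the same kind of ad-hoc creation works through the generated Python client under `../python`; a minimal sketch, assuming a local server on the default port and using throwaway values:

    import fatcat_client

    conf = fatcat_client.Configuration()
    conf.host = "http://localhost:9411/v0"
    api = fatcat_client.DefaultApi(fatcat_client.ApiClient(conf))

    # create a container entity, analogous to the httpie call above
    edit = api.create_container(fatcat_client.ContainerEntity(name="asdf"))
    print(edit)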
+
+## Simple Deployment
+
+To install manually, on a bare server, as root:
+
+    adduser fatcat
+    apt install postgresql-9.6 postgresql-contrib postgresql-client-9.6 \
+        nginx build-essential git pkg-config libssl-dev libpq-dev \
+        htop screen
+    mkdir -p /srv/fatcat
+    chown fatcat:fatcat /srv/fatcat
+
+    # setup new postgres user
+    su - postgres
+    createuser -P -s fatcat # strong random password
+    # DELETE: createdb fatcat
+
+    # as fatcat user
+    su - fatcat
+    ssh-keygen
+    curl https://sh.rustup.rs -sSf | sh
+    source $HOME/.cargo/env
+    cargo install diesel_cli --no-default-features --features "postgres"
+    cd /srv/fatcat
+    git clone git@git.archive.org:webgroup/fatcat
+    cd fatcat/rust
+    cargo build
+    echo "DATABASE_URL=postgres://fatcat@localhost/fatcat" > .env
+    diesel database reset
+
+    # as fatcat, in a screen or something
+    cd /srv/fatcat/fatcat/rust
+    cargo run
diff --git a/rust/README.md b/rust/README.md
index a6873345..c061a1f9 100644
--- a/rust/README.md
+++ b/rust/README.md
@@ -29,87 +29,4 @@ Tests:
     cargo test -- --test-threads 1
 
-## Simple Deployment
-
-Canonical ansible scripts are in the journal-infra repo. To install manually,
-on a bare server, as root:
-
-    adduser fatcat
-    apt install postgresql-9.6 postgresql-contrib postgresql-client-9.6 \
-        nginx build-essential git pkg-config libssl-dev libpq-dev \
-        htop screen
-    mkdir -p /srv/fatcat
-    chown fatcat:fatcat /srv/fatcat
-
-    # setup new postgres user
-    su - postgres
-    createuser -P -s fatcat # strong random password
-    # DELETE: createdb fatcat
-
-    # as fatcat user
-    su - fatcat
-    ssh-keygen
-    curl https://sh.rustup.rs -sSf | sh
-    source $HOME/.cargo/env
-    cargo install diesel_cli --no-default-features --features "postgres"
-    cd /srv/fatcat
-    git clone git@git.archive.org:webgroup/fatcat
-    cd rust
-    cargo build
-    echo "DATABASE_URL=postgres://fatcat@localhost/fatcat" > .env
-    diesel database reset
-
-    # as fatcat, in a screen or something
-    cd /srv/fatcat/fatcat/rust
-    cargo run
-
-### Dumps and Backups
-
-There are a few different database dump formats folks might want:
-
-- raw native database backups, for disaster recovery (would include
-  volatile/unsupported schema details, user API credentials, full history,
-  in-process edits, comments, etc)
-- a sanitized version of the above: roughly per-table dumps of the full state
-  of the database. Could use per-table SQL expressions with sub-queries to pull
-  in small tables ("partial transform") and export JSON for each table; would
-  be extra work to maintain, so not pursuing for now.
-- full history, full public schema exports, in a form that might be used to
-  mirror or entirely fork the project. Propose supplying the full "changelog"
-  in API schema format, in a single file to capture all entity history, without
-  "hydrating" any inter-entity references. Rely on separate dumps of
-  non-entity, non-versioned tables (editors, abstracts, etc). Note that a
-  variant of this could use the public interface, in particular to do
-  incremental updates (though that wouldn't capture schema changes).
-- transformed exports of the current state of the database (aka, without
-  history). Useful for data analysis, search engines, etc. Propose supplying
-  just the Release table in a fully "hydrated" state to start. Unclear if
-  should be on a work or release basis; will go with release for now. Harder to
-  do using public interface because of the need for transaction locking.
-
-Backing up the entire database using `pg_dump`, with parallelism 1 (use more on
-a larger machine with fast disks; try 4 or 8?), assuming the database name is
-'fatcat', and the current user has access:
-
-    pg_dump -j1 -Fd -f test-dump fatcat
-
-### Special Tricks
-
-Regenerate API schemas (this will, as a side-effect, also run `cargo fmt` on
-the whole project, so don't run it with your editor open):
-
-    cargo install cargo-swagger # uses docker
-    ./codegen_openapi2.sh
-
-Regenerate SQL schema:
-
-    diesel database reset
-    diesel print-schema > src/database_schema.rs
-
-Debugging SQL schema errors:
-
-    psql fatcat_test < migrations/2018-05-12-001226_init/up.sql
-
-Creating entities via API:
-
-    http --json post localhost:9411/v0/container name=asdf issn=1234-5678
+See `HACKING.md` for some more advanced tips and commands.
@@ -1,12 +1,26 @@
+finish refactor:
+- database_entity_crud -> api_entity_crud
+x merge autoaccept branch in with http-verbs branch
+- direct CRUD calls from api_wrappers (except, maybe, batch?)
+  => generally, standardize "edit" actions
+- FatCatId and edit context between wrappers and handlers
+- review editgroup accept code
+
+verbs:
+- enforce "previous_rev" required in updates
+
+- fatcat_api -> fatcat_api_schema (or spec? models? types?)
+- fatcat -> fatcat-api-server
+- editgroup param to update
+  => also for creation? for consistency
 - editor_id vs. editor username; return editor_id (in addition to name?)
 
 later:
+- have editgroup_id be a request-level param everywhere (not entity-level; for batch)
 - editgroup: state to track review status?
 - re-implement old python tests
 - enforce "no editing if editgroup accepted" behavior
-- refactor entity creation/editing (DRY)
-- refactor to allow Result<> in all handlers
 - real auth
 - metrics, jwt, config, sentry
 - ansible/deployment/DNS story
@@ -18,5 +32,4 @@ schema/api questions:
 - "types"
 - define release field stuff
 - what should entity POST return? include both the entity and the edit?
-- PUTs (updates) to entities
diff --git a/rust/fatcat-api/README.md b/rust/fatcat-api/README.md
index af4e6a5b..c971b88c 100644
--- a/rust/fatcat-api/README.md
+++ b/rust/fatcat-api/README.md
@@ -13,7 +13,7 @@ To see how to make this your own, look here:
 [README](https://github.com/swagger-api/swagger-codegen/blob/master/README.md)
 
 - API version: 0.1.0
-- Build date: 2018-08-15T00:04:03.771Z
+- Build date: 2018-09-08T04:52:59.479Z
 
 This autogenerated project defines an API crate `fatcat` which contains:
 * An `Api` trait defining the API in Rust.
@@ -68,6 +68,11 @@ cargo run --example client CreateRelease cargo run --example client CreateReleaseBatch cargo run --example client CreateWork cargo run --example client CreateWorkBatch +cargo run --example client DeleteContainer +cargo run --example client DeleteCreator +cargo run --example client DeleteFile +cargo run --example client DeleteRelease +cargo run --example client DeleteWork cargo run --example client GetChangelog cargo run --example client GetChangelogEntry cargo run --example client GetContainer @@ -91,6 +96,11 @@ cargo run --example client LookupContainer cargo run --example client LookupCreator cargo run --example client LookupFile cargo run --example client LookupRelease +cargo run --example client UpdateContainer +cargo run --example client UpdateCreator +cargo run --example client UpdateFile +cargo run --example client UpdateRelease +cargo run --example client UpdateWork ``` ### HTTPS diff --git a/rust/fatcat-api/api.yaml b/rust/fatcat-api/api.yaml index 7ec91bad..a8919216 100644 --- a/rust/fatcat-api/api.yaml +++ b/rust/fatcat-api/api.yaml @@ -37,7 +37,7 @@ x-issn: &FATCATISSN x-orcid: &FATCATORCID type: string example: "0000-0002-1825-0097" - pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{4}" + pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]" minLength: 19 maxLength: 19 @@ -57,6 +57,10 @@ x-entity-props: &ENTITYPROPS extra: type: object additionalProperties: {} +# TODO: +# edit_extra: +# type: object +# additionalProperties: {} definitions: error_response: @@ -438,6 +442,16 @@ paths: post: operationId: "create_container_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -459,19 +473,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_container" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." 
responses: 200: description: Found Entity schema: $ref: "#/definitions/container_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_container" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/container_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_container" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /container/{id}/history: parameters: - name: id @@ -526,6 +568,16 @@ paths: post: operationId: "create_creator_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -547,19 +599,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_creator" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." responses: 200: description: Found Entity schema: $ref: "#/definitions/creator_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_creator" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/creator_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_creator" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /creator/{id}/history: parameters: - name: id @@ -630,6 +710,16 @@ paths: post: operationId: "create_file_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -651,19 +741,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_file" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." 
responses: 200: description: Found Entity schema: $ref: "#/definitions/file_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_file" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/file_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_file" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /file/{id}/history: parameters: - name: id @@ -718,6 +836,16 @@ paths: post: operationId: "create_release_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -739,19 +867,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_release" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." responses: 200: description: Found Entity schema: $ref: "#/definitions/release_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_release" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/release_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_release" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /release/{id}/history: parameters: - name: id @@ -822,6 +978,16 @@ paths: post: operationId: "create_work_batch" parameters: + - name: autoaccept + in: query + type: boolean + required: false + description: "If true, and editor is authorized, batch is accepted all at once" + - name: editgroup + in: query + type: string + required: false + description: "Editgroup to auto-accept and apply to all entities (required if 'autoaccept' is True)" - name: entity_list in: body required: true @@ -843,19 +1009,47 @@ paths: in: path type: string required: true - - name: expand - in: query - type: string - required: false - description: "List of sub-entities to expand in response. For now, only 'all' accepted." get: operationId: "get_work" + parameters: + - name: expand + in: query + type: string + required: false + description: "List of sub-entities to expand in response. For now, only 'all' accepted." 
responses: 200: description: Found Entity schema: $ref: "#/definitions/work_entity" <<: *ENTITYRESPONSES + put: + operationId: "update_work" + parameters: + - name: entity + in: body + required: true + schema: + $ref: "#/definitions/work_entity" + responses: + 200: + description: Updated Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES + delete: + operationId: "delete_work" + parameters: + - name: editgroup + in: query + required: false + type: string + responses: + 200: + description: Deleted Entity + schema: + $ref: "#/definitions/entity_edit" + <<: *ENTITYRESPONSES /work/{id}/history: parameters: - name: id @@ -1001,10 +1195,18 @@ paths: description: Unmergable schema: $ref: "#/definitions/error_response" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: $ref: "#/definitions/error_response" + 409: + description: Edit Conflict + schema: + $ref: "#/definitions/error_response" 500: description: Generic Error schema: diff --git a/rust/fatcat-api/api/swagger.yaml b/rust/fatcat-api/api/swagger.yaml index c572fd29..0b1ca88a 100644 --- a/rust/fatcat-api/api/swagger.yaml +++ b/rust/fatcat-api/api/swagger.yaml @@ -76,6 +76,22 @@ paths: post: operationId: "create_container_batch" parameters: + - name: "autoaccept" + in: "query" + description: "If true, and editor is authorized, batch is accepted all at\ + \ once" + required: false + type: "boolean" + formatString: "{:?}" + example: "Some(true)" + - name: "editgroup" + in: "query" + description: "Editgroup to auto-accept and apply to all entities (required\ + \ if 'autoaccept' is True)" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" - in: "body" name: "entity_list" required: true @@ -192,6 +208,127 @@ paths: path: "/container/:id" HttpMethod: "Get" httpmethod: "get" + put: + operationId: "update_container" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - in: "body" + name: "entity" + required: true + schema: + $ref: "#/definitions/container_entity" + uppercase_data_type: "CONTAINERENTITY" + refName: "container_entity" + formatString: "{:?}" + example: "???" 
+ model_key: "editgroup_edits" + uppercase_operation_id: "UPDATE_CONTAINER" + consumesJson: true + responses: + 200: + description: "Updated Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "UpdatedEntity" + x-uppercaseResponseId: "UPDATED_ENTITY" + uppercase_operation_id: "UPDATE_CONTAINER" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "UPDATE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "UPDATE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "UPDATE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "update_container" + uppercase_operation_id: "UPDATE_CONTAINER" + path: "/container/:id" + HttpMethod: "Put" + httpmethod: "put" + noClientExample: true + delete: + operationId: "delete_container" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - name: "editgroup" + in: "query" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" + responses: + 200: + description: "Deleted Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "DeletedEntity" + x-uppercaseResponseId: "DELETED_ENTITY" + uppercase_operation_id: "DELETE_CONTAINER" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "DELETE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "DELETE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "DELETE_CONTAINER" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "delete_container" + uppercase_operation_id: "DELETE_CONTAINER" + path: "/container/:id" + HttpMethod: "Delete" + httpmethod: "delete" /container/{id}/history: get: operationId: "get_container_history" @@ -371,6 +508,22 @@ paths: post: operationId: "create_creator_batch" parameters: + - name: "autoaccept" + in: "query" + description: "If true, and editor is authorized, batch is accepted all at\ + \ once" + required: false + type: "boolean" + formatString: "{:?}" + example: "Some(true)" + - name: "editgroup" + in: "query" + description: "Editgroup to auto-accept and apply to all entities (required\ + \ if 'autoaccept' is True)" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" - in: "body" name: "entity_list" required: true @@ -487,6 +640,127 @@ paths: path: 
"/creator/:id" HttpMethod: "Get" httpmethod: "get" + put: + operationId: "update_creator" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - in: "body" + name: "entity" + required: true + schema: + $ref: "#/definitions/creator_entity" + uppercase_data_type: "CREATORENTITY" + refName: "creator_entity" + formatString: "{:?}" + example: "???" + model_key: "editgroup_edits" + uppercase_operation_id: "UPDATE_CREATOR" + consumesJson: true + responses: + 200: + description: "Updated Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "UpdatedEntity" + x-uppercaseResponseId: "UPDATED_ENTITY" + uppercase_operation_id: "UPDATE_CREATOR" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "UPDATE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "UPDATE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "UPDATE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "update_creator" + uppercase_operation_id: "UPDATE_CREATOR" + path: "/creator/:id" + HttpMethod: "Put" + httpmethod: "put" + noClientExample: true + delete: + operationId: "delete_creator" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - name: "editgroup" + in: "query" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" + responses: + 200: + description: "Deleted Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "DeletedEntity" + x-uppercaseResponseId: "DELETED_ENTITY" + uppercase_operation_id: "DELETE_CREATOR" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "DELETE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "DELETE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "DELETE_CREATOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "delete_creator" + uppercase_operation_id: "DELETE_CREATOR" + path: "/creator/:id" + HttpMethod: "Delete" + httpmethod: "delete" /creator/{id}/history: get: operationId: "get_creator_history" @@ -612,7 +886,7 @@ paths: type: "string" maxLength: 19 minLength: 19 - pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{4}" + pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]" formatString: "\\\"{}\\\"" example: "\"orcid_example\".to_string()" 
responses: @@ -720,6 +994,22 @@ paths: post: operationId: "create_file_batch" parameters: + - name: "autoaccept" + in: "query" + description: "If true, and editor is authorized, batch is accepted all at\ + \ once" + required: false + type: "boolean" + formatString: "{:?}" + example: "Some(true)" + - name: "editgroup" + in: "query" + description: "Editgroup to auto-accept and apply to all entities (required\ + \ if 'autoaccept' is True)" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" - in: "body" name: "entity_list" required: true @@ -836,6 +1126,127 @@ paths: path: "/file/:id" HttpMethod: "Get" httpmethod: "get" + put: + operationId: "update_file" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - in: "body" + name: "entity" + required: true + schema: + $ref: "#/definitions/file_entity" + uppercase_data_type: "FILEENTITY" + refName: "file_entity" + formatString: "{:?}" + example: "???" + model_key: "editgroup_edits" + uppercase_operation_id: "UPDATE_FILE" + consumesJson: true + responses: + 200: + description: "Updated Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "UpdatedEntity" + x-uppercaseResponseId: "UPDATED_ENTITY" + uppercase_operation_id: "UPDATE_FILE" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "UPDATE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "UPDATE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "UPDATE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "update_file" + uppercase_operation_id: "UPDATE_FILE" + path: "/file/:id" + HttpMethod: "Put" + httpmethod: "put" + noClientExample: true + delete: + operationId: "delete_file" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - name: "editgroup" + in: "query" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" + responses: + 200: + description: "Deleted Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "DeletedEntity" + x-uppercaseResponseId: "DELETED_ENTITY" + uppercase_operation_id: "DELETE_FILE" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "DELETE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "DELETE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + 
x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "DELETE_FILE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "delete_file" + uppercase_operation_id: "DELETE_FILE" + path: "/file/:id" + HttpMethod: "Delete" + httpmethod: "delete" /file/{id}/history: get: operationId: "get_file_history" @@ -1012,6 +1423,22 @@ paths: post: operationId: "create_release_batch" parameters: + - name: "autoaccept" + in: "query" + description: "If true, and editor is authorized, batch is accepted all at\ + \ once" + required: false + type: "boolean" + formatString: "{:?}" + example: "Some(true)" + - name: "editgroup" + in: "query" + description: "Editgroup to auto-accept and apply to all entities (required\ + \ if 'autoaccept' is True)" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" - in: "body" name: "entity_list" required: true @@ -1128,6 +1555,127 @@ paths: path: "/release/:id" HttpMethod: "Get" httpmethod: "get" + put: + operationId: "update_release" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - in: "body" + name: "entity" + required: true + schema: + $ref: "#/definitions/release_entity" + uppercase_data_type: "RELEASEENTITY" + refName: "release_entity" + formatString: "{:?}" + example: "???" + model_key: "editgroup_edits" + uppercase_operation_id: "UPDATE_RELEASE" + consumesJson: true + responses: + 200: + description: "Updated Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "UpdatedEntity" + x-uppercaseResponseId: "UPDATED_ENTITY" + uppercase_operation_id: "UPDATE_RELEASE" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "UPDATE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "UPDATE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "UPDATE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "update_release" + uppercase_operation_id: "UPDATE_RELEASE" + path: "/release/:id" + HttpMethod: "Put" + httpmethod: "put" + noClientExample: true + delete: + operationId: "delete_release" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - name: "editgroup" + in: "query" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" + responses: + 200: + description: "Deleted Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "DeletedEntity" + x-uppercaseResponseId: "DELETED_ENTITY" + uppercase_operation_id: "DELETE_RELEASE" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "DELETE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + 
producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "DELETE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "DELETE_RELEASE" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "delete_release" + uppercase_operation_id: "DELETE_RELEASE" + path: "/release/:id" + HttpMethod: "Delete" + httpmethod: "delete" /release/{id}/history: get: operationId: "get_release_history" @@ -1358,6 +1906,22 @@ paths: post: operationId: "create_work_batch" parameters: + - name: "autoaccept" + in: "query" + description: "If true, and editor is authorized, batch is accepted all at\ + \ once" + required: false + type: "boolean" + formatString: "{:?}" + example: "Some(true)" + - name: "editgroup" + in: "query" + description: "Editgroup to auto-accept and apply to all entities (required\ + \ if 'autoaccept' is True)" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" - in: "body" name: "entity_list" required: true @@ -1474,6 +2038,127 @@ paths: path: "/work/:id" HttpMethod: "Get" httpmethod: "get" + put: + operationId: "update_work" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - in: "body" + name: "entity" + required: true + schema: + $ref: "#/definitions/work_entity" + uppercase_data_type: "WORKENTITY" + refName: "work_entity" + formatString: "{:?}" + example: "???" 
+ model_key: "editgroup_edits" + uppercase_operation_id: "UPDATE_WORK" + consumesJson: true + responses: + 200: + description: "Updated Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "UpdatedEntity" + x-uppercaseResponseId: "UPDATED_ENTITY" + uppercase_operation_id: "UPDATE_WORK" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "UPDATE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "UPDATE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "UPDATE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "update_work" + uppercase_operation_id: "UPDATE_WORK" + path: "/work/:id" + HttpMethod: "Put" + httpmethod: "put" + noClientExample: true + delete: + operationId: "delete_work" + parameters: + - name: "id" + in: "path" + required: true + type: "string" + formatString: "\\\"{}\\\"" + example: "\"id_example\".to_string()" + - name: "editgroup" + in: "query" + required: false + type: "string" + formatString: "{:?}" + example: "Some(\"editgroup_example\".to_string())" + responses: + 200: + description: "Deleted Entity" + schema: + $ref: "#/definitions/entity_edit" + x-responseId: "DeletedEntity" + x-uppercaseResponseId: "DELETED_ENTITY" + uppercase_operation_id: "DELETE_WORK" + uppercase_data_type: "ENTITYEDIT" + producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "DELETE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 404: + description: "Not Found" + schema: + $ref: "#/definitions/error_response" + x-responseId: "NotFound" + x-uppercaseResponseId: "NOT_FOUND" + uppercase_operation_id: "DELETE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + 500: + description: "Generic Error" + schema: + $ref: "#/definitions/error_response" + x-responseId: "GenericError" + x-uppercaseResponseId: "GENERIC_ERROR" + uppercase_operation_id: "DELETE_WORK" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true + operation_id: "delete_work" + uppercase_operation_id: "DELETE_WORK" + path: "/work/:id" + HttpMethod: "Delete" + httpmethod: "delete" /work/{id}/history: get: operationId: "get_work_history" @@ -1808,11 +2493,11 @@ paths: uppercase_data_type: "SUCCESS" producesJson: true 400: - description: "Unmergable" + description: "Bad Request" schema: $ref: "#/definitions/error_response" - x-responseId: "Unmergable" - x-uppercaseResponseId: "UNMERGABLE" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" uppercase_operation_id: "ACCEPT_EDITGROUP" uppercase_data_type: "ERRORRESPONSE" producesJson: true @@ -1825,6 +2510,15 @@ paths: uppercase_operation_id: "ACCEPT_EDITGROUP" uppercase_data_type: "ERRORRESPONSE" producesJson: true + 409: + description: "Edit Conflict" + schema: + $ref: "#/definitions/error_response" + x-responseId: "EditConflict" + x-uppercaseResponseId: "EDIT_CONFLICT" + 
uppercase_operation_id: "ACCEPT_EDITGROUP" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true 500: description: "Generic Error" schema: @@ -2061,7 +2755,7 @@ definitions: example: "0000-0002-1825-0097" minLength: 19 maxLength: 19 - pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{4}" + pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]" surname: type: "string" given_name: @@ -3180,7 +3874,7 @@ x-issn: x-orcid: type: "string" example: "0000-0002-1825-0097" - pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{4}" + pattern: "\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]" minLength: 19 maxLength: 19 x-entity-props: diff --git a/rust/fatcat-api/examples/client.rs b/rust/fatcat-api/examples/client.rs index f98518cc..cc94af11 100644 --- a/rust/fatcat-api/examples/client.rs +++ b/rust/fatcat-api/examples/client.rs @@ -13,10 +13,11 @@ use clap::{App, Arg}; #[allow(unused_imports)] use fatcat::{ AcceptEditgroupResponse, ApiError, ApiNoContext, ContextWrapperExt, CreateContainerBatchResponse, CreateContainerResponse, CreateCreatorBatchResponse, CreateCreatorResponse, - CreateEditgroupResponse, CreateFileBatchResponse, CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, GetChangelogEntryResponse, - GetChangelogResponse, GetContainerHistoryResponse, GetContainerResponse, GetCreatorHistoryResponse, GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, - GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, GetFileResponse, GetReleaseFilesResponse, GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, - GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, LookupContainerResponse, LookupCreatorResponse, LookupFileResponse, LookupReleaseResponse, + CreateEditgroupResponse, CreateFileBatchResponse, CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, DeleteContainerResponse, + DeleteCreatorResponse, DeleteFileResponse, DeleteReleaseResponse, DeleteWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, GetContainerResponse, + GetCreatorHistoryResponse, GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, GetFileResponse, + GetReleaseFilesResponse, GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, LookupContainerResponse, + LookupCreatorResponse, LookupFileResponse, LookupReleaseResponse, UpdateContainerResponse, UpdateCreatorResponse, UpdateFileResponse, UpdateReleaseResponse, UpdateWorkResponse, }; #[allow(unused_imports)] use futures::{future, stream, Future, Stream}; @@ -33,6 +34,11 @@ fn main() { "CreateFileBatch", "CreateReleaseBatch", "CreateWorkBatch", + "DeleteContainer", + "DeleteCreator", + "DeleteFile", + "DeleteRelease", + "DeleteWork", "GetChangelog", "GetChangelogEntry", "GetContainer", @@ -95,7 +101,7 @@ fn main() { // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); // }, Some("CreateContainerBatch") => { - let result = client.create_container_batch(&Vec::new()).wait(); + let result = client.create_container_batch(&Vec::new(), Some(true), Some("editgroup_example".to_string())).wait(); println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } @@ -105,7 +111,7 @@ fn main() { // println!("{:?} (X-Span-ID: {:?})", result, 
client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); // }, Some("CreateCreatorBatch") => { - let result = client.create_creator_batch(&Vec::new()).wait(); + let result = client.create_creator_batch(&Vec::new(), Some(true), Some("editgroup_example".to_string())).wait(); println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } @@ -121,7 +127,7 @@ fn main() { // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); // }, Some("CreateFileBatch") => { - let result = client.create_file_batch(&Vec::new()).wait(); + let result = client.create_file_batch(&Vec::new(), Some(true), Some("editgroup_example".to_string())).wait(); println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } @@ -131,7 +137,7 @@ fn main() { // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); // }, Some("CreateReleaseBatch") => { - let result = client.create_release_batch(&Vec::new()).wait(); + let result = client.create_release_batch(&Vec::new(), Some(true), Some("editgroup_example".to_string())).wait(); println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } @@ -141,7 +147,32 @@ fn main() { // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); // }, Some("CreateWorkBatch") => { - let result = client.create_work_batch(&Vec::new()).wait(); + let result = client.create_work_batch(&Vec::new(), Some(true), Some("editgroup_example".to_string())).wait(); + println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + } + + Some("DeleteContainer") => { + let result = client.delete_container("id_example".to_string(), Some("editgroup_example".to_string())).wait(); + println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + } + + Some("DeleteCreator") => { + let result = client.delete_creator("id_example".to_string(), Some("editgroup_example".to_string())).wait(); + println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + } + + Some("DeleteFile") => { + let result = client.delete_file("id_example".to_string(), Some("editgroup_example".to_string())).wait(); + println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + } + + Some("DeleteRelease") => { + let result = client.delete_release("id_example".to_string(), Some("editgroup_example".to_string())).wait(); + println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + } + + Some("DeleteWork") => { + let result = client.delete_work("id_example".to_string(), Some("editgroup_example".to_string())).wait(); println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } @@ -260,6 +291,35 @@ fn main() { println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); } + // Disabled because there's no example. 
+ // Some("UpdateContainer") => { + // let result = client.update_container("id_example".to_string(), ???).wait(); + // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + // }, + + // Disabled because there's no example. + // Some("UpdateCreator") => { + // let result = client.update_creator("id_example".to_string(), ???).wait(); + // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + // }, + + // Disabled because there's no example. + // Some("UpdateFile") => { + // let result = client.update_file("id_example".to_string(), ???).wait(); + // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + // }, + + // Disabled because there's no example. + // Some("UpdateRelease") => { + // let result = client.update_release("id_example".to_string(), ???).wait(); + // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + // }, + + // Disabled because there's no example. + // Some("UpdateWork") => { + // let result = client.update_work("id_example".to_string(), ???).wait(); + // println!("{:?} (X-Span-ID: {:?})", result, client.context().x_span_id.clone().unwrap_or(String::from("<none>"))); + // }, _ => panic!("Invalid operation provided"), } } diff --git a/rust/fatcat-api/examples/server_lib/server.rs b/rust/fatcat-api/examples/server_lib/server.rs index 572025f0..ab08f594 100644 --- a/rust/fatcat-api/examples/server_lib/server.rs +++ b/rust/fatcat-api/examples/server_lib/server.rs @@ -12,10 +12,11 @@ use swagger; use fatcat::models; use fatcat::{ AcceptEditgroupResponse, Api, ApiError, Context, CreateContainerBatchResponse, CreateContainerResponse, CreateCreatorBatchResponse, CreateCreatorResponse, CreateEditgroupResponse, - CreateFileBatchResponse, CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, - GetContainerHistoryResponse, GetContainerResponse, GetCreatorHistoryResponse, GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, - GetFileHistoryResponse, GetFileResponse, GetReleaseFilesResponse, GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, - GetWorkResponse, LookupContainerResponse, LookupCreatorResponse, LookupFileResponse, LookupReleaseResponse, + CreateFileBatchResponse, CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, DeleteContainerResponse, DeleteCreatorResponse, + DeleteFileResponse, DeleteReleaseResponse, DeleteWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, GetContainerResponse, GetCreatorHistoryResponse, + GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, GetFileResponse, GetReleaseFilesResponse, + GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, LookupContainerResponse, LookupCreatorResponse, + LookupFileResponse, LookupReleaseResponse, UpdateContainerResponse, UpdateCreatorResponse, UpdateFileResponse, UpdateReleaseResponse, UpdateWorkResponse, }; #[derive(Copy, Clone)] @@ -34,11 +35,19 @@ impl Api for Server { 
Box::new(futures::failed("Generic failure".into())) } - fn create_container_batch(&self, entity_list: &Vec<models::ContainerEntity>, context: &Context) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> { + fn create_container_batch( + &self, + entity_list: &Vec<models::ContainerEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> { let context = context.clone(); println!( - "create_container_batch({:?}) - X-Span-ID: {:?}", + "create_container_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}", entity_list, + autoaccept, + editgroup, context.x_span_id.unwrap_or(String::from("<none>")).clone() ); Box::new(futures::failed("Generic failure".into())) @@ -50,9 +59,21 @@ impl Api for Server { Box::new(futures::failed("Generic failure".into())) } - fn create_creator_batch(&self, entity_list: &Vec<models::CreatorEntity>, context: &Context) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> { + fn create_creator_batch( + &self, + entity_list: &Vec<models::CreatorEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> { let context = context.clone(); - println!("create_creator_batch({:?}) - X-Span-ID: {:?}", entity_list, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + println!( + "create_creator_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}", + entity_list, + autoaccept, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); Box::new(futures::failed("Generic failure".into())) } @@ -68,9 +89,21 @@ impl Api for Server { Box::new(futures::failed("Generic failure".into())) } - fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, context: &Context) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { + fn create_file_batch( + &self, + entity_list: &Vec<models::FileEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { let context = context.clone(); - println!("create_file_batch({:?}) - X-Span-ID: {:?}", entity_list, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + println!( + "create_file_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}", + entity_list, + autoaccept, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); Box::new(futures::failed("Generic failure".into())) } @@ -80,9 +113,21 @@ impl Api for Server { Box::new(futures::failed("Generic failure".into())) } - fn create_release_batch(&self, entity_list: &Vec<models::ReleaseEntity>, context: &Context) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { + fn create_release_batch( + &self, + entity_list: &Vec<models::ReleaseEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { let context = context.clone(); - println!("create_release_batch({:?}) - X-Span-ID: {:?}", entity_list, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + println!( + "create_release_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}", + entity_list, + autoaccept, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); Box::new(futures::failed("Generic failure".into())) } @@ -92,9 +137,76 @@ impl Api for Server { 
Box::new(futures::failed("Generic failure".into())) } - fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, context: &Context) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { + fn create_work_batch( + &self, + entity_list: &Vec<models::WorkEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "create_work_batch({:?}, {:?}, {:?}) - X-Span-ID: {:?}", + entity_list, + autoaccept, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn delete_container(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "delete_container(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn delete_creator(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "delete_creator(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn delete_file(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "delete_file(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn delete_release(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteReleaseResponse, Error = ApiError> + Send> { let context = context.clone(); - println!("create_work_batch({:?}) - X-Span-ID: {:?}", entity_list, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + println!( + "delete_release(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn delete_work(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "delete_work(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + editgroup, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); Box::new(futures::failed("Generic failure".into())) } @@ -260,4 +372,49 @@ impl Api for Server { println!("lookup_release(\"{}\") - X-Span-ID: {:?}", doi, context.x_span_id.unwrap_or(String::from("<none>")).clone()); Box::new(futures::failed("Generic failure".into())) } + + fn update_container(&self, id: String, entity: models::ContainerEntity, context: &Context) -> Box<Future<Item = UpdateContainerResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "update_container(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + entity, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn update_creator(&self, id: String, entity: models::CreatorEntity, context: &Context) -> 
Box<Future<Item = UpdateCreatorResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "update_creator(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + entity, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn update_file(&self, id: String, entity: models::FileEntity, context: &Context) -> Box<Future<Item = UpdateFileResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!("update_file(\"{}\", {:?}) - X-Span-ID: {:?}", id, entity, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + Box::new(futures::failed("Generic failure".into())) + } + + fn update_release(&self, id: String, entity: models::ReleaseEntity, context: &Context) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!( + "update_release(\"{}\", {:?}) - X-Span-ID: {:?}", + id, + entity, + context.x_span_id.unwrap_or(String::from("<none>")).clone() + ); + Box::new(futures::failed("Generic failure".into())) + } + + fn update_work(&self, id: String, entity: models::WorkEntity, context: &Context) -> Box<Future<Item = UpdateWorkResponse, Error = ApiError> + Send> { + let context = context.clone(); + println!("update_work(\"{}\", {:?}) - X-Span-ID: {:?}", id, entity, context.x_span_id.unwrap_or(String::from("<none>")).clone()); + Box::new(futures::failed("Generic failure".into())) + } } diff --git a/rust/fatcat-api/src/client.rs b/rust/fatcat-api/src/client.rs index bc1992de..6f61f773 100644 --- a/rust/fatcat-api/src/client.rs +++ b/rust/fatcat-api/src/client.rs @@ -36,10 +36,11 @@ use swagger::{ApiError, Context, XSpanId}; use models; use { AcceptEditgroupResponse, Api, CreateContainerBatchResponse, CreateContainerResponse, CreateCreatorBatchResponse, CreateCreatorResponse, CreateEditgroupResponse, CreateFileBatchResponse, - CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, - GetContainerResponse, GetCreatorHistoryResponse, GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, - GetFileResponse, GetReleaseFilesResponse, GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, - LookupContainerResponse, LookupCreatorResponse, LookupFileResponse, LookupReleaseResponse, + CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, DeleteContainerResponse, DeleteCreatorResponse, DeleteFileResponse, + DeleteReleaseResponse, DeleteWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, GetContainerResponse, GetCreatorHistoryResponse, + GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, GetFileResponse, GetReleaseFilesResponse, + GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, LookupContainerResponse, LookupCreatorResponse, + LookupFileResponse, LookupReleaseResponse, UpdateContainerResponse, UpdateCreatorResponse, UpdateFileResponse, UpdateReleaseResponse, UpdateWorkResponse, }; /// Convert input into a base path, e.g. "http://example:123". Also checks the scheme as it goes. 
@@ -191,7 +192,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; - Ok(AcceptEditgroupResponse::Unmergable(body)) + Ok(AcceptEditgroupResponse::BadRequest(body)) } 404 => { let mut buf = String::new(); @@ -200,6 +201,13 @@ impl Api for Client { Ok(AcceptEditgroupResponse::NotFound(body)) }
+ 409 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(AcceptEditgroupResponse::EditConflict(body))
+ } 500 => { let mut buf = String::new(); response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; @@ -290,8 +298,23 @@ impl Api for Client { Box::new(futures::done(result)) } - fn create_container_batch(&self, param_entity_list: &Vec<models::ContainerEntity>, context: &Context) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> { - let url = format!("{}/v0/container/batch", self.base_path);
+ fn create_container_batch(
+ &self,
+ param_entity_list: &Vec<models::ContainerEntity>,
+ param_autoaccept: Option<bool>,
+ param_editgroup: Option<String>,
+ context: &Context,
+ ) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/container/batch?{autoaccept}{editgroup}",
+ self.base_path,
+ autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ ); let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize"); @@ -420,8 +443,23 @@ impl Api for Client { Box::new(futures::done(result)) } - fn create_creator_batch(&self, param_entity_list: &Vec<models::CreatorEntity>, context: &Context) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> { - let url = format!("{}/v0/creator/batch", self.base_path);
+ fn create_creator_batch(
+ &self,
+ param_entity_list: &Vec<models::CreatorEntity>,
+ param_autoaccept: Option<bool>,
+ param_editgroup: Option<String>,
+ context: &Context,
+ ) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string()));
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/creator/batch?{autoaccept}{editgroup}",
+ self.base_path,
+ autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ ); let body = serde_json::to_string(&param_entity_list).expect("impossible to fail to serialize"); @@ -608,8 +646,23 @@ impl Api for Client { Box::new(futures::done(result)) } - fn create_file_batch(&self, param_entity_list: &Vec<models::FileEntity>, context: &Context) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { - let url = 
format!("{}/v0/file/batch", self.base_path); + fn create_file_batch( + &self, + param_entity_list: &Vec<models::FileEntity>, + param_autoaccept: Option<bool>, + param_editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { + // Query parameters + let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string())); + let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string())); + + let url = format!( + "{}/v0/file/batch?{autoaccept}{editgroup}", + self.base_path, + autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET), + editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET) + ); let body = serde_json::to_string(¶m_entity_list).expect("impossible to fail to serialize"); @@ -738,8 +791,23 @@ impl Api for Client { Box::new(futures::done(result)) } - fn create_release_batch(&self, param_entity_list: &Vec<models::ReleaseEntity>, context: &Context) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { - let url = format!("{}/v0/release/batch", self.base_path); + fn create_release_batch( + &self, + param_entity_list: &Vec<models::ReleaseEntity>, + param_autoaccept: Option<bool>, + param_editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { + // Query parameters + let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string())); + let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string())); + + let url = format!( + "{}/v0/release/batch?{autoaccept}{editgroup}", + self.base_path, + autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET), + editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET) + ); let body = serde_json::to_string(¶m_entity_list).expect("impossible to fail to serialize"); @@ -868,8 +936,23 @@ impl Api for Client { Box::new(futures::done(result)) } - fn create_work_batch(&self, param_entity_list: &Vec<models::WorkEntity>, context: &Context) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { - let url = format!("{}/v0/work/batch", self.base_path); + fn create_work_batch( + &self, + param_entity_list: &Vec<models::WorkEntity>, + param_autoaccept: Option<bool>, + param_editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { + // Query parameters + let query_autoaccept = param_autoaccept.map_or_else(String::new, |query| format!("autoaccept={autoaccept}&", autoaccept = query.to_string())); + let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string())); + + let url = format!( + "{}/v0/work/batch?{autoaccept}{editgroup}", + self.base_path, + autoaccept = utf8_percent_encode(&query_autoaccept, QUERY_ENCODE_SET), + editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET) + ); let body = serde_json::to_string(¶m_entity_list).expect("impossible to fail to serialize"); @@ -933,6 +1016,346 @@ impl Api for Client { Box::new(futures::done(result)) } + fn delete_container(&self, param_id: String, param_editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteContainerResponse, Error = 
ApiError> + Send> {
+ // Query parameters
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/container/{id}?{editgroup}",
+ self.base_path,
+ id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ );
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Delete, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<DeleteContainerResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(DeleteContainerResponse::DeletedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteContainerResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteContainerResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteContainerResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn delete_creator(&self, param_id: String, param_editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/creator/{id}?{editgroup}",
+ self.base_path,
+ id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ );
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Delete, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
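+ // Each generated method ends with a local `parse_response` helper (below)
+ // that maps HTTP status codes onto the operation's typed response enum:
+ // 200 deserializes a models::EntityEdit, while 400/404/500 deserialize a
+ // models::ErrorResponse into the BadRequest/NotFound/GenericError variants.
+ // Any other status falls through to a plain ApiError quoting at most the
+ // first 100 bytes of the body for debugging.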
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<DeleteCreatorResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(DeleteCreatorResponse::DeletedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteCreatorResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteCreatorResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteCreatorResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn delete_file(&self, param_id: String, param_editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/file/{id}?{editgroup}",
+ self.base_path,
+ id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ );
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Delete, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<DeleteFileResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(DeleteFileResponse::DeletedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteFileResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteFileResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteFileResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn delete_release(&self, param_id: String, param_editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteReleaseResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/release/{id}?{editgroup}",
+ self.base_path,
+ id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ );
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Delete, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<DeleteReleaseResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(DeleteReleaseResponse::DeletedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteReleaseResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteReleaseResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteReleaseResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn delete_work(&self, param_id: String, param_editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send> {
+ // Query parameters
+ let query_editgroup = param_editgroup.map_or_else(String::new, |query| format!("editgroup={editgroup}&", editgroup = query.to_string()));
+
+ let url = format!(
+ "{}/v0/work/{id}?{editgroup}",
+ self.base_path,
+ id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET),
+ editgroup = utf8_percent_encode(&query_editgroup, QUERY_ENCODE_SET)
+ );
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Delete, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<DeleteWorkResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(DeleteWorkResponse::DeletedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteWorkResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteWorkResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(DeleteWorkResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+ fn get_changelog(&self, param_limit: Option<i64>, context: &Context) -> Box<Future<Item = GetChangelogResponse, Error = ApiError> + Send> { // Query parameters let query_limit = param_limit.map_or_else(String::new, |query| format!("limit={limit}&", limit = query.to_string())); @@ -2361,6 +2784,331 @@ impl Api for Client { let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response); Box::new(futures::done(result)) }
+
+ fn update_container(&self, param_id: String, param_entity: models::ContainerEntity, context: &Context) -> Box<Future<Item = UpdateContainerResponse, Error = ApiError> + Send> {
+ let url = format!("{}/v0/container/{id}", self.base_path, id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET));
+
+ let body = serde_json::to_string(&param_entity).expect("impossible to fail to serialize");
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Put, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ let request = request.body(&body);
+
+ custom_headers.set(ContentType(mimetypes::requests::UPDATE_CONTAINER.clone()));
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
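+ // Note that the update methods PUT a JSON-serialized entity as the request
+ // body and set a per-operation Content-Type from `mimetypes::requests`,
+ // in contrast to the delete methods above, which carry no body and thread
+ // the optional editgroup through the query string instead.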
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<UpdateContainerResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(UpdateContainerResponse::UpdatedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateContainerResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateContainerResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateContainerResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn update_creator(&self, param_id: String, param_entity: models::CreatorEntity, context: &Context) -> Box<Future<Item = UpdateCreatorResponse, Error = ApiError> + Send> {
+ let url = format!("{}/v0/creator/{id}", self.base_path, id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET));
+
+ let body = serde_json::to_string(&param_entity).expect("impossible to fail to serialize");
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Put, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ let request = request.body(&body);
+
+ custom_headers.set(ContentType(mimetypes::requests::UPDATE_CREATOR.clone()));
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<UpdateCreatorResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(UpdateCreatorResponse::UpdatedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateCreatorResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateCreatorResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateCreatorResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn update_file(&self, param_id: String, param_entity: models::FileEntity, context: &Context) -> Box<Future<Item = UpdateFileResponse, Error = ApiError> + Send> {
+ let url = format!("{}/v0/file/{id}", self.base_path, id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET));
+
+ let body = serde_json::to_string(&param_entity).expect("impossible to fail to serialize");
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Put, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ let request = request.body(&body);
+
+ custom_headers.set(ContentType(mimetypes::requests::UPDATE_FILE.clone()));
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<UpdateFileResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(UpdateFileResponse::UpdatedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateFileResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateFileResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateFileResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn update_release(&self, param_id: String, param_entity: models::ReleaseEntity, context: &Context) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send> {
+ let url = format!("{}/v0/release/{id}", self.base_path, id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET));
+
+ let body = serde_json::to_string(&param_entity).expect("impossible to fail to serialize");
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Put, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ let request = request.body(&body);
+
+ custom_headers.set(ContentType(mimetypes::requests::UPDATE_RELEASE.clone()));
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<UpdateReleaseResponse, ApiError> {
+ match response.status.to_u16() {
+ 200 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::EntityEdit>(&buf)?;
+
+ Ok(UpdateReleaseResponse::UpdatedEntity(body))
+ }
+ 400 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateReleaseResponse::BadRequest(body))
+ }
+ 404 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateReleaseResponse::NotFound(body))
+ }
+ 500 => {
+ let mut buf = String::new();
+ response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?;
+ let body = serde_json::from_str::<models::ErrorResponse>(&buf)?;
+
+ Ok(UpdateReleaseResponse::GenericError(body))
+ }
+ code => {
+ let mut buf = [0; 100];
+ let debug_body = match response.read(&mut buf) {
+ Ok(len) => match str::from_utf8(&buf[..len]) {
+ Ok(body) => Cow::from(body),
+ Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())),
+ },
+ Err(e) => Cow::from(format!("<Failed to read body: {}>", e)),
+ };
+ Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body)))
+ }
+ }
+ }
+
+ let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response);
+ Box::new(futures::done(result))
+ }
+
+ fn update_work(&self, param_id: String, param_entity: models::WorkEntity, context: &Context) -> Box<Future<Item = UpdateWorkResponse, Error = ApiError> + Send> {
+ let url = format!("{}/v0/work/{id}", self.base_path, id = utf8_percent_encode(&param_id.to_string(), PATH_SEGMENT_ENCODE_SET));
+
+ let body = serde_json::to_string(&param_entity).expect("impossible to fail to serialize");
+
+ let hyper_client = (self.hyper_client)();
+ let request = hyper_client.request(hyper::method::Method::Put, &url);
+ let mut custom_headers = hyper::header::Headers::new();
+
+ let request = request.body(&body);
+
+ custom_headers.set(ContentType(mimetypes::requests::UPDATE_WORK.clone()));
+ context.x_span_id.as_ref().map(|header| custom_headers.set(XSpanId(header.clone())));
+
+ let request = request.headers(custom_headers);
+
+ // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists).
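+ // Illustrative use of the typed result (hypothetical `client`, `work_id`,
+ // and `entity` values; blocking futures 0.1 style, not part of this diff):
+ //
+ //     match client.update_work(work_id, entity, &context).wait() {
+ //         Ok(UpdateWorkResponse::UpdatedEntity(edit)) => println!("edit: {:?}", edit),
+ //         Ok(other) => eprintln!("update refused: {:?}", other),
+ //         Err(e) => eprintln!("transport error: {:?}", e),
+ //     }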
+ fn parse_response(mut response: hyper::client::response::Response) -> Result<UpdateWorkResponse, ApiError> { + match response.status.to_u16() { + 200 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::EntityEdit>(&buf)?; + + Ok(UpdateWorkResponse::UpdatedEntity(body)) + } + 400 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; + + Ok(UpdateWorkResponse::BadRequest(body)) + } + 404 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; + + Ok(UpdateWorkResponse::NotFound(body)) + } + 500 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; + + Ok(UpdateWorkResponse::GenericError(body)) + } + code => { + let mut buf = [0; 100]; + let debug_body = match response.read(&mut buf) { + Ok(len) => match str::from_utf8(&buf[..len]) { + Ok(body) => Cow::from(body), + Err(_) => Cow::from(format!("<Body was not UTF8: {:?}>", &buf[..len].to_vec())), + }, + Err(e) => Cow::from(format!("<Failed to read body: {}>", e)), + }; + Err(ApiError(format!("Unexpected response code {}:\n{:?}\n\n{}", code, response.headers, debug_body))) + } + } + } + + let result = request.send().map_err(|e| ApiError(format!("No response received: {}", e))).and_then(parse_response); + Box::new(futures::done(result)) + } } #[derive(Debug)] diff --git a/rust/fatcat-api/src/lib.rs b/rust/fatcat-api/src/lib.rs index fac8ecac..fc1ae2a1 100644 --- a/rust/fatcat-api/src/lib.rs +++ b/rust/fatcat-api/src/lib.rs @@ -36,10 +36,12 @@ pub use swagger::{ApiError, Context, ContextWrapper}; pub enum AcceptEditgroupResponse { /// Merged Successfully MergedSuccessfully(models::Success), - /// Unmergable - Unmergable(models::ErrorResponse), + /// Bad Request + BadRequest(models::ErrorResponse), /// Not Found NotFound(models::ErrorResponse), + /// Edit Conflict + EditConflict(models::ErrorResponse), /// Generic Error GenericError(models::ErrorResponse), } @@ -175,6 +177,66 @@ pub enum CreateWorkBatchResponse { } #[derive(Debug, PartialEq)] +pub enum DeleteContainerResponse { + /// Deleted Entity + DeletedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum DeleteCreatorResponse { + /// Deleted Entity + DeletedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum DeleteFileResponse { + /// Deleted Entity + DeletedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum DeleteReleaseResponse { + /// Deleted Entity + DeletedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not 
Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum DeleteWorkResponse { + /// Deleted Entity + DeletedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] pub enum GetChangelogResponse { /// Success Success(Vec<models::ChangelogEntry>), @@ -436,31 +498,131 @@ pub enum LookupReleaseResponse { GenericError(models::ErrorResponse), } +#[derive(Debug, PartialEq)] +pub enum UpdateContainerResponse { + /// Updated Entity + UpdatedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum UpdateCreatorResponse { + /// Updated Entity + UpdatedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum UpdateFileResponse { + /// Updated Entity + UpdatedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum UpdateReleaseResponse { + /// Updated Entity + UpdatedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + +#[derive(Debug, PartialEq)] +pub enum UpdateWorkResponse { + /// Updated Entity + UpdatedEntity(models::EntityEdit), + /// Bad Request + BadRequest(models::ErrorResponse), + /// Not Found + NotFound(models::ErrorResponse), + /// Generic Error + GenericError(models::ErrorResponse), +} + /// API pub trait Api { fn accept_editgroup(&self, id: String, context: &Context) -> Box<Future<Item = AcceptEditgroupResponse, Error = ApiError> + Send>; fn create_container(&self, entity: models::ContainerEntity, context: &Context) -> Box<Future<Item = CreateContainerResponse, Error = ApiError> + Send>; - fn create_container_batch(&self, entity_list: &Vec<models::ContainerEntity>, context: &Context) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>; + fn create_container_batch( + &self, + entity_list: &Vec<models::ContainerEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>; fn create_creator(&self, entity: models::CreatorEntity, context: &Context) -> Box<Future<Item = CreateCreatorResponse, Error = ApiError> + Send>; - fn create_creator_batch(&self, entity_list: &Vec<models::CreatorEntity>, context: &Context) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>; + fn create_creator_batch( + &self, + entity_list: &Vec<models::CreatorEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>; fn create_editgroup(&self, entity: models::Editgroup, context: &Context) -> Box<Future<Item = CreateEditgroupResponse, Error = ApiError> + Send>; fn create_file(&self, entity: models::FileEntity, context: &Context) -> 
Box<Future<Item = CreateFileResponse, Error = ApiError> + Send>; - fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, context: &Context) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>; + fn create_file_batch( + &self, + entity_list: &Vec<models::FileEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>; fn create_release(&self, entity: models::ReleaseEntity, context: &Context) -> Box<Future<Item = CreateReleaseResponse, Error = ApiError> + Send>; - fn create_release_batch(&self, entity_list: &Vec<models::ReleaseEntity>, context: &Context) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>; + fn create_release_batch( + &self, + entity_list: &Vec<models::ReleaseEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>; fn create_work(&self, entity: models::WorkEntity, context: &Context) -> Box<Future<Item = CreateWorkResponse, Error = ApiError> + Send>; - fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, context: &Context) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>; + fn create_work_batch( + &self, + entity_list: &Vec<models::WorkEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + context: &Context, + ) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>; + + fn delete_container(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send>; + + fn delete_creator(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send>; + + fn delete_file(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send>; + + fn delete_release(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteReleaseResponse, Error = ApiError> + Send>; + + fn delete_work(&self, id: String, editgroup: Option<String>, context: &Context) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send>; fn get_changelog(&self, limit: Option<i64>, context: &Context) -> Box<Future<Item = GetChangelogResponse, Error = ApiError> + Send>; @@ -507,6 +669,16 @@ pub trait Api { fn lookup_file(&self, sha1: String, context: &Context) -> Box<Future<Item = LookupFileResponse, Error = ApiError> + Send>; fn lookup_release(&self, doi: String, context: &Context) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send>; + + fn update_container(&self, id: String, entity: models::ContainerEntity, context: &Context) -> Box<Future<Item = UpdateContainerResponse, Error = ApiError> + Send>; + + fn update_creator(&self, id: String, entity: models::CreatorEntity, context: &Context) -> Box<Future<Item = UpdateCreatorResponse, Error = ApiError> + Send>; + + fn update_file(&self, id: String, entity: models::FileEntity, context: &Context) -> Box<Future<Item = UpdateFileResponse, Error = ApiError> + Send>; + + fn update_release(&self, id: String, entity: models::ReleaseEntity, context: &Context) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send>; + + fn update_work(&self, id: String, entity: models::WorkEntity, context: &Context) -> Box<Future<Item = UpdateWorkResponse, Error = ApiError> + Send>; } /// API without 
a `Context` @@ -515,25 +687,50 @@ pub trait ApiNoContext { fn create_container(&self, entity: models::ContainerEntity) -> Box<Future<Item = CreateContainerResponse, Error = ApiError> + Send>; - fn create_container_batch(&self, entity_list: &Vec<models::ContainerEntity>) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>; + fn create_container_batch( + &self, + entity_list: &Vec<models::ContainerEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send>; fn create_creator(&self, entity: models::CreatorEntity) -> Box<Future<Item = CreateCreatorResponse, Error = ApiError> + Send>; - fn create_creator_batch(&self, entity_list: &Vec<models::CreatorEntity>) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>; + fn create_creator_batch( + &self, + entity_list: &Vec<models::CreatorEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send>; fn create_editgroup(&self, entity: models::Editgroup) -> Box<Future<Item = CreateEditgroupResponse, Error = ApiError> + Send>; fn create_file(&self, entity: models::FileEntity) -> Box<Future<Item = CreateFileResponse, Error = ApiError> + Send>; - fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>; + fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, autoaccept: Option<bool>, editgroup: Option<String>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send>; fn create_release(&self, entity: models::ReleaseEntity) -> Box<Future<Item = CreateReleaseResponse, Error = ApiError> + Send>; - fn create_release_batch(&self, entity_list: &Vec<models::ReleaseEntity>) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>; + fn create_release_batch( + &self, + entity_list: &Vec<models::ReleaseEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send>; fn create_work(&self, entity: models::WorkEntity) -> Box<Future<Item = CreateWorkResponse, Error = ApiError> + Send>; - fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>; + fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, autoaccept: Option<bool>, editgroup: Option<String>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send>; + + fn delete_container(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send>; + + fn delete_creator(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send>; + + fn delete_file(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send>; + + fn delete_release(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteReleaseResponse, Error = ApiError> + Send>; + + fn delete_work(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send>; fn get_changelog(&self, limit: Option<i64>) -> Box<Future<Item = GetChangelogResponse, Error = ApiError> + Send>; @@ -580,6 +777,16 @@ pub trait ApiNoContext { fn lookup_file(&self, sha1: String) -> Box<Future<Item = LookupFileResponse, Error = ApiError> + Send>; fn 
lookup_release(&self, doi: String) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send>; + + fn update_container(&self, id: String, entity: models::ContainerEntity) -> Box<Future<Item = UpdateContainerResponse, Error = ApiError> + Send>; + + fn update_creator(&self, id: String, entity: models::CreatorEntity) -> Box<Future<Item = UpdateCreatorResponse, Error = ApiError> + Send>; + + fn update_file(&self, id: String, entity: models::FileEntity) -> Box<Future<Item = UpdateFileResponse, Error = ApiError> + Send>; + + fn update_release(&self, id: String, entity: models::ReleaseEntity) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send>; + + fn update_work(&self, id: String, entity: models::WorkEntity) -> Box<Future<Item = UpdateWorkResponse, Error = ApiError> + Send>; } /// Trait to extend an API to make it easy to bind it to a context. @@ -606,16 +813,26 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> { self.api().create_container(entity, &self.context()) } - fn create_container_batch(&self, entity_list: &Vec<models::ContainerEntity>) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> { - self.api().create_container_batch(entity_list, &self.context()) + fn create_container_batch( + &self, + entity_list: &Vec<models::ContainerEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> Box<Future<Item = CreateContainerBatchResponse, Error = ApiError> + Send> { + self.api().create_container_batch(entity_list, autoaccept, editgroup, &self.context()) } fn create_creator(&self, entity: models::CreatorEntity) -> Box<Future<Item = CreateCreatorResponse, Error = ApiError> + Send> { self.api().create_creator(entity, &self.context()) } - fn create_creator_batch(&self, entity_list: &Vec<models::CreatorEntity>) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> { - self.api().create_creator_batch(entity_list, &self.context()) + fn create_creator_batch( + &self, + entity_list: &Vec<models::CreatorEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> Box<Future<Item = CreateCreatorBatchResponse, Error = ApiError> + Send> { + self.api().create_creator_batch(entity_list, autoaccept, editgroup, &self.context()) } fn create_editgroup(&self, entity: models::Editgroup) -> Box<Future<Item = CreateEditgroupResponse, Error = ApiError> + Send> { @@ -626,24 +843,49 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> { self.api().create_file(entity, &self.context()) } - fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { - self.api().create_file_batch(entity_list, &self.context()) + fn create_file_batch(&self, entity_list: &Vec<models::FileEntity>, autoaccept: Option<bool>, editgroup: Option<String>) -> Box<Future<Item = CreateFileBatchResponse, Error = ApiError> + Send> { + self.api().create_file_batch(entity_list, autoaccept, editgroup, &self.context()) } fn create_release(&self, entity: models::ReleaseEntity) -> Box<Future<Item = CreateReleaseResponse, Error = ApiError> + Send> { self.api().create_release(entity, &self.context()) } - fn create_release_batch(&self, entity_list: &Vec<models::ReleaseEntity>) -> Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { - self.api().create_release_batch(entity_list, &self.context()) + fn create_release_batch( + &self, + entity_list: &Vec<models::ReleaseEntity>, + autoaccept: Option<bool>, + editgroup: Option<String>, + ) -> 
Box<Future<Item = CreateReleaseBatchResponse, Error = ApiError> + Send> { + self.api().create_release_batch(entity_list, autoaccept, editgroup, &self.context()) } fn create_work(&self, entity: models::WorkEntity) -> Box<Future<Item = CreateWorkResponse, Error = ApiError> + Send> { self.api().create_work(entity, &self.context()) } - fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { - self.api().create_work_batch(entity_list, &self.context()) + fn create_work_batch(&self, entity_list: &Vec<models::WorkEntity>, autoaccept: Option<bool>, editgroup: Option<String>) -> Box<Future<Item = CreateWorkBatchResponse, Error = ApiError> + Send> { + self.api().create_work_batch(entity_list, autoaccept, editgroup, &self.context()) + } + + fn delete_container(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteContainerResponse, Error = ApiError> + Send> { + self.api().delete_container(id, editgroup, &self.context()) + } + + fn delete_creator(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteCreatorResponse, Error = ApiError> + Send> { + self.api().delete_creator(id, editgroup, &self.context()) + } + + fn delete_file(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteFileResponse, Error = ApiError> + Send> { + self.api().delete_file(id, editgroup, &self.context()) + } + + fn delete_release(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteReleaseResponse, Error = ApiError> + Send> { + self.api().delete_release(id, editgroup, &self.context()) + } + + fn delete_work(&self, id: String, editgroup: Option<String>) -> Box<Future<Item = DeleteWorkResponse, Error = ApiError> + Send> { + self.api().delete_work(id, editgroup, &self.context()) } fn get_changelog(&self, limit: Option<i64>) -> Box<Future<Item = GetChangelogResponse, Error = ApiError> + Send> { @@ -737,6 +979,26 @@ impl<'a, T: Api> ApiNoContext for ContextWrapper<'a, T> { fn lookup_release(&self, doi: String) -> Box<Future<Item = LookupReleaseResponse, Error = ApiError> + Send> { self.api().lookup_release(doi, &self.context()) } + + fn update_container(&self, id: String, entity: models::ContainerEntity) -> Box<Future<Item = UpdateContainerResponse, Error = ApiError> + Send> { + self.api().update_container(id, entity, &self.context()) + } + + fn update_creator(&self, id: String, entity: models::CreatorEntity) -> Box<Future<Item = UpdateCreatorResponse, Error = ApiError> + Send> { + self.api().update_creator(id, entity, &self.context()) + } + + fn update_file(&self, id: String, entity: models::FileEntity) -> Box<Future<Item = UpdateFileResponse, Error = ApiError> + Send> { + self.api().update_file(id, entity, &self.context()) + } + + fn update_release(&self, id: String, entity: models::ReleaseEntity) -> Box<Future<Item = UpdateReleaseResponse, Error = ApiError> + Send> { + self.api().update_release(id, entity, &self.context()) + } + + fn update_work(&self, id: String, entity: models::WorkEntity) -> Box<Future<Item = UpdateWorkResponse, Error = ApiError> + Send> { + self.api().update_work(id, entity, &self.context()) + } } #[cfg(feature = "client")] diff --git a/rust/fatcat-api/src/mimetypes.rs b/rust/fatcat-api/src/mimetypes.rs index 53b582dc..2c54a313 100644 --- a/rust/fatcat-api/src/mimetypes.rs +++ b/rust/fatcat-api/src/mimetypes.rs @@ -10,7 +10,7 @@ pub mod responses { } /// Create Mime objects for the response content types for AcceptEditgroup lazy_static! 
{ - pub static ref ACCEPT_EDITGROUP_UNMERGABLE: Mime = mime!(Application / Json); + pub static ref ACCEPT_EDITGROUP_BAD_REQUEST: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for AcceptEditgroup lazy_static! { @@ -18,6 +18,10 @@ pub mod responses { } /// Create Mime objects for the response content types for AcceptEditgroup lazy_static! { + pub static ref ACCEPT_EDITGROUP_EDIT_CONFLICT: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for AcceptEditgroup + lazy_static! { pub static ref ACCEPT_EDITGROUP_GENERIC_ERROR: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for CreateContainer @@ -192,6 +196,86 @@ pub mod responses { lazy_static! { pub static ref CREATE_WORK_BATCH_GENERIC_ERROR: Mime = mime!(Application / Json); } + /// Create Mime objects for the response content types for DeleteContainer + lazy_static! { + pub static ref DELETE_CONTAINER_DELETED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteContainer + lazy_static! { + pub static ref DELETE_CONTAINER_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteContainer + lazy_static! { + pub static ref DELETE_CONTAINER_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteContainer + lazy_static! { + pub static ref DELETE_CONTAINER_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteCreator + lazy_static! { + pub static ref DELETE_CREATOR_DELETED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteCreator + lazy_static! { + pub static ref DELETE_CREATOR_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteCreator + lazy_static! { + pub static ref DELETE_CREATOR_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteCreator + lazy_static! { + pub static ref DELETE_CREATOR_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteFile + lazy_static! { + pub static ref DELETE_FILE_DELETED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteFile + lazy_static! { + pub static ref DELETE_FILE_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteFile + lazy_static! { + pub static ref DELETE_FILE_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteFile + lazy_static! { + pub static ref DELETE_FILE_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteRelease + lazy_static! { + pub static ref DELETE_RELEASE_DELETED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteRelease + lazy_static! { + pub static ref DELETE_RELEASE_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteRelease + lazy_static! 
{ + pub static ref DELETE_RELEASE_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteRelease + lazy_static! { + pub static ref DELETE_RELEASE_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteWork + lazy_static! { + pub static ref DELETE_WORK_DELETED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteWork + lazy_static! { + pub static ref DELETE_WORK_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteWork + lazy_static! { + pub static ref DELETE_WORK_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for DeleteWork + lazy_static! { + pub static ref DELETE_WORK_GENERIC_ERROR: Mime = mime!(Application / Json); + } /// Create Mime objects for the response content types for GetChangelog lazy_static! { pub static ref GET_CHANGELOG_SUCCESS: Mime = mime!(Application / Json); @@ -532,6 +616,86 @@ pub mod responses { lazy_static! { pub static ref LOOKUP_RELEASE_GENERIC_ERROR: Mime = mime!(Application / Json); } + /// Create Mime objects for the response content types for UpdateContainer + lazy_static! { + pub static ref UPDATE_CONTAINER_UPDATED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateContainer + lazy_static! { + pub static ref UPDATE_CONTAINER_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateContainer + lazy_static! { + pub static ref UPDATE_CONTAINER_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateContainer + lazy_static! { + pub static ref UPDATE_CONTAINER_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateCreator + lazy_static! { + pub static ref UPDATE_CREATOR_UPDATED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateCreator + lazy_static! { + pub static ref UPDATE_CREATOR_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateCreator + lazy_static! { + pub static ref UPDATE_CREATOR_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateCreator + lazy_static! { + pub static ref UPDATE_CREATOR_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateFile + lazy_static! { + pub static ref UPDATE_FILE_UPDATED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateFile + lazy_static! { + pub static ref UPDATE_FILE_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateFile + lazy_static! { + pub static ref UPDATE_FILE_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateFile + lazy_static! { + pub static ref UPDATE_FILE_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateRelease + lazy_static! 
{ + pub static ref UPDATE_RELEASE_UPDATED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateRelease + lazy_static! { + pub static ref UPDATE_RELEASE_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateRelease + lazy_static! { + pub static ref UPDATE_RELEASE_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateRelease + lazy_static! { + pub static ref UPDATE_RELEASE_GENERIC_ERROR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateWork + lazy_static! { + pub static ref UPDATE_WORK_UPDATED_ENTITY: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateWork + lazy_static! { + pub static ref UPDATE_WORK_BAD_REQUEST: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateWork + lazy_static! { + pub static ref UPDATE_WORK_NOT_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for UpdateWork + lazy_static! { + pub static ref UPDATE_WORK_GENERIC_ERROR: Mime = mime!(Application / Json); + } } @@ -581,5 +745,25 @@ pub mod requests { lazy_static! { pub static ref CREATE_WORK_BATCH: Mime = mime!(Application / Json); } + /// Create Mime objects for the request content types for UpdateContainer + lazy_static! { + pub static ref UPDATE_CONTAINER: Mime = mime!(Application / Json); + } + /// Create Mime objects for the request content types for UpdateCreator + lazy_static! { + pub static ref UPDATE_CREATOR: Mime = mime!(Application / Json); + } + /// Create Mime objects for the request content types for UpdateFile + lazy_static! { + pub static ref UPDATE_FILE: Mime = mime!(Application / Json); + } + /// Create Mime objects for the request content types for UpdateRelease + lazy_static! { + pub static ref UPDATE_RELEASE: Mime = mime!(Application / Json); + } + /// Create Mime objects for the request content types for UpdateWork + lazy_static! 
{ + pub static ref UPDATE_WORK: Mime = mime!(Application / Json); + } } diff --git a/rust/fatcat-api/src/server.rs b/rust/fatcat-api/src/server.rs index 68e08515..04d10e14 100644 --- a/rust/fatcat-api/src/server.rs +++ b/rust/fatcat-api/src/server.rs @@ -38,10 +38,11 @@ use swagger::{ApiError, Context, XSpanId}; use models; use { AcceptEditgroupResponse, Api, CreateContainerBatchResponse, CreateContainerResponse, CreateCreatorBatchResponse, CreateCreatorResponse, CreateEditgroupResponse, CreateFileBatchResponse, - CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, - GetContainerResponse, GetCreatorHistoryResponse, GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, - GetFileResponse, GetReleaseFilesResponse, GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, - LookupContainerResponse, LookupCreatorResponse, LookupFileResponse, LookupReleaseResponse, + CreateFileResponse, CreateReleaseBatchResponse, CreateReleaseResponse, CreateWorkBatchResponse, CreateWorkResponse, DeleteContainerResponse, DeleteCreatorResponse, DeleteFileResponse, + DeleteReleaseResponse, DeleteWorkResponse, GetChangelogEntryResponse, GetChangelogResponse, GetContainerHistoryResponse, GetContainerResponse, GetCreatorHistoryResponse, + GetCreatorReleasesResponse, GetCreatorResponse, GetEditgroupResponse, GetEditorChangelogResponse, GetEditorResponse, GetFileHistoryResponse, GetFileResponse, GetReleaseFilesResponse, + GetReleaseHistoryResponse, GetReleaseResponse, GetStatsResponse, GetWorkHistoryResponse, GetWorkReleasesResponse, GetWorkResponse, LookupContainerResponse, LookupCreatorResponse, + LookupFileResponse, LookupReleaseResponse, UpdateContainerResponse, UpdateCreatorResponse, UpdateFileResponse, UpdateReleaseResponse, UpdateWorkResponse, }; header! 
{ (Warning, "Warning") => [String] } @@ -130,11 +131,11 @@ where Ok(response) } - AcceptEditgroupResponse::Unmergable(body) => { + AcceptEditgroupResponse::BadRequest(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(400), body_string)); - response.headers.set(ContentType(mimetypes::responses::ACCEPT_EDITGROUP_UNMERGABLE.clone())); + response.headers.set(ContentType(mimetypes::responses::ACCEPT_EDITGROUP_BAD_REQUEST.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -150,6 +151,16 @@ where Ok(response) } + AcceptEditgroupResponse::EditConflict(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(409), body_string)); + response.headers.set(ContentType(mimetypes::responses::ACCEPT_EDITGROUP_EDIT_CONFLICT.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } AcceptEditgroupResponse::GenericError(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -298,6 +309,11 @@ where context.auth_data = req.extensions.remove::<AuthData>(); context.authorization = req.extensions.remove::<Authorization>(); + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok()); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + // Body parameters (note that non-required body parameters will ignore garbage // values, rather than causing a 400 response). Produce warning header and logs for // any unused fields. @@ -322,7 +338,7 @@ where }; let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?; - match api.create_container_batch(param_entity_list.as_ref(), context).wait() { + match api.create_container_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup, context).wait() { Ok(rsp) => match rsp { CreateContainerBatchResponse::CreatedEntities(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -510,6 +526,11 @@ where context.auth_data = req.extensions.remove::<AuthData>(); context.authorization = req.extensions.remove::<Authorization>(); + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok()); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + // Body parameters (note that non-required body parameters will ignore garbage // values, rather than causing a 400 response). Produce warning header and logs for // any unused fields. 
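The new autoaccept/editgroup query parameters above are parsed leniently: a missing, empty, or unparseable value becomes None instead of triggering a 400. A minimal standalone sketch of that pattern, using a plain HashMap in place of iron's UrlEncodedQuery result (which has the same name-to-values shape):

use std::collections::HashMap;

// Query params arrive as name -> list of raw string values.
fn parse_autoaccept(query_params: &HashMap<String, Vec<String>>) -> Option<bool> {
    query_params
        .get("autoaccept")
        .and_then(|list| list.first())        // first occurrence, if the param is present
        .and_then(|x| x.parse::<bool>().ok()) // "true"/"false" parse; garbage maps to None
}

fn main() {
    let mut q = HashMap::new();
    q.insert("autoaccept".to_string(), vec!["true".to_string()]);
    assert_eq!(parse_autoaccept(&q), Some(true));
    q.insert("autoaccept".to_string(), vec!["not-a-bool".to_string()]);
    assert_eq!(parse_autoaccept(&q), None); // silently ignored, not a 400
}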
@@ -534,7 +555,7 @@ where }; let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?; - match api.create_creator_batch(param_entity_list.as_ref(), context).wait() { + match api.create_creator_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup, context).wait() { Ok(rsp) => match rsp { CreateCreatorBatchResponse::CreatedEntities(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -815,6 +836,11 @@ where context.auth_data = req.extensions.remove::<AuthData>(); context.authorization = req.extensions.remove::<Authorization>(); + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok()); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + // Body parameters (note that non-required body parameters will ignore garbage // values, rather than causing a 400 response). Produce warning header and logs for // any unused fields. @@ -839,7 +865,7 @@ where }; let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?; - match api.create_file_batch(param_entity_list.as_ref(), context).wait() { + match api.create_file_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup, context).wait() { Ok(rsp) => match rsp { CreateFileBatchResponse::CreatedEntities(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -1027,6 +1053,11 @@ where context.auth_data = req.extensions.remove::<AuthData>(); context.authorization = req.extensions.remove::<Authorization>(); + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok()); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + // Body parameters (note that non-required body parameters will ignore garbage // values, rather than causing a 400 response). Produce warning header and logs for // any unused fields. 
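Each DELETE and PUT route added below extracts the :id path segment the same way: percent-decode the raw bytes, require valid UTF-8, then parse() into the target type, turning each failure into a 400. A self-contained sketch of just the decode step (percent-decoding as in the percent_encoding crate, assumed here as a dependency):

use percent_encoding::percent_decode;

// Decode a raw path segment, rejecting bytes that are not valid UTF-8 after decoding.
fn decode_path_param(raw: &str) -> Result<String, String> {
    percent_decode(raw.as_bytes())
        .decode_utf8()
        .map(|cow| cow.into_owned())
        .map_err(|_| format!("Couldn't percent-decode path parameter as UTF-8: {}", raw))
}

fn main() {
    assert_eq!(decode_path_param("abc%20def").unwrap(), "abc def");
    assert!(decode_path_param("%FF").is_err()); // 0xFF alone is not valid UTF-8
}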
@@ -1051,7 +1082,7 @@ where }; let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?; - match api.create_release_batch(param_entity_list.as_ref(), context).wait() { + match api.create_release_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup, context).wait() { Ok(rsp) => match rsp { CreateReleaseBatchResponse::CreatedEntities(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -1239,6 +1270,11 @@ where context.auth_data = req.extensions.remove::<AuthData>(); context.authorization = req.extensions.remove::<Authorization>(); + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_autoaccept = query_params.get("autoaccept").and_then(|list| list.first()).and_then(|x| x.parse::<bool>().ok()); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + // Body parameters (note that non-required body parameters will ignore garbage // values, rather than causing a 400 response). Produce warning header and logs for // any unused fields. @@ -1263,7 +1299,7 @@ where }; let param_entity_list = param_entity_list.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity_list".to_string())))?; - match api.create_work_batch(param_entity_list.as_ref(), context).wait() { + match api.create_work_batch(param_entity_list.as_ref(), param_autoaccept, param_editgroup, context).wait() { Ok(rsp) => match rsp { CreateWorkBatchResponse::CreatedEntities(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); @@ -1331,6 +1367,466 @@ where ); let api_clone = api.clone(); + router.delete( + "/v0/container/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? 
+ }; + + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + + match api.delete_container(param_id, param_editgroup, context).wait() { + Ok(rsp) => match rsp { + DeleteContainerResponse::DeletedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CONTAINER_DELETED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteContainerResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CONTAINER_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteContainerResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CONTAINER_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteContainerResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CONTAINER_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "DeleteContainer", + ); + + let api_clone = api.clone(); + router.delete( + "/v0/creator/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? 
+ .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + + match api.delete_creator(param_id, param_editgroup, context).wait() { + Ok(rsp) => match rsp { + DeleteCreatorResponse::DeletedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CREATOR_DELETED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteCreatorResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CREATOR_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteCreatorResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CREATOR_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteCreatorResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_CREATOR_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "DeleteCreator", + ); + + let api_clone = api.clone(); + router.delete( + "/v0/file/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). 
+ fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + + match api.delete_file(param_id, param_editgroup, context).wait() { + Ok(rsp) => match rsp { + DeleteFileResponse::DeletedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_FILE_DELETED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteFileResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_FILE_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteFileResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_FILE_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteFileResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_FILE_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. 
+ Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "DeleteFile", + ); + + let api_clone = api.clone(); + router.delete( + "/v0/release/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + + match api.delete_release(param_id, param_editgroup, context).wait() { + Ok(rsp) => match rsp { + DeleteReleaseResponse::DeletedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_RELEASE_DELETED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteReleaseResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_RELEASE_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteReleaseResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_RELEASE_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteReleaseResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + 
response.headers.set(ContentType(mimetypes::responses::DELETE_RELEASE_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "DeleteRelease", + ); + + let api_clone = api.clone(); + router.delete( + "/v0/work/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? 
+ }; + + // Query parameters (note that non-required or collection query parameters will ignore garbage values, rather than causing a 400 response) + let query_params = req.get::<UrlEncodedQuery>().unwrap_or_default(); + let param_editgroup = query_params.get("editgroup").and_then(|list| list.first()).and_then(|x| x.parse::<String>().ok()); + + match api.delete_work(param_id, param_editgroup, context).wait() { + Ok(rsp) => match rsp { + DeleteWorkResponse::DeletedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_WORK_DELETED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteWorkResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_WORK_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteWorkResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_WORK_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + DeleteWorkResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::DELETE_WORK_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "DeleteWork", + ); + + let api_clone = api.clone(); router.get( "/v0/changelog", move |req: &mut Request| { @@ -3287,6 +3783,606 @@ where }, "LookupRelease", ); + + let api_clone = api.clone(); + router.put( + "/v0/container/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). 
+ fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Body parameters (note that non-required body parameters will ignore garbage + // values, rather than causing a 400 response). Produce warning header and logs for + // any unused fields. + + let param_entity = req.get::<bodyparser::Raw>() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - not valid UTF-8: {}", e))))?; + + let mut unused_elements = Vec::new(); + + let param_entity = if let Some(param_entity_raw) = param_entity { + let deserializer = &mut serde_json::Deserializer::from_str(&param_entity_raw); + + let param_entity: Option<models::ContainerEntity> = + serde_ignored::deserialize(deserializer, |path| { + warn!("Ignoring unknown field in body: {}", path); + unused_elements.push(path.to_string()); + }).map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - doesn't match schema: {}", e))))?; + + param_entity + } else { + None + }; + let param_entity = param_entity.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity".to_string())))?; + + match api.update_container(param_id, param_entity, context).wait() { + Ok(rsp) => match rsp { + UpdateContainerResponse::UpdatedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CONTAINER_UPDATED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateContainerResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CONTAINER_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateContainerResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404),
body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CONTAINER_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateContainerResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CONTAINER_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "UpdateContainer", + ); + + let api_clone = api.clone(); + router.put( + "/v0/creator/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Body parameters (note that non-required body parameters will ignore garbage + // values, rather than causing a 400 response). Produce warning header and logs for + // any unused fields. 
+ + let param_entity = req.get::<bodyparser::Raw>() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - not valid UTF-8: {}", e))))?; + + let mut unused_elements = Vec::new(); + + let param_entity = if let Some(param_entity_raw) = param_entity { + let deserializer = &mut serde_json::Deserializer::from_str(&param_entity_raw); + + let param_entity: Option<models::CreatorEntity> = + serde_ignored::deserialize(deserializer, |path| { + warn!("Ignoring unknown field in body: {}", path); + unused_elements.push(path.to_string()); + }).map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - doesn't match schema: {}", e))))?; + + param_entity + } else { + None + }; + let param_entity = param_entity.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity".to_string())))?; + + match api.update_creator(param_id, param_entity, context).wait() { + Ok(rsp) => match rsp { + UpdateCreatorResponse::UpdatedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CREATOR_UPDATED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateCreatorResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CREATOR_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateCreatorResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CREATOR_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateCreatorResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_CREATOR_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response.
+ Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "UpdateCreator", + ); + + let api_clone = api.clone(); + router.put( + "/v0/file/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Body parameters (note that non-required body parameters will ignore garbage + // values, rather than causing a 400 response). Produce warning header and logs for + // any unused fields. + + let param_entity = req.get::<bodyparser::Raw>() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - not valid UTF-8: {}", e))))?; + + let mut unused_elements = Vec::new(); + + let param_entity = if let Some(param_entity_raw) = param_entity { + let deserializer = &mut serde_json::Deserializer::from_str(&param_entity_raw); + + let param_entity: Option<models::FileEntity> = + serde_ignored::deserialize(deserializer, |path| { + warn!("Ignoring unknown field in body: {}", path); + unused_elements.push(path.to_string()); + }).map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - doesn't match schema: {}", e))))?; + + param_entity + } else { + None + }; + let param_entity = param_entity.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity".to_string())))?; + + match api.update_file(param_id, param_entity, context).wait() { + Ok(rsp) => match rsp { + UpdateFileResponse::UpdatedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_FILE_UPDATED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateFileResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); +
response.headers.set(ContentType(mimetypes::responses::UPDATE_FILE_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateFileResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_FILE_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateFileResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_FILE_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "UpdateFile", + ); + + let api_clone = api.clone(); + router.put( + "/v0/release/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Body parameters (note that non-required body parameters will ignore garbage + // values, rather than causing a 400 response). Produce warning header and logs for + // any unused fields. 
+ + let param_entity = req.get::<bodyparser::Raw>() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - not valid UTF-8: {}", e))))?; + + let mut unused_elements = Vec::new(); + + let param_entity = if let Some(param_entity_raw) = param_entity { + let deserializer = &mut serde_json::Deserializer::from_str(&param_entity_raw); + + let param_entity: Option<models::ReleaseEntity> = + serde_ignored::deserialize(deserializer, |path| { + warn!("Ignoring unknown field in body: {}", path); + unused_elements.push(path.to_string()); + }).map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - doesn't match schema: {}", e))))?; + + param_entity + } else { + None + }; + let param_entity = param_entity.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity".to_string())))?; + + match api.update_release(param_id, param_entity, context).wait() { + Ok(rsp) => match rsp { + UpdateReleaseResponse::UpdatedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_RELEASE_UPDATED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateReleaseResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_RELEASE_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateReleaseResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_RELEASE_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateReleaseResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_RELEASE_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response.
+ Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "UpdateRelease", + ); + + let api_clone = api.clone(); + router.put( + "/v0/work/:id", + move |req: &mut Request| { + let mut context = Context::default(); + + // Helper function to provide a code block to use `?` in (to be replaced by the `catch` block when it exists). + fn handle_request<T>(req: &mut Request, api: &T, context: &mut Context) -> Result<Response, Response> + where + T: Api, + { + context.x_span_id = Some(req.headers.get::<XSpanId>().map(XSpanId::to_string).unwrap_or_else(|| self::uuid::Uuid::new_v4().to_string())); + context.auth_data = req.extensions.remove::<AuthData>(); + context.authorization = req.extensions.remove::<Authorization>(); + + // Path parameters + let param_id = { + let param = req.extensions + .get::<Router>() + .ok_or_else(|| Response::with((status::InternalServerError, "An internal error occurred".to_string())))? + .find("id") + .ok_or_else(|| Response::with((status::BadRequest, "Missing path parameter id".to_string())))?; + percent_decode(param.as_bytes()) + .decode_utf8() + .map_err(|_| Response::with((status::BadRequest, format!("Couldn't percent-decode path parameter as UTF-8: {}", param))))? + .parse() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse path parameter id: {}", e))))? + }; + + // Body parameters (note that non-required body parameters will ignore garbage + // values, rather than causing a 400 response). Produce warning header and logs for + // any unused fields. + + let param_entity = req.get::<bodyparser::Raw>() + .map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - not valid UTF-8: {}", e))))?; + + let mut unused_elements = Vec::new(); + + let param_entity = if let Some(param_entity_raw) = param_entity { + let deserializer = &mut serde_json::Deserializer::from_str(&param_entity_raw); + + let param_entity: Option<models::WorkEntity> = + serde_ignored::deserialize(deserializer, |path| { + warn!("Ignoring unknown field in body: {}", path); + unused_elements.push(path.to_string()); + }).map_err(|e| Response::with((status::BadRequest, format!("Couldn't parse body parameter entity - doesn't match schema: {}", e))))?; + + param_entity + } else { + None + }; + let param_entity = param_entity.ok_or_else(|| Response::with((status::BadRequest, "Missing required body parameter entity".to_string())))?; + + match api.update_work(param_id, param_entity, context).wait() { + Ok(rsp) => match rsp { + UpdateWorkResponse::UpdatedEntity(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(200), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_WORK_UPDATED_ENTITY.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateWorkResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); +
response.headers.set(ContentType(mimetypes::responses::UPDATE_WORK_BAD_REQUEST.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateWorkResponse::NotFound(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(404), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_WORK_NOT_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + UpdateWorkResponse::GenericError(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(500), body_string)); + response.headers.set(ContentType(mimetypes::responses::UPDATE_WORK_GENERIC_ERROR.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + if !unused_elements.is_empty() { + response.headers.set(Warning(format!("Ignoring unknown fields in body: {:?}", unused_elements))); + } + Ok(response) + } + }, + Err(_) => { + // Application code returned an error. This should not happen, as the implementation should + // return a valid response. + Err(Response::with((status::InternalServerError, "An internal error occurred".to_string()))) + } + } + } + + handle_request(req, &api_clone, &mut context).or_else(|mut response| { + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + Ok(response) + }) + }, + "UpdateWork", + ); } /// Middleware to extract authentication data from request diff --git a/rust/migrations/2018-05-12-001226_init/up.sql b/rust/migrations/2018-05-12-001226_init/up.sql index 873700ab..6b0e9c6e 100644 --- a/rust/migrations/2018-05-12-001226_init/up.sql +++ b/rust/migrations/2018-05-12-001226_init/up.sql @@ -147,6 +147,8 @@ CREATE TABLE file_rev_url ( url TEXT NOT NULL ); +CREATE INDEX file_rev_url_rev_idx ON file_rev_url(file_rev); + CREATE TABLE file_ident ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), is_live BOOL NOT NULL DEFAULT false, diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 489631b3..925a6073 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -5,8 +5,12 @@ use diesel; use diesel::prelude::*; use errors::*; use regex::Regex; +use std::str::FromStr; use uuid::Uuid; +pub type DbConn = + diesel::r2d2::PooledConnection<diesel::r2d2::ConnectionManager<diesel::PgConnection>>; + /// This function should always be run within a transaction pub fn get_or_create_editgroup(editor_id: Uuid, conn: &PgConnection) -> Result<Uuid> { // check for current active @@ -34,10 +38,7 @@ pub fn accept_editgroup(editgroup_id: Uuid, conn: &PgConnection) -> Result<Chang .count() .get_result(conn)?; if count > 0 { - bail!( - "editgroup {} has already been accepted", - editgroup_id.to_string() - ); + return Err(ErrorKind::EditgroupAlreadyAccepted(uuid2fcid(&editgroup_id)).into()); } // for each entity type... 
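The typed error returned above assumes an `error_chain!` variant along these lines. This is only a sketch; the actual definition lives in rust/src/errors.rs, which is not part of this diff:

    error_chain! {
        errors {
            // hedged sketch of the variant this code assumes
            EditgroupAlreadyAccepted(id: String) {
                description("editgroup was already accepted")
                display("editgroup {} has already been accepted", id)
            }
        }
    }

Returning a structured variant instead of a formatted `bail!` string is what lets the API wrapper layer match on `ErrorKind::EditgroupAlreadyAccepted` and map it to a 400 BadRequest rather than a generic 500 (see the `AcceptEditgroupResponse` arm in api_wrappers.rs below).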
@@ -90,6 +91,31 @@ pub fn accept_editgroup(editgroup_id: Uuid, conn: &PgConnection) -> Result<Chang Ok(entry) } +#[derive(Clone)] +pub struct FatCatId(Uuid); + +impl ToString for FatCatId { + fn to_string(&self) -> String { + uuid2fcid(&self.to_uuid()) + } +} + +impl FromStr for FatCatId { + type Err = Error; + fn from_str(s: &str) -> Result<FatCatId> { + fcid2uuid(s).map(|u| FatCatId(u)) + } +} + +impl FatCatId { + pub fn to_uuid(&self) -> Uuid { + self.0 + } + pub fn from_uuid(u: &Uuid) -> FatCatId { + FatCatId(u.clone()) + } +} + /// Convert fatcat IDs (base32 strings) to UUID pub fn fcid2uuid(fcid: &str) -> Result<Uuid> { if fcid.len() != 26 { @@ -181,7 +207,7 @@ pub fn check_issn(raw: &str) -> Result<()> { pub fn check_orcid(raw: &str) -> Result<()> { lazy_static! { - static ref RE: Regex = Regex::new(r"^\d{4}-\d{4}-\d{4}-\d{4}$").unwrap(); + static ref RE: Regex = Regex::new(r"^\d{4}-\d{4}-\d{4}-\d{3}[\dX]$").unwrap(); } if RE.is_match(raw) { Ok(()) @@ -193,6 +219,14 @@ pub fn check_orcid(raw: &str) -> Result<()> { } } +#[test] +fn test_check_orcid() { + assert!(check_orcid("0123-4567-3456-6789").is_ok()); + assert!(check_orcid("0123-4567-3456-678X").is_ok()); + assert!(check_orcid("01234567-3456-6780").is_err()); + assert!(check_orcid("0x23-4567-3456-6780").is_err()); +} + // TODO: make the above checks "more correct" // TODO: check ISBN-13 // TODO: check hashes (SHA-1, etc) diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index b445d63a..31b71395 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -2,6 +2,7 @@ use api_helpers::*; use chrono; +use database_entity_crud::{EditContext, EntityCrud}; use database_models::*; use database_schema::{ abstracts, changelog, container_edit, container_ident, container_rev, creator_edit, @@ -14,54 +15,34 @@ use diesel::{self, insert_into}; use errors::*; use fatcat_api::models; use fatcat_api::models::*; -use sha1::Sha1; +use std::str::FromStr; use uuid::Uuid; use ConnectionPool; -type DbConn = diesel::r2d2::PooledConnection<diesel::r2d2::ConnectionManager<diesel::PgConnection>>; - macro_rules! entity_batch_handler { - ($post_handler:ident, $post_batch_handler:ident, $model:ident) => { + ($post_batch_handler:ident, $model:ident) => { pub fn $post_batch_handler( &self, entity_list: &[models::$model], + autoaccept: bool, + editgroup: Option<String>, conn: &DbConn, ) -> Result<Vec<EntityEdit>> { - let mut ret: Vec<EntityEdit> = vec![]; - for entity in entity_list { - ret.push(self.$post_handler(entity.clone(), conn)?); - } - Ok(ret) - } - } -} - -macro_rules! 
entity_history_handler { - ($history_handler:ident, $edit_row_type:ident, $edit_table:ident) => { - pub fn $history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - let limit = limit.unwrap_or(50); - - let rows: Vec<(EditgroupRow, ChangelogRow, $edit_row_type)> = editgroup::table - .inner_join(changelog::table) - .inner_join($edit_table::table) - .filter($edit_table::ident_id.eq(id)) - .order(changelog::id.desc()) - .limit(limit) - .get_results(conn)?; - let history: Vec<EntityHistoryEntry> = rows.into_iter() - .map(|(eg_row, cl_row, e_row)| EntityHistoryEntry { - edit: e_row.into_model().expect("edit row to model"), - editgroup: eg_row.into_model_partial(), - changelog_entry: cl_row.into_model(), - }) - .collect(); - Ok(history) + let editgroup_id: Option<FatCatId> = match editgroup { + Some(s) => Some(FatCatId::from_str(&s)?), + None => None, + }; + let edit_context = make_edit_context(conn, editgroup_id.clone(), autoaccept)?; + let model_list: Vec<&models::$model> = entity_list.iter().map(|e| e).collect(); + let edits = $model::db_create_batch(conn, &edit_context, model_list.as_slice())?; + + if autoaccept { + let _clr: ChangelogRow = diesel::insert_into(changelog::table) + .values((changelog::editgroup_id.eq(edit_context.editgroup_id.to_uuid()),)) + .get_result(conn)?; + } + edits.into_iter().map(|e| e.into_model()).collect() } } } @@ -77,224 +58,30 @@ macro_rules! count_entity { }}; } -#[derive(Clone)] -pub struct Server { - pub db_pool: ConnectionPool, -} - -fn container_row2entity( - ident: Option<ContainerIdentRow>, - rev: ContainerRevRow, -) -> Result<ContainerEntity> { - let (state, ident_id, redirect_id) = match ident { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(uuid2fcid(&i.id)), - i.redirect_id.map(|u| uuid2fcid(&u)), - ), - None => (None, None, None), +fn make_edit_context(conn: &DbConn, editgroup_id: Option<FatCatId>, autoaccept: bool) -> Result<EditContext> { + let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth + let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { + (Some(eg), _) => eg, + // If autoaccept and no editgroup_id passed, always create a new one for this transaction + (None, true) => { + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(editor_id),)) + .get_result(conn)?; + FatCatId::from_uuid(&eg_row.id) + }, + (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), }; - Ok(ContainerEntity { - issnl: rev.issnl, - wikidata_qid: rev.wikidata_qid, - publisher: rev.publisher, - name: rev.name, - abbrev: rev.abbrev, - coden: rev.coden, - state: state, - ident: ident_id, - revision: Some(rev.id.to_string()), - redirect: redirect_id, - extra: rev.extra_json, - editgroup_id: None, - }) -} - -fn creator_row2entity(ident: Option<CreatorIdentRow>, rev: CreatorRevRow) -> Result<CreatorEntity> { - let (state, ident_id, redirect_id) = match ident { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(uuid2fcid(&i.id)), - i.redirect_id.map(|u| uuid2fcid(&u)), - ), - None => (None, None, None), - }; - Ok(CreatorEntity { - display_name: rev.display_name, - given_name: rev.given_name, - surname: rev.surname, - orcid: rev.orcid, - wikidata_qid: rev.wikidata_qid, - state: state, - ident: ident_id, - revision: Some(rev.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev.extra_json, - }) -} - -fn file_row2entity( - ident: Option<FileIdentRow>, - 
rev: FileRevRow, - conn: &DbConn, -) -> Result<FileEntity> { - let (state, ident_id, redirect_id) = match ident { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(uuid2fcid(&i.id)), - i.redirect_id.map(|u| uuid2fcid(&u)), - ), - None => (None, None, None), - }; - - let releases: Vec<String> = file_release::table - .filter(file_release::file_rev.eq(rev.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileReleaseRow| uuid2fcid(&r.target_release_ident_id)) - .collect(); - - let urls: Vec<FileEntityUrls> = file_rev_url::table - .filter(file_rev_url::file_rev.eq(rev.id)) - .get_results(conn)? - .into_iter() - .map(|r: FileRevUrlRow| FileEntityUrls { - rel: r.rel, - url: r.url, - }) - .collect(); - - Ok(FileEntity { - sha1: rev.sha1, - sha256: rev.sha256, - md5: rev.md5, - size: rev.size.map(|v| v as i64), - urls: Some(urls), - mimetype: rev.mimetype, - releases: Some(releases), - state: state, - ident: ident_id, - revision: Some(rev.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev.extra_json, + Ok(EditContext { + editor_id: FatCatId::from_uuid(&editor_id), + editgroup_id: editgroup_id, + extra_json: None, + autoaccept: autoaccept, }) } -fn release_row2entity( - ident: Option<ReleaseIdentRow>, - rev: ReleaseRevRow, - conn: &DbConn, -) -> Result<ReleaseEntity> { - let (state, ident_id, redirect_id) = match ident { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(uuid2fcid(&i.id)), - i.redirect_id.map(|u| uuid2fcid(&u)), - ), - None => (None, None, None), - }; - - let refs: Vec<ReleaseRef> = release_ref::table - .filter(release_ref::release_rev.eq(rev.id)) - .order(release_ref::index_val.asc()) - .get_results(conn) - .expect("fetch release refs") - .into_iter() - .map(|r: ReleaseRefRow| ReleaseRef { - index: r.index_val, - key: r.key, - extra: r.extra_json, - container_title: r.container_title, - year: r.year, - title: r.title, - locator: r.locator, - target_release_id: r.target_release_ident_id.map(|v| uuid2fcid(&v)), - }) - .collect(); - - let contribs: Vec<ReleaseContrib> = release_contrib::table - .filter(release_contrib::release_rev.eq(rev.id)) - .order((release_contrib::role.asc(), release_contrib::index_val.asc())) - .get_results(conn) - .expect("fetch release refs") - .into_iter() - .map(|c: ReleaseContribRow| ReleaseContrib { - index: c.index_val, - raw_name: c.raw_name, - role: c.role, - extra: c.extra_json, - creator_id: c.creator_ident_id.map(|v| uuid2fcid(&v)), - creator: None, - }) - .collect(); - - let abstracts: Vec<ReleaseEntityAbstracts> = release_rev_abstract::table - .inner_join(abstracts::table) - .filter(release_rev_abstract::release_rev.eq(rev.id)) - .get_results(conn)? 
- .into_iter() - .map( - |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts { - sha1: Some(r.0.abstract_sha1), - mimetype: r.0.mimetype, - lang: r.0.lang, - content: Some(r.1.content), - }, - ) - .collect(); - - Ok(ReleaseEntity { - title: rev.title, - release_type: rev.release_type, - release_status: rev.release_status, - release_date: rev.release_date - .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)), - doi: rev.doi, - pmid: rev.pmid, - pmcid: rev.pmcid, - isbn13: rev.isbn13, - core_id: rev.core_id, - wikidata_qid: rev.wikidata_qid, - volume: rev.volume, - issue: rev.issue, - pages: rev.pages, - files: None, - container: None, - container_id: rev.container_ident_id.map(|u| uuid2fcid(&u)), - publisher: rev.publisher, - language: rev.language, - work_id: Some(uuid2fcid(&rev.work_ident_id)), - refs: Some(refs), - contribs: Some(contribs), - abstracts: Some(abstracts), - state: state, - ident: ident_id, - revision: Some(rev.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev.extra_json, - }) -} - -fn work_row2entity(ident: Option<WorkIdentRow>, rev: WorkRevRow) -> Result<WorkEntity> { - let (state, ident_id, redirect_id) = match ident { - Some(i) => ( - Some(i.state().unwrap().shortname()), - Some(uuid2fcid(&i.id)), - i.redirect_id.map(|u| uuid2fcid(&u)), - ), - None => (None, None, None), - }; - Ok(WorkEntity { - state: state, - ident: ident_id, - revision: Some(rev.id.to_string()), - redirect: redirect_id, - editgroup_id: None, - extra: rev.extra_json, - }) +#[derive(Clone)] +pub struct Server { + pub db_pool: ConnectionPool, } impl Server { @@ -304,13 +91,7 @@ impl Server { _expand: Option<String>, conn: &DbConn, ) -> Result<ContainerEntity> { - // TODO: handle Deletions - let (ident, rev): (ContainerIdentRow, ContainerRevRow) = container_ident::table - .find(id) - .inner_join(container_rev::table) - .first(conn)?; - - container_row2entity(Some(ident), rev) + ContainerEntity::db_get(conn, FatCatId::from_uuid(id)) } pub fn lookup_container_handler(&self, issnl: &str, conn: &DbConn) -> Result<ContainerEntity> { @@ -318,11 +99,14 @@ impl Server { let (ident, rev): (ContainerIdentRow, ContainerRevRow) = container_ident::table .inner_join(container_rev::table) .filter(container_rev::issnl.eq(issnl)) + // This NOT NULL is here to ensure the postgresql query planner that it can use an + // index + .filter(container_rev::issnl.is_not_null()) .filter(container_ident::is_live.eq(true)) .filter(container_ident::redirect_id.is_null()) .first(conn)?; - container_row2entity(Some(ident), rev) + ContainerEntity::db_from_row(conn, rev, Some(ident)) } pub fn get_creator_handler( @@ -331,12 +115,7 @@ impl Server { _expand: Option<String>, conn: &DbConn, ) -> Result<CreatorEntity> { - let (ident, rev): (CreatorIdentRow, CreatorRevRow) = creator_ident::table - .find(id) - .inner_join(creator_rev::table) - .first(conn)?; - - creator_row2entity(Some(ident), rev) + CreatorEntity::db_get(conn, FatCatId::from_uuid(id)) } pub fn lookup_creator_handler(&self, orcid: &str, conn: &DbConn) -> Result<CreatorEntity> { @@ -344,11 +123,14 @@ impl Server { let (ident, rev): (CreatorIdentRow, CreatorRevRow) = creator_ident::table .inner_join(creator_rev::table) .filter(creator_rev::orcid.eq(orcid)) + // This NOT NULL is here to ensure the postgresql query planner that it can use an + // index + .filter(creator_rev::orcid.is_not_null()) .filter(creator_ident::is_live.eq(true)) .filter(creator_ident::redirect_id.is_null()) .first(conn)?; - 
creator_row2entity(Some(ident), rev) + CreatorEntity::db_from_row(conn, rev, Some(ident)) } pub fn get_creator_releases_handler( @@ -367,8 +149,9 @@ impl Server { .filter(release_ident::redirect_id.is_null()) .load(conn)?; + // TODO: from_rows, not from_row? rows.into_iter() - .map(|(rev, ident, _)| release_row2entity(Some(ident), rev, conn)) + .map(|(rev, ident, _)| ReleaseEntity::db_from_row(conn, rev, Some(ident))) .collect() } @@ -378,23 +161,21 @@ impl Server { _expand: Option<String>, conn: &DbConn, ) -> Result<FileEntity> { - let (ident, rev): (FileIdentRow, FileRevRow) = file_ident::table - .find(id) - .inner_join(file_rev::table) - .first(conn)?; - - file_row2entity(Some(ident), rev, conn) + FileEntity::db_get(conn, FatCatId::from_uuid(id)) } pub fn lookup_file_handler(&self, sha1: &str, conn: &DbConn) -> Result<FileEntity> { let (ident, rev): (FileIdentRow, FileRevRow) = file_ident::table .inner_join(file_rev::table) .filter(file_rev::sha1.eq(sha1)) + // This NOT NULL is here to ensure the postgresql query planner that it can use an + // index + .filter(file_rev::sha1.is_not_null()) .filter(file_ident::is_live.eq(true)) .filter(file_ident::redirect_id.is_null()) .first(conn)?; - file_row2entity(Some(ident), rev, conn) + FileEntity::db_from_row(conn, rev, Some(ident)) } pub fn get_release_handler( @@ -403,12 +184,7 @@ impl Server { expand: Option<String>, conn: &DbConn, ) -> Result<ReleaseEntity> { - let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = release_ident::table - .find(id) - .inner_join(release_rev::table) - .first(conn)?; - - let mut release = release_row2entity(Some(ident), rev, conn)?; + let mut release = ReleaseEntity::db_get(conn, FatCatId::from_uuid(id))?; // For now, if there is any expand param we do them all if expand.is_some() { @@ -419,7 +195,6 @@ impl Server { Some(self.get_container_handler(&fcid2uuid(&cid)?, None, conn)?); } } - Ok(release) } @@ -428,26 +203,29 @@ impl Server { let (ident, rev): (ReleaseIdentRow, ReleaseRevRow) = release_ident::table .inner_join(release_rev::table) .filter(release_rev::doi.eq(doi)) + // This NOT NULL is here to ensure the postgresql query planner that it can use an + // index + .filter(release_rev::doi.is_not_null()) .filter(release_ident::is_live.eq(true)) .filter(release_ident::redirect_id.is_null()) .first(conn)?; - release_row2entity(Some(ident), rev, conn) + ReleaseEntity::db_from_row(conn, rev, Some(ident)) } pub fn get_release_files_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<FileEntity>> { - let id = fcid2uuid(&id)?; + let ident = FatCatId::from_str(id)?; let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table .inner_join(file_ident::table) .inner_join(file_release::table) - .filter(file_release::target_release_ident_id.eq(&id)) + .filter(file_release::target_release_ident_id.eq(&ident.to_uuid())) .filter(file_ident::is_live.eq(true)) .filter(file_ident::redirect_id.is_null()) .load(conn)?; rows.into_iter() - .map(|(rev, ident, _)| file_row2entity(Some(ident), rev, conn)) + .map(|(rev, ident, _)| FileEntity::db_from_row(conn, rev, Some(ident))) .collect() } @@ -457,12 +235,7 @@ impl Server { _expand: Option<String>, conn: &DbConn, ) -> Result<WorkEntity> { - let (ident, rev): (WorkIdentRow, WorkRevRow) = work_ident::table - .find(id) - .inner_join(work_rev::table) - .first(conn)?; - - work_row2entity(Some(ident), rev) + WorkEntity::db_get(conn, FatCatId::from_uuid(id)) } pub fn get_work_releases_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<ReleaseEntity>> { @@ -476,7 +249,7 
@@ impl Server {
         .load(conn)?;
 
         rows.into_iter()
-            .map(|(rev, ident)| release_row2entity(Some(ident), rev, conn))
+            .map(|(rev, ident)| ReleaseEntity::db_from_row(conn, rev, Some(ident)))
             .collect()
     }
 
@@ -485,37 +258,29 @@ impl Server {
         entity: models::ContainerEntity,
         conn: &DbConn,
     ) -> Result<EntityEdit> {
-        let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
-        let editgroup_id: Uuid = match entity.editgroup_id {
-            None => get_or_create_editgroup(editor_id, conn)?,
-            Some(param) => fcid2uuid(&param)?,
-        };
-        if let Some(ref extid) = entity.wikidata_qid {
-            check_wikidata_qid(extid)?;
-        }
-        if let Some(ref extid) = entity.issnl {
-            check_issn(extid)?;
-        }
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_create(conn, &edit_context)?;
+        edit.into_model()
+    }
-        let rev_id: Uuid = insert_into(container_rev::table)
-            .values((container_rev::name.eq(entity.name),
-                     container_rev::publisher.eq(entity.publisher),
-                     container_rev::issnl.eq(entity.issnl),
-                     container_rev::wikidata_qid.eq(entity.wikidata_qid),
-                     container_rev::abbrev.eq(entity.abbrev),
-                     container_rev::coden.eq(entity.coden),
-                     container_rev::extra_json.eq(entity.extra)))
-            .returning(container_rev::id)
-            .get_result(conn)?;
-        let ident_id: Uuid = insert_into(container_ident::table)
-            .values(container_ident::rev_id.eq(rev_id))
-            .returning(container_ident::id)
-            .get_result(conn)?;
-        let edit: ContainerEditRow = insert_into(container_edit::table)
-            .values((container_edit::editgroup_id.eq(editgroup_id),
-                     container_edit::ident_id.eq(ident_id),
-                     container_edit::rev_id.eq(rev_id)))
-            .get_result(conn)?;
+    pub fn update_container_handler(
+        &self,
+        id: &Uuid,
+        entity: models::ContainerEntity,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+        edit.into_model()
+    }
+    pub fn delete_container_handler(
+        &self,
+        id: &Uuid,
+        editgroup_id: Option<Uuid>,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+        let edit = ContainerEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
         edit.into_model()
     }
 
@@ -524,37 +289,29 @@ impl Server {
         entity: models::CreatorEntity,
         conn: &DbConn,
     ) -> Result<EntityEdit> {
-        let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
-        let editgroup_id = match entity.editgroup_id {
-            None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
-            Some(param) => fcid2uuid(&param)?,
-        };
-        if let Some(ref extid) = entity.orcid {
-            check_orcid(extid)?;
-        }
-        if let Some(ref extid) = entity.wikidata_qid {
-            check_wikidata_qid(extid)?;
-        }
-
-        let rev_id: Uuid = insert_into(creator_rev::table)
-            .values((creator_rev::display_name.eq(entity.display_name),
-                     creator_rev::given_name.eq(entity.given_name),
-                     creator_rev::surname.eq(entity.surname),
-                     creator_rev::orcid.eq(entity.orcid),
-                     creator_rev::wikidata_qid.eq(entity.wikidata_qid),
-                     creator_rev::extra_json.eq(entity.extra)))
-            .returning(creator_rev::id)
-            .get_result(conn)?;
-        let ident_id: Uuid = insert_into(creator_ident::table)
-            .values(creator_ident::rev_id.eq(rev_id))
-            .returning(creator_ident::id)
-            .get_result(conn)?;
-        let edit: CreatorEditRow = insert_into(creator_edit::table)
-            .values((creator_edit::editgroup_id.eq(editgroup_id),
-                     creator_edit::ident_id.eq(ident_id),
-                     creator_edit::rev_id.eq(rev_id)))
-            .get_result(conn)?;
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_create(conn, &edit_context)?;
+        edit.into_model()
+    }
+    pub fn update_creator_handler(
+        &self,
+        id: &Uuid,
+        entity: models::CreatorEntity,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+        edit.into_model()
+    }
+    pub fn delete_creator_handler(
+        &self,
+        id: &Uuid,
+        editgroup_id: Option<Uuid>,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+        let edit = CreatorEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
         edit.into_model()
     }
 
@@ -563,77 +320,29 @@ impl Server {
         entity: models::FileEntity,
         conn: &DbConn,
     ) -> Result<EntityEdit> {
-        let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
-        let editgroup_id = match entity.editgroup_id {
-            None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
-            Some(param) => fcid2uuid(&param)?,
-        };
-
-        let rev_id: Uuid = insert_into(file_rev::table)
-            .values((file_rev::size.eq(entity.size),
-                     file_rev::sha1.eq(entity.sha1),
-                     file_rev::sha256.eq(entity.sha256),
-                     file_rev::md5.eq(entity.md5),
-                     file_rev::mimetype.eq(entity.mimetype),
-                     file_rev::extra_json.eq(entity.extra)))
-            .returning(file_rev::id)
-            .get_result(conn)?;
-        let ident_id: Uuid = insert_into(file_ident::table)
-            .values(file_ident::rev_id.eq(rev_id))
-            .returning(file_ident::id)
-            .get_result(conn)?;
-        let edit: FileEditRow = insert_into(file_edit::table)
-            .values((file_edit::editgroup_id.eq(editgroup_id),
-                     file_edit::ident_id.eq(ident_id),
-                     file_edit::rev_id.eq(rev_id)))
-            .get_result(conn)?;
-
-        let _releases: Option<Vec<FileReleaseRow>> = match entity.releases {
-            None => None,
-            Some(release_list) => {
-                if release_list.is_empty() {
-                    Some(vec![])
-                } else {
-                    let release_rows: Vec<FileReleaseRow> = release_list
-                        .iter()
-                        .map(|r| FileReleaseRow {
-                            file_rev: edit.rev_id.unwrap(),
-                            target_release_ident_id: fcid2uuid(r)
-                                .expect("invalid fatcat identifier"),
-                        })
-                        .collect();
-                    let release_rows: Vec<FileReleaseRow> = insert_into(file_release::table)
-                        .values(release_rows)
-                        .get_results(conn)
-                        .expect("error inserting file_releases");
-                    Some(release_rows)
-                }
-            }
-        };
-
-        let _urls: Option<Vec<FileRevUrlRow>> = match entity.urls {
-            None => None,
-            Some(url_list) => {
-                if url_list.is_empty() {
-                    Some(vec![])
-                } else {
-                    let url_rows: Vec<FileRevUrlNewRow> = url_list
-                        .into_iter()
-                        .map(|u| FileRevUrlNewRow {
-                            file_rev: edit.rev_id.unwrap(),
-                            rel: u.rel,
-                            url: u.url,
-                        })
-                        .collect();
-                    let url_rows: Vec<FileRevUrlRow> = insert_into(file_rev_url::table)
-                        .values(url_rows)
-                        .get_results(conn)
-                        .expect("error inserting file_rev_url");
-                    Some(url_rows)
-                }
-            }
-        };
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_create(conn, &edit_context)?;
+        edit.into_model()
+    }
+    pub fn update_file_handler(
+        &self,
+        id: &Uuid,
+        entity: models::FileEntity,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+        edit.into_model()
+    }
+    pub fn delete_file_handler(
+        &self,
+        id: &Uuid,
+        editgroup_id: Option<Uuid>,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+        let edit = FileEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
        edit.into_model()
     }
 
@@ -642,172 +351,29 @@ impl Server {
         entity: models::ReleaseEntity,
         conn: &DbConn,
     ) -> Result<EntityEdit> {
-        let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
-        let editgroup_id = match entity.editgroup_id {
-            None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
-            Some(param) => fcid2uuid(&param)?,
-        };
-        if let Some(ref extid) = entity.doi {
-            check_doi(extid)?;
-        }
-        if let Some(ref extid) = entity.pmid {
-            check_pmid(extid)?;
-        }
-        if let Some(ref extid) = entity.pmcid {
-            check_pmcid(extid)?;
-        }
-        if let Some(ref extid) = entity.wikidata_qid {
-            check_wikidata_qid(extid)?;
-        }
-
-        let work_id = match entity.work_id {
-            Some(work_id) => fcid2uuid(&work_id)?,
-            None => {
-                // If a work_id wasn't passed, create a new work under the current editgroup
-                let work_model = models::WorkEntity {
-                    ident: None,
-                    revision: None,
-                    redirect: None,
-                    state: None,
-                    editgroup_id: Some(uuid2fcid(&editgroup_id)),
-                    extra: None,
-                };
-                let new_entity = self.create_work_handler(work_model, conn)?;
-                fcid2uuid(&new_entity.ident)?
-            }
-        };
-
-        let container_id: Option<Uuid> = match entity.container_id {
-            Some(id) => Some(fcid2uuid(&id)?),
-            None => None,
-        };
-
-        let rev_id: Uuid = insert_into(release_rev::table)
-            .values((release_rev::title.eq(entity.title),
-                     release_rev::release_type.eq(entity.release_type),
-                     release_rev::release_status.eq(entity.release_status),
-                     release_rev::release_date.eq(entity.release_date.map(|v| v.naive_utc().date())),
-                     release_rev::doi.eq(entity.doi),
-                     release_rev::pmid.eq(entity.pmid),
-                     release_rev::pmcid.eq(entity.pmcid),
-                     release_rev::wikidata_qid.eq(entity.wikidata_qid),
-                     release_rev::isbn13.eq(entity.isbn13),
-                     release_rev::core_id.eq(entity.core_id),
-                     release_rev::volume.eq(entity.volume),
-                     release_rev::issue.eq(entity.issue),
-                     release_rev::pages.eq(entity.pages),
-                     release_rev::work_ident_id.eq(work_id),
-                     release_rev::container_ident_id.eq(container_id),
-                     release_rev::publisher.eq(entity.publisher),
-                     release_rev::language.eq(entity.language),
-                     release_rev::extra_json.eq(entity.extra)))
-            .returning(release_rev::id)
-            .get_result(conn)?;
-        let ident_id: Uuid = insert_into(release_ident::table)
-            .values(release_ident::rev_id.eq(rev_id))
-            .returning(release_ident::id)
-            .get_result(conn)?;
-        let edit: ReleaseEditRow = insert_into(release_edit::table)
-            .values((release_edit::editgroup_id.eq(editgroup_id),
-                     release_edit::ident_id.eq(ident_id),
-                     release_edit::rev_id.eq(rev_id)))
-            .get_result(conn)?;
-
-        let _refs: Option<Vec<ReleaseRefRow>> = match entity.refs {
-            None => None,
-            Some(ref_list) => {
-                if ref_list.is_empty() {
-                    Some(vec![])
-                } else {
-                    let ref_rows: Vec<ReleaseRefNewRow> = ref_list
-                        .iter()
-                        .map(|r| ReleaseRefNewRow {
-                            release_rev: edit.rev_id.unwrap(),
-                            target_release_ident_id: r.target_release_id
-                                .clone()
-                                .map(|v| fcid2uuid(&v).expect("valid fatcat identifier")),
-                            index_val: r.index,
-                            key: r.key.clone(),
-                            container_title: r.container_title.clone(),
-                            year: r.year,
-                            title: r.title.clone(),
-                            locator: r.locator.clone(),
-                            extra_json: r.extra.clone(),
-                        })
-                        .collect();
-                    let ref_rows: Vec<ReleaseRefRow> = insert_into(release_ref::table)
-                        .values(ref_rows)
-                        .get_results(conn)
-                        .expect("error inserting release_refs");
-                    Some(ref_rows)
-                }
-            }
-        };
-
-        let _contribs: Option<Vec<ReleaseContribRow>> = match entity.contribs {
-            None => None,
-            Some(contrib_list) => {
-                if contrib_list.is_empty() {
-                    Some(vec![])
-                } else {
-                    let contrib_rows: Vec<ReleaseContribNewRow> = contrib_list
-                        .iter()
-                        .map(|c| ReleaseContribNewRow {
-                            release_rev: edit.rev_id.unwrap(),
-                            creator_ident_id: c.creator_id
-                                .clone()
-                                .map(|v| fcid2uuid(&v).expect("valid fatcat identifier")),
-                            raw_name: c.raw_name.clone(),
-                            index_val: c.index,
-                            role: c.role.clone(),
-                            extra_json: c.extra.clone(),
-                        })
-                        .collect();
-                    let contrib_rows: Vec<ReleaseContribRow> = insert_into(release_contrib::table)
-                        .values(contrib_rows)
-                        .get_results(conn)
-                        .expect("error inserting release_contribs");
-                    Some(contrib_rows)
-                }
-            }
-        };
-
-        if let Some(abstract_list) = entity.abstracts {
-            // For rows that specify content, we need to insert the abstract if it doesn't exist
-            // already
-            let new_abstracts: Vec<AbstractsRow> = abstract_list
-                .iter()
-                .filter(|ea| ea.content.is_some())
-                .map(|c| AbstractsRow {
-                    sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(),
-                    content: c.content.clone().unwrap(),
-                })
-                .collect();
-            if !new_abstracts.is_empty() {
-                // Sort of an "upsert"; only inserts new abstract rows if they don't already exist
-                insert_into(abstracts::table)
-                    .values(&new_abstracts)
-                    .on_conflict(abstracts::sha1)
-                    .do_nothing()
-                    .execute(conn)?;
-            }
-            let release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = abstract_list
-                .into_iter()
-                .map(|c| ReleaseRevAbstractNewRow {
-                    release_rev: edit.rev_id.unwrap(),
-                    abstract_sha1: match c.content {
-                        Some(ref content) => Sha1::from(content).hexdigest(),
-                        None => c.sha1.expect("either abstract_sha1 or content is required"),
-                    },
-                    lang: c.lang,
-                    mimetype: c.mimetype,
-                })
-                .collect();
-            insert_into(release_rev_abstract::table)
-                .values(release_abstract_rows)
-                .execute(conn)?;
-        }
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_create(conn, &edit_context)?;
+        edit.into_model()
+    }
+    pub fn update_release_handler(
+        &self,
+        id: &Uuid,
+        entity: models::ReleaseEntity,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?;
+        edit.into_model()
+    }
+    pub fn delete_release_handler(
+        &self,
+        id: &Uuid,
+        editgroup_id: Option<Uuid>,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?;
+        let edit = ReleaseEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?;
         edit.into_model()
     }
 
@@ -816,25 +382,31 @@ impl Server {
         entity: models::WorkEntity,
         conn: &DbConn,
     ) -> Result<EntityEdit> {
-        let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth
-        let editgroup_id = match entity.editgroup_id {
-            None => get_or_create_editgroup(editor_id, conn).expect("current editgroup"),
-            Some(param) => fcid2uuid(&param)?,
-        };
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?;
+        let edit = entity.db_create(conn, &edit_context)?;
+        edit.into_model()
+    }
+
+    pub fn update_work_handler(
+        &self,
+        id: &Uuid,
+        entity: models::WorkEntity,
+        conn: &DbConn,
+    ) -> Result<EntityEdit> {
+        let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?,
false)?; + let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; + edit.into_model() + } + + pub fn delete_work_handler( + &self, + id: &Uuid, + editgroup_id: Option<Uuid>, + conn: &DbConn, + ) -> Result<EntityEdit> { + let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; + let edit = WorkEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - let rev_id: Uuid = insert_into(work_rev::table) - .values(work_rev::extra_json.eq(entity.extra)) - .returning(work_rev::id) - .get_result(conn)?; - let ident_id: Uuid = insert_into(work_ident::table) - .values(work_ident::rev_id.eq(rev_id)) - .returning(work_ident::id) - .get_result(conn)?; - let edit: WorkEditRow = insert_into(work_edit::table) - .values((work_edit::editgroup_id.eq(editgroup_id), - work_edit::ident_id.eq(ident_id), - work_edit::rev_id.eq(rev_id))) - .get_result(conn)?; edit.into_model() } @@ -854,8 +426,7 @@ impl Server { editgroup::description.eq(entity.description), editgroup::extra_json.eq(entity.extra), )) - .get_result(conn) - .expect("error creating edit group"); + .get_result(conn)?; Ok(Editgroup { id: Some(uuid2fcid(&row.id)), @@ -1067,31 +638,50 @@ impl Server { Ok(StatsResponse { extra: Some(val) }) } - entity_batch_handler!( - create_container_handler, - create_container_batch_handler, - ContainerEntity - ); - entity_batch_handler!( - create_creator_handler, - create_creator_batch_handler, - CreatorEntity - ); - entity_batch_handler!(create_file_handler, create_file_batch_handler, FileEntity); - entity_batch_handler!( - create_release_handler, - create_release_batch_handler, - ReleaseEntity - ); - entity_batch_handler!(create_work_handler, create_work_batch_handler, WorkEntity); + entity_batch_handler!(create_container_batch_handler, ContainerEntity); + entity_batch_handler!(create_creator_batch_handler, CreatorEntity); + entity_batch_handler!(create_file_batch_handler, FileEntity); + entity_batch_handler!(create_release_batch_handler, ReleaseEntity); + entity_batch_handler!(create_work_batch_handler, WorkEntity); - entity_history_handler!( - get_container_history_handler, - ContainerEditRow, - container_edit - ); - entity_history_handler!(get_creator_history_handler, CreatorEditRow, creator_edit); - entity_history_handler!(get_file_history_handler, FileEditRow, file_edit); - entity_history_handler!(get_release_history_handler, ReleaseEditRow, release_edit); - entity_history_handler!(get_work_history_handler, WorkEditRow, work_edit); + pub fn get_container_history_handler( + &self, + id: &Uuid, + limit: Option<i64>, + conn: &DbConn, + ) -> Result<Vec<EntityHistoryEntry>> { + ContainerEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) + } + pub fn get_creator_history_handler( + &self, + id: &Uuid, + limit: Option<i64>, + conn: &DbConn, + ) -> Result<Vec<EntityHistoryEntry>> { + CreatorEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) + } + pub fn get_file_history_handler( + &self, + id: &Uuid, + limit: Option<i64>, + conn: &DbConn, + ) -> Result<Vec<EntityHistoryEntry>> { + FileEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) + } + pub fn get_release_history_handler( + &self, + id: &Uuid, + limit: Option<i64>, + conn: &DbConn, + ) -> Result<Vec<EntityHistoryEntry>> { + ReleaseEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) + } + pub fn get_work_history_handler( + &self, + id: &Uuid, + limit: Option<i64>, + conn: &DbConn, + ) -> Result<Vec<EntityHistoryEntry>> { + WorkEntity::db_get_history(conn, 
FatCatId::from_uuid(id), limit) + } } diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs index 95336d3f..faafe984 100644 --- a/rust/src/api_wrappers.rs +++ b/rust/src/api_wrappers.rs @@ -19,8 +19,9 @@ macro_rules! wrap_entity_handlers { // The only stable approach I know of would be: https://github.com/dtolnay/mashup ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_handler:ident, $post_resp:ident, $post_batch_fn:ident, $post_batch_handler:ident, - $post_batch_resp:ident, $get_history_fn:ident, $get_history_handler:ident, - $get_history_resp:ident, $model:ident) => { + $post_batch_resp:ident, $update_fn:ident, $update_handler:ident, $update_resp:ident, + $delete_fn:ident, $delete_handler:ident, $delete_resp:ident, $get_history_fn:ident, + $get_history_handler:ident, $get_history_resp:ident, $model:ident) => { fn $get_fn( &self, @@ -83,10 +84,12 @@ macro_rules! wrap_entity_handlers { fn $post_batch_fn( &self, entity_list: &Vec<models::$model>, + autoaccept: Option<bool>, + editgroup: Option<String>, _context: &Context, ) -> Box<Future<Item = $post_batch_resp, Error = ApiError> + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$post_batch_handler(entity_list, &conn)) { + let ret = match conn.transaction(|| self.$post_batch_handler(entity_list, autoaccept.unwrap_or(false), editgroup, &conn)) { Ok(edit) => $post_batch_resp::CreatedEntities(edit), Err(Error(ErrorKind::Diesel(e), _)) => @@ -106,6 +109,79 @@ macro_rules! wrap_entity_handlers { Box::new(futures::done(Ok(ret))) } + fn $update_fn( + &self, + id: String, + entity: models::$model, + _context: &Context, + ) -> Box<Future<Item = $update_resp, Error = ApiError> + Send> { + let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { + return Box::new(futures::done(Ok($update_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(id).to_string() })))); + }; + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| self.$update_handler(&id, entity, &conn)) { + Ok(edit) => + $update_resp::UpdatedEntity(edit), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $update_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), id) }), + Err(Error(ErrorKind::Diesel(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::Uuid(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $update_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $update_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $update_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + + fn $delete_fn( + &self, + id: String, + editgroup_id: Option<String>, + _context: &Context, + ) -> Box<Future<Item = $delete_resp, Error = ApiError> + Send> { + let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { + return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(id).to_string() })))); + }; + let editgroup_id = match editgroup_id { + Some(raw) => if let Ok(parsed) = fcid2uuid(&raw) { Some(parsed) } else { + return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { + message: 
ErrorKind::InvalidFatcatId(raw).to_string() })))) + } + None => None + }; + let conn = self.db_pool.get().expect("db_pool error"); + let ret = match conn.transaction(|| self.$delete_handler(&id, editgroup_id, &conn)) { + Ok(edit) => + $delete_resp::DeletedEntity(edit), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $delete_resp::NotFound(ErrorResponse { message: format!("No such entity {}: {}", stringify!($model), id) }), + Err(Error(ErrorKind::Diesel(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::Uuid(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(Error(ErrorKind::InvalidFatcatId(e), _)) => + $delete_resp::BadRequest(ErrorResponse { + message: ErrorKind::InvalidFatcatId(e).to_string() }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $delete_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $delete_resp::GenericError(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + fn $get_history_fn( &self, id: String, @@ -175,6 +251,12 @@ impl Api for Server { create_container_batch, create_container_batch_handler, CreateContainerBatchResponse, + update_container, + update_container_handler, + UpdateContainerResponse, + delete_container, + delete_container_handler, + DeleteContainerResponse, get_container_history, get_container_history_handler, GetContainerHistoryResponse, @@ -191,6 +273,12 @@ impl Api for Server { create_creator_batch, create_creator_batch_handler, CreateCreatorBatchResponse, + update_creator, + update_creator_handler, + UpdateCreatorResponse, + delete_creator, + delete_creator_handler, + DeleteCreatorResponse, get_creator_history, get_creator_history_handler, GetCreatorHistoryResponse, @@ -206,6 +294,12 @@ impl Api for Server { create_file_batch, create_file_batch_handler, CreateFileBatchResponse, + update_file, + update_file_handler, + UpdateFileResponse, + delete_file, + delete_file_handler, + DeleteFileResponse, get_file_history, get_file_history_handler, GetFileHistoryResponse, @@ -221,6 +315,12 @@ impl Api for Server { create_release_batch, create_release_batch_handler, CreateReleaseBatchResponse, + update_release, + update_release_handler, + UpdateReleaseResponse, + delete_release, + delete_release_handler, + DeleteReleaseResponse, get_release_history, get_release_history_handler, GetReleaseHistoryResponse, @@ -236,6 +336,12 @@ impl Api for Server { create_work_batch, create_work_batch_handler, CreateWorkBatchResponse, + update_work, + update_work_handler, + UpdateWorkResponse, + delete_work, + delete_work_handler, + DeleteWorkResponse, get_work_history, get_work_history_handler, GetWorkHistoryResponse, @@ -308,6 +414,11 @@ impl Api for Server { message: format!("No such editgroup: {}", id), }) } + Err(Error(ErrorKind::EditgroupAlreadyAccepted(e), _)) => { + AcceptEditgroupResponse::BadRequest(ErrorResponse { + message: ErrorKind::EditgroupAlreadyAccepted(e).to_string(), + }) + } Err(e) => AcceptEditgroupResponse::GenericError(ErrorResponse { message: e.to_string(), }), diff --git a/rust/src/database_entity_crud.rs b/rust/src/database_entity_crud.rs new file mode 100644 index 00000000..88c89e84 --- /dev/null +++ b/rust/src/database_entity_crud.rs @@ -0,0 +1,1005 @@ +use api_helpers::*; +use chrono; +use database_models::*; +use database_schema::*; +use diesel::prelude::*; +use diesel::insert_into; +use errors::*; +use fatcat_api::models::*; +use serde_json; 
+use sha1::Sha1; +use std::marker::Sized; +use std::str::FromStr; +use uuid::Uuid; + +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option<serde_json::Value>, + pub autoaccept: bool, +} + +/* One goal here is to abstract the non-entity-specific bits into generic traits or functions, + * instead of macros. + * + * Notably: + * + * db_get + * db_get_rev + * db_create + * db_create_batch + * db_update + * db_delete + * db_get_history + * + * For now, these will probably be macros, until we can level up our trait/generics foo. + */ + +// Associated Type, not parametric +pub trait EntityCrud +where + Self: Sized, +{ + // TODO: could EditRow and IdentRow be generic structs? Or do they need to be bound to a + // specific table? + type EditRow; // EntityEditRow + type EditNewRow; + type IdentRow; // EntityIdentRow + type IdentNewRow; + type RevRow; + + fn parse_editgroup_id(&self) -> Result<Option<FatCatId>>; + + // Generic Methods + fn db_get(conn: &DbConn, ident: FatCatId) -> Result<Self>; + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result<Self>; + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result<Self::EditRow>; + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result<Vec<Self::EditRow>>; + fn db_update( + &self, + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result<Self::EditRow>; + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result<Self::EditRow>; + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option<i64>, + ) -> Result<Vec<EntityHistoryEntry>>; + + // Entity-specific Methods + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option<Self::IdentRow>, + ) -> Result<Self>; + fn db_insert_rev(&self, conn: &DbConn) -> Result<Uuid>; + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>>; +} + +// TODO: this could be a separate trait on all entities +macro_rules! generic_parse_editgroup_id{ + () => { + fn parse_editgroup_id(&self) -> Result<Option<FatCatId>> { + match &self.editgroup_id { + Some(s) => Ok(Some(FatCatId::from_str(&s)?)), + None => Ok(None), + } + } + } +} + +macro_rules! generic_db_get { + ($ident_table:ident, $rev_table:ident) => { + fn db_get(conn: &DbConn, ident: FatCatId) -> Result<Self> { + let (ident, rev): (Self::IdentRow, Self::RevRow) = $ident_table::table + .find(ident.to_uuid()) + .inner_join($rev_table::table) + .first(conn)?; + + Self::db_from_row(conn, rev, Some(ident)) + } + }; +} + +macro_rules! generic_db_get_rev { + ($rev_table:ident) => { + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result<Self> { + let rev = $rev_table::table.find(rev_id).first(conn)?; + + Self::db_from_row(conn, rev, None) + } + }; +} + +macro_rules! generic_db_create { + // TODO: this path should call generic_db_create_batch + ($ident_table: ident, $edit_table: ident) => { + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result<Self::EditRow> { + let rev_id = self.db_insert_rev(conn)?; + let ident: Uuid = insert_into($ident_table::table) + .values($ident_table::rev_id.eq(&rev_id)) + .returning($ident_table::id) + .get_result(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::ident_id.eq(&ident), + )) + .get_result(conn)?; + Ok(edit) + } + } +} + +macro_rules! 
generic_db_create_batch { + ($ident_table:ident, $edit_table:ident) => { + fn db_create_batch( + conn: &DbConn, + edit_context: &EditContext, + models: &[&Self], + ) -> Result<Vec<Self::EditRow>> { + let rev_ids: Vec<Uuid> = Self::db_insert_revs(conn, models)?; + let ident_ids: Vec<Uuid> = insert_into($ident_table::table) + .values( + rev_ids + .iter() + .map(|rev_id| Self::IdentNewRow { + rev_id: Some(rev_id.clone()), + is_live: edit_context.autoaccept, + redirect_id: None, + }) + .collect::<Vec<Self::IdentNewRow>>(), + ) + .returning($ident_table::id) + .get_results(conn)?; + let edits: Vec<Self::EditRow> = insert_into($edit_table::table) + .values( + rev_ids + .into_iter() + .zip(ident_ids.into_iter()) + .map(|(rev_id, ident_id)| Self::EditNewRow { + editgroup_id: edit_context.editgroup_id.to_uuid(), + rev_id: Some(rev_id), + ident_id: ident_id, + redirect_id: None, + prev_rev: None, + extra_json: edit_context.extra_json.clone(), + }) + .collect::<Vec<Self::EditNewRow>>(), + ) + .get_results(conn)?; + Ok(edits) + } + }; +} + +macro_rules! generic_db_update { + ($ident_table: ident, $edit_table: ident) => { + fn db_update(&self, conn: &DbConn, edit_context: &EditContext, ident: FatCatId) -> Result<Self::EditRow> { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 4xx not 5xx + bail!("entity was already deleted"); + } + + let rev_id = self.db_insert_rev(conn)?; + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(&ident.to_uuid()), + $edit_table::rev_id.eq(&rev_id), + $edit_table::prev_rev.eq(current.rev_id.unwrap()), + $edit_table::extra_json.eq(&self.extra), + )) + .get_result(conn)?; + + Ok(edit) + } + } +} + +macro_rules! generic_db_delete { + ($ident_table:ident, $edit_table:ident) => { + fn db_delete( + conn: &DbConn, + edit_context: &EditContext, + ident: FatCatId, + ) -> Result<Self::EditRow> { + let current: Self::IdentRow = $ident_table::table.find(ident.to_uuid()).first(conn)?; + if current.is_live != true { + // TODO: what if isn't live? 4xx not 5xx + bail!("can't delete an entity that doesn't exist yet"); + } + if current.rev_id.is_none() { + // TODO: what if it's already deleted? 4xx not 5xx + bail!("entity was already deleted"); + } + let edit: Self::EditRow = insert_into($edit_table::table) + .values(( + $edit_table::editgroup_id.eq(edit_context.editgroup_id.to_uuid()), + $edit_table::ident_id.eq(ident.to_uuid()), + $edit_table::rev_id.eq(None::<Uuid>), + $edit_table::redirect_id.eq(None::<Uuid>), + $edit_table::prev_rev.eq(current.rev_id), + $edit_table::extra_json.eq(&edit_context.extra_json), + )) + .get_result(conn)?; + + Ok(edit) + } + }; +} + +macro_rules! 
generic_db_get_history { + ($edit_table:ident) => { + fn db_get_history( + conn: &DbConn, + ident: FatCatId, + limit: Option<i64>, + ) -> Result<Vec<EntityHistoryEntry>> { + let limit = limit.unwrap_or(50); // TODO: make a static + + let rows: Vec<(EditgroupRow, ChangelogRow, Self::EditRow)> = editgroup::table + .inner_join(changelog::table) + .inner_join($edit_table::table) + .filter($edit_table::ident_id.eq(ident.to_uuid())) + .order(changelog::id.desc()) + .limit(limit) + .get_results(conn)?; + + let history: Result<Vec<EntityHistoryEntry>> = rows.into_iter() + .map(|(eg_row, cl_row, e_row)| { + Ok(EntityHistoryEntry { + edit: e_row.into_model()?, + editgroup: eg_row.into_model_partial(), + changelog_entry: cl_row.into_model(), + }) + }) + .collect(); + history + } + }; +} + +macro_rules! generic_db_insert_rev { + () => { + fn db_insert_rev(&self, conn: &DbConn) -> Result<Uuid> { + Self::db_insert_revs(conn, &vec![self]).map(|id_list| id_list[0]) + } + } +} + +impl EntityCrud for ContainerEntity { + type EditRow = ContainerEditRow; + type EditNewRow = ContainerEditNewRow; + type IdentRow = ContainerIdentRow; + type IdentNewRow = ContainerIdentNewRow; + type RevRow = ContainerRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(container_ident, container_rev); + generic_db_get_rev!(container_rev); + generic_db_create!(container_ident, container_edit); + generic_db_create_batch!(container_ident, container_edit); + generic_db_update!(container_ident, container_edit); + generic_db_delete!(container_ident, container_edit); + generic_db_get_history!(container_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option<Self::IdentRow>, + ) -> Result<Self> { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + Ok(ContainerEntity { + issnl: rev_row.issnl, + wikidata_qid: rev_row.wikidata_qid, + publisher: rev_row.publisher, + name: rev_row.name, + abbrev: rev_row.abbrev, + coden: rev_row.coden, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + extra: rev_row.extra_json, + editgroup_id: None, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + if let Some(ref extid) = entity.issnl { + check_issn(extid)?; + } + } + + let rev_ids: Vec<Uuid> = insert_into(container_rev::table) + .values( + models + .iter() + .map(|model| ContainerRevNewRow { + name: model.name.clone(), + publisher: model.publisher.clone(), + issnl: model.issnl.clone(), + wikidata_qid: model.wikidata_qid.clone(), + abbrev: model.abbrev.clone(), + coden: model.coden.clone(), + extra_json: model.extra.clone(), + }) + .collect::<Vec<ContainerRevNewRow>>(), + ) + .returning(container_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for CreatorEntity { + type EditRow = CreatorEditRow; + type EditNewRow = CreatorEditNewRow; + type IdentRow = CreatorIdentRow; + type IdentNewRow = CreatorIdentNewRow; + type RevRow = CreatorRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(creator_ident, creator_rev); + generic_db_get_rev!(creator_rev); + generic_db_create!(creator_ident, creator_edit); + 
generic_db_create_batch!(creator_ident, creator_edit); + generic_db_update!(creator_ident, creator_edit); + generic_db_delete!(creator_ident, creator_edit); + generic_db_get_history!(creator_edit); + generic_db_insert_rev!(); + + fn db_from_row( + _conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option<Self::IdentRow>, + ) -> Result<Self> { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + Ok(CreatorEntity { + display_name: rev_row.display_name, + given_name: rev_row.given_name, + surname: rev_row.surname, + orcid: rev_row.orcid, + wikidata_qid: rev_row.wikidata_qid, + state: state, + ident: ident_id, + revision: Some(rev_row.id.to_string()), + redirect: redirect_id, + editgroup_id: None, + extra: rev_row.extra_json, + }) + } + + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> { + // first verify external identifier syntax + for entity in models { + if let Some(ref extid) = entity.orcid { + check_orcid(extid)?; + } + if let Some(ref extid) = entity.wikidata_qid { + check_wikidata_qid(extid)?; + } + } + + let rev_ids: Vec<Uuid> = insert_into(creator_rev::table) + .values( + models + .iter() + .map(|model| CreatorRevNewRow { + display_name: model.display_name.clone(), + given_name: model.given_name.clone(), + surname: model.surname.clone(), + orcid: model.orcid.clone(), + wikidata_qid: model.wikidata_qid.clone(), + extra_json: model.extra.clone(), + }) + .collect::<Vec<CreatorRevNewRow>>(), + ) + .returning(creator_rev::id) + .get_results(conn)?; + Ok(rev_ids) + } +} + +impl EntityCrud for FileEntity { + type EditRow = FileEditRow; + type EditNewRow = FileEditNewRow; + type IdentRow = FileIdentRow; + type IdentNewRow = FileIdentNewRow; + type RevRow = FileRevRow; + + generic_parse_editgroup_id!(); + generic_db_get!(file_ident, file_rev); + generic_db_get_rev!(file_rev); + generic_db_create!(file_ident, file_edit); + generic_db_create_batch!(file_ident, file_edit); + generic_db_update!(file_ident, file_edit); + generic_db_delete!(file_ident, file_edit); + generic_db_get_history!(file_edit); + generic_db_insert_rev!(); + + fn db_from_row( + conn: &DbConn, + rev_row: Self::RevRow, + ident_row: Option<Self::IdentRow>, + ) -> Result<Self> { + let (state, ident_id, redirect_id) = match ident_row { + Some(i) => ( + Some(i.state().unwrap().shortname()), + Some(FatCatId::from_uuid(&i.id).to_string()), + i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()), + ), + None => (None, None, None), + }; + + let releases: Vec<FatCatId> = file_release::table + .filter(file_release::file_rev.eq(rev_row.id)) + .get_results(conn)? + .into_iter() + .map(|r: FileReleaseRow| FatCatId::from_uuid(&r.target_release_ident_id)) + .collect(); + + let urls: Vec<FileEntityUrls> = file_rev_url::table + .filter(file_rev_url::file_rev.eq(rev_row.id)) + .get_results(conn)? 
+            .into_iter()
+            .map(|r: FileRevUrlRow| FileEntityUrls {
+                rel: r.rel,
+                url: r.url,
+            })
+            .collect();
+
+        Ok(FileEntity {
+            sha1: rev_row.sha1,
+            sha256: rev_row.sha256,
+            md5: rev_row.md5,
+            size: rev_row.size.map(|v| v as i64),
+            urls: Some(urls),
+            mimetype: rev_row.mimetype,
+            releases: Some(releases.iter().map(|fcid| fcid.to_string()).collect()),
+            state: state,
+            ident: ident_id,
+            revision: Some(rev_row.id.to_string()),
+            redirect: redirect_id,
+            editgroup_id: None,
+            extra: rev_row.extra_json,
+        })
+    }
+
+    fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+        let rev_ids: Vec<Uuid> = insert_into(file_rev::table)
+            .values(
+                models
+                    .iter()
+                    .map(|model| FileRevNewRow {
+                        size: model.size,
+                        sha1: model.sha1.clone(),
+                        sha256: model.sha256.clone(),
+                        md5: model.md5.clone(),
+                        mimetype: model.mimetype.clone(),
+                        extra_json: model.extra.clone(),
+                    })
+                    .collect::<Vec<FileRevNewRow>>(),
+            )
+            .returning(file_rev::id)
+            .get_results(conn)?;
+
+        let mut file_release_rows: Vec<FileReleaseRow> = vec![];
+        let mut file_url_rows: Vec<FileRevUrlNewRow> = vec![];
+
+        for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
+            match &model.releases {
+                None => (),
+                Some(release_list) => {
+                    let these_release_rows: Result<Vec<FileReleaseRow>> = release_list
+                        .iter()
+                        .map(|r| {
+                            Ok(FileReleaseRow {
+                                file_rev: rev_id.clone(),
+                                target_release_ident_id: FatCatId::from_str(r)?.to_uuid(),
+                            })
+                        })
+                        .collect();
+                    file_release_rows.extend(these_release_rows?);
+                }
+            };
+
+            match &model.urls {
+                None => (),
+                Some(url_list) => {
+                    let these_url_rows: Vec<FileRevUrlNewRow> = url_list
+                        .into_iter()
+                        .map(|u| FileRevUrlNewRow {
+                            file_rev: rev_id.clone(),
+                            rel: u.rel.clone(),
+                            url: u.url.clone(),
+                        })
+                        .collect();
+                    file_url_rows.extend(these_url_rows);
+                }
+            };
+        }
+
+        if !file_release_rows.is_empty() {
+            // TODO: shouldn't it be "file_rev_release"?
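            // (Annotation: the accumulated association rows are written in one bulk
            // INSERT per table below; the is_empty() guards skip the statement
            // entirely when a batch carries no releases or URLs.)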
+            insert_into(file_release::table)
+                .values(file_release_rows)
+                .execute(conn)?;
+        }
+
+        if !file_url_rows.is_empty() {
+            insert_into(file_rev_url::table)
+                .values(file_url_rows)
+                .execute(conn)?;
+        }
+
+        Ok(rev_ids)
+    }
+}
+
+impl EntityCrud for ReleaseEntity {
+    type EditRow = ReleaseEditRow;
+    type EditNewRow = ReleaseEditNewRow;
+    type IdentRow = ReleaseIdentRow;
+    type IdentNewRow = ReleaseIdentNewRow;
+    type RevRow = ReleaseRevRow;
+
+    generic_parse_editgroup_id!();
+    generic_db_get!(release_ident, release_rev);
+    generic_db_get_rev!(release_rev);
+    //generic_db_create!(release_ident, release_edit);
+    //generic_db_create_batch!(release_ident, release_edit);
+    generic_db_update!(release_ident, release_edit);
+    generic_db_delete!(release_ident, release_edit);
+    generic_db_get_history!(release_edit);
+    generic_db_insert_rev!();
+
+    fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result<Self::EditRow> {
+        let mut edits = Self::db_create_batch(conn, edit_context, &vec![self])?;
+        // probably a more elegant way to destroy the vec and take first element
+        Ok(edits.pop().unwrap())
+    }
+
+    fn db_create_batch(
+        conn: &DbConn,
+        edit_context: &EditContext,
+        models: &[&Self],
+    ) -> Result<Vec<Self::EditRow>> {
+        // This isn't the generic implementation because we need to create Work entities for each
+        // of the release entities passed (at least in the common case)
+
+        // Generate the set of new work entities to insert (usually one for each release, but some
+        // releases might be pointed to a work already)
+        let mut new_work_models: Vec<&WorkEntity> = vec![];
+        for entity in models {
+            if entity.work_id.is_none() {
+                new_work_models.push(&WorkEntity {
+                    ident: None,
+                    revision: None,
+                    redirect: None,
+                    state: None,
+                    editgroup_id: None,
+                    extra: None,
+                });
+            };
+        }
+
+        // create the works, then pluck the list of idents from the result
+        let new_work_edits =
+            WorkEntity::db_create_batch(conn, edit_context, new_work_models.as_slice())?;
+        let mut new_work_ids: Vec<Uuid> = new_work_edits.iter().map(|edit| edit.ident_id).collect();
+
+        // Copy all the release models, and ensure that each has work_id set, using the new work
+        // idents. There should be one new work ident for each release missing one.
+        let models_with_work_ids: Vec<Self> = models
+            .iter()
+            .map(|model| {
+                let mut model = (*model).clone();
+                if model.work_id.is_none() {
+                    model.work_id =
+                        Some(FatCatId::from_uuid(&new_work_ids.pop().unwrap()).to_string())
+                }
+                model
+            })
+            .collect();
+        let model_refs: Vec<&Self> = models_with_work_ids.iter().map(|s| s).collect();
+        let models = model_refs.as_slice();
+
+        // The rest here is copy/pasta from the generic (how to avoid copypasta?)
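        // (Annotation: the create path below is three bulk INSERTs; revision rows
        // first, then ident rows pointing at those revisions, then edit rows tying
        // each ident/rev pair into the editgroup.)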
+        let rev_ids: Vec<Uuid> = Self::db_insert_revs(conn, models)?;
+        let ident_ids: Vec<Uuid> = insert_into(release_ident::table)
+            .values(
+                rev_ids
+                    .iter()
+                    .map(|rev_id| Self::IdentNewRow {
+                        rev_id: Some(rev_id.clone()),
+                        is_live: edit_context.autoaccept,
+                        redirect_id: None,
+                    })
+                    .collect::<Vec<Self::IdentNewRow>>(),
+            )
+            .returning(release_ident::id)
+            .get_results(conn)?;
+        let edits: Vec<Self::EditRow> = insert_into(release_edit::table)
+            .values(
+                rev_ids
+                    .into_iter()
+                    .zip(ident_ids.into_iter())
+                    .map(|(rev_id, ident_id)| Self::EditNewRow {
+                        editgroup_id: edit_context.editgroup_id.to_uuid(),
+                        rev_id: Some(rev_id),
+                        ident_id: ident_id,
+                        redirect_id: None,
+                        prev_rev: None,
+                        extra_json: edit_context.extra_json.clone(),
+                    })
+                    .collect::<Vec<Self::EditNewRow>>(),
+            )
+            .get_results(conn)?;
+        Ok(edits)
+    }
+
+    fn db_from_row(
+        conn: &DbConn,
+        rev_row: Self::RevRow,
+        ident_row: Option<Self::IdentRow>,
+    ) -> Result<Self> {
+        let (state, ident_id, redirect_id) = match ident_row {
+            Some(i) => (
+                Some(i.state().unwrap().shortname()),
+                Some(FatCatId::from_uuid(&i.id).to_string()),
+                i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()),
+            ),
+            None => (None, None, None),
+        };
+
+        let refs: Vec<ReleaseRef> = release_ref::table
+            .filter(release_ref::release_rev.eq(rev_row.id))
+            .order(release_ref::index_val.asc())
+            .get_results(conn)?
+            .into_iter()
+            .map(|r: ReleaseRefRow| ReleaseRef {
+                index: r.index_val,
+                key: r.key,
+                extra: r.extra_json,
+                container_title: r.container_title,
+                year: r.year,
+                title: r.title,
+                locator: r.locator,
+                target_release_id: r.target_release_ident_id
+                    .map(|v| FatCatId::from_uuid(&v).to_string()),
+            })
+            .collect();
+
+        let contribs: Vec<ReleaseContrib> = release_contrib::table
+            .filter(release_contrib::release_rev.eq(rev_row.id))
+            .order((
+                release_contrib::role.asc(),
+                release_contrib::index_val.asc(),
+            ))
+            .get_results(conn)?
+            .into_iter()
+            .map(|c: ReleaseContribRow| ReleaseContrib {
+                index: c.index_val,
+                raw_name: c.raw_name,
+                role: c.role,
+                extra: c.extra_json,
+                creator_id: c.creator_ident_id
+                    .map(|v| FatCatId::from_uuid(&v).to_string()),
+                creator: None,
+            })
+            .collect();
+
+        let abstracts: Vec<ReleaseEntityAbstracts> = release_rev_abstract::table
+            .inner_join(abstracts::table)
+            .filter(release_rev_abstract::release_rev.eq(rev_row.id))
+            .get_results(conn)?
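            // (Annotation: the inner_join against the abstracts table hydrates the
            // full abstract content alongside each release_rev_abstract row.)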
+            .into_iter()
+            .map(
+                |r: (ReleaseRevAbstractRow, AbstractsRow)| ReleaseEntityAbstracts {
+                    sha1: Some(r.0.abstract_sha1),
+                    mimetype: r.0.mimetype,
+                    lang: r.0.lang,
+                    content: Some(r.1.content),
+                },
+            )
+            .collect();
+
+        Ok(ReleaseEntity {
+            title: rev_row.title,
+            release_type: rev_row.release_type,
+            release_status: rev_row.release_status,
+            release_date: rev_row
+                .release_date
+                .map(|v| chrono::DateTime::from_utc(v.and_hms(0, 0, 0), chrono::Utc)),
+            doi: rev_row.doi,
+            pmid: rev_row.pmid,
+            pmcid: rev_row.pmcid,
+            isbn13: rev_row.isbn13,
+            core_id: rev_row.core_id,
+            wikidata_qid: rev_row.wikidata_qid,
+            volume: rev_row.volume,
+            issue: rev_row.issue,
+            pages: rev_row.pages,
+            files: None,
+            container: None,
+            container_id: rev_row
+                .container_ident_id
+                .map(|u| FatCatId::from_uuid(&u).to_string()),
+            publisher: rev_row.publisher,
+            language: rev_row.language,
+            work_id: Some(FatCatId::from_uuid(&rev_row.work_ident_id).to_string()),
+            refs: Some(refs),
+            contribs: Some(contribs),
+            abstracts: Some(abstracts),
+            state: state,
+            ident: ident_id,
+            revision: Some(rev_row.id.to_string()),
+            redirect: redirect_id,
+            editgroup_id: None,
+            extra: rev_row.extra_json,
+        })
+    }
+
+    fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+        // first verify external identifier syntax
+        for entity in models {
+            if let Some(ref extid) = entity.doi {
+                check_doi(extid)?;
+            }
+            if let Some(ref extid) = entity.pmid {
+                check_pmid(extid)?;
+            }
+            if let Some(ref extid) = entity.pmcid {
+                check_pmcid(extid)?;
+            }
+            if let Some(ref extid) = entity.wikidata_qid {
+                check_wikidata_qid(extid)?;
+            }
+        }
+
+        let rev_ids: Vec<Uuid> = insert_into(release_rev::table)
+            .values(models
+                .iter()
+                .map(|model| {
+                    Ok(ReleaseRevNewRow {
+                        title: model.title.clone(),
+                        release_type: model.release_type.clone(),
+                        release_status: model.release_status.clone(),
+                        release_date: model.release_date.map(|v| v.naive_utc().date()),
+                        doi: model.doi.clone(),
+                        pmid: model.pmid.clone(),
+                        pmcid: model.pmcid.clone(),
+                        wikidata_qid: model.wikidata_qid.clone(),
+                        isbn13: model.isbn13.clone(),
+                        core_id: model.core_id.clone(),
+                        volume: model.volume.clone(),
+                        issue: model.issue.clone(),
+                        pages: model.pages.clone(),
+                        work_ident_id: match model.work_id.clone() {
+                            None => bail!("release_revs must have a work_id by the time they are inserted; this is an internal soundness error"),
+                            Some(s) => FatCatId::from_str(&s)?.to_uuid(),
+                        },
+                        container_ident_id: match model.container_id.clone() {
+                            None => None,
+                            Some(s) => Some(FatCatId::from_str(&s)?.to_uuid()),
+                        },
+                        publisher: model.publisher.clone(),
+                        language: model.language.clone(),
+                        extra_json: model.extra.clone()
+                    })
+                })
+                .collect::<Result<Vec<ReleaseRevNewRow>>>()?)
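            // (Annotation: collecting into Result<Vec<_>> above means one malformed
            // work_id or container_id aborts the whole batch before any rows reach
            // the database.)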
+            .returning(release_rev::id)
+            .get_results(conn)?;
+
+        let mut release_ref_rows: Vec<ReleaseRefNewRow> = vec![];
+        let mut release_contrib_rows: Vec<ReleaseContribNewRow> = vec![];
+        let mut abstract_rows: Vec<AbstractsRow> = vec![];
+        let mut release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = vec![];
+
+        for (model, rev_id) in models.iter().zip(rev_ids.iter()) {
+            match &model.refs {
+                None => (),
+                Some(ref_list) => {
+                    let these_ref_rows: Vec<ReleaseRefNewRow> = ref_list
+                        .iter()
+                        .map(|r| {
+                            Ok(ReleaseRefNewRow {
+                                release_rev: rev_id.clone(),
+                                target_release_ident_id: match r.target_release_id.clone() {
+                                    None => None,
+                                    Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()),
+                                },
+                                index_val: r.index,
+                                key: r.key.clone(),
+                                container_title: r.container_title.clone(),
+                                year: r.year,
+                                title: r.title.clone(),
+                                locator: r.locator.clone(),
+                                extra_json: r.extra.clone(),
+                            })
+                        })
+                        .collect::<Result<Vec<ReleaseRefNewRow>>>()?;
+                    release_ref_rows.extend(these_ref_rows);
+                }
+            };
+
+            match &model.contribs {
+                None => (),
+                Some(contrib_list) => {
+                    let these_contrib_rows: Vec<ReleaseContribNewRow> = contrib_list
+                        .iter()
+                        .map(|c| {
+                            Ok(ReleaseContribNewRow {
+                                release_rev: rev_id.clone(),
+                                creator_ident_id: match c.creator_id.clone() {
+                                    None => None,
+                                    Some(v) => Some(FatCatId::from_str(&v)?.to_uuid()),
+                                },
+                                raw_name: c.raw_name.clone(),
+                                index_val: c.index,
+                                role: c.role.clone(),
+                                extra_json: c.extra.clone(),
+                            })
+                        })
+                        .collect::<Result<Vec<ReleaseContribNewRow>>>()?;
+                    release_contrib_rows.extend(these_contrib_rows);
+                }
+            };
+
+            if let Some(abstract_list) = &model.abstracts {
+                // For rows that specify content, we need to insert the abstract if it doesn't exist
+                // already
+                let new_abstracts: Vec<AbstractsRow> = abstract_list
+                    .iter()
+                    .filter(|ea| ea.content.is_some())
+                    .map(|c| AbstractsRow {
+                        sha1: Sha1::from(c.content.clone().unwrap()).hexdigest(),
+                        content: c.content.clone().unwrap(),
+                    })
+                    .collect();
+                abstract_rows.extend(new_abstracts);
+                let new_release_abstract_rows: Vec<ReleaseRevAbstractNewRow> = abstract_list
+                    .into_iter()
+                    .map(|c| {
+                        Ok(ReleaseRevAbstractNewRow {
+                            release_rev: rev_id.clone(),
+                            abstract_sha1: match c.content {
+                                Some(ref content) => Sha1::from(content).hexdigest(),
+                                None => match c.sha1.clone() {
+                                    Some(v) => v,
+                                    None => bail!("either abstract_sha1 or content is required"),
+                                },
+                            },
+                            lang: c.lang.clone(),
+                            mimetype: c.mimetype.clone(),
+                        })
+                    })
+                    .collect::<Result<Vec<ReleaseRevAbstractNewRow>>>()?;
+                release_abstract_rows.extend(new_release_abstract_rows);
+            }
+        }
+
+        if !release_ref_rows.is_empty() {
+            insert_into(release_ref::table)
+                .values(release_ref_rows)
+                .execute(conn)?;
+        }
+
+        if !release_contrib_rows.is_empty() {
+            insert_into(release_contrib::table)
+                .values(release_contrib_rows)
+                .execute(conn)?;
+        }
+
+        if !abstract_rows.is_empty() {
+            // Sort of an "upsert"; only inserts new abstract rows if they don't already exist
+            insert_into(abstracts::table)
+                .values(&abstract_rows)
+                .on_conflict(abstracts::sha1)
+                .do_nothing()
+                .execute(conn)?;
+            insert_into(release_rev_abstract::table)
+                .values(release_abstract_rows)
+                .execute(conn)?;
+        }
+
+        Ok(rev_ids)
+    }
+}
+
+impl EntityCrud for WorkEntity {
+    type EditRow = WorkEditRow;
+    type EditNewRow = WorkEditNewRow;
+    type IdentRow = WorkIdentRow;
+    type IdentNewRow = WorkIdentNewRow;
+    type RevRow = WorkRevRow;
+
+    generic_parse_editgroup_id!();
+    generic_db_get!(work_ident, work_rev);
+    generic_db_get_rev!(work_rev);
+    generic_db_create!(work_ident, work_edit);
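    // (Annotation: WorkEntity keeps the fully generic create/create_batch paths,
    // in contrast to ReleaseEntity above, which overrides db_create_batch to mint
    // a new work for each release that lacks one.)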
+    generic_db_create_batch!(work_ident, work_edit);
+    generic_db_update!(work_ident, work_edit);
+    generic_db_delete!(work_ident, work_edit);
+    generic_db_get_history!(work_edit);
+    generic_db_insert_rev!();
+
+    fn db_from_row(
+        _conn: &DbConn,
+        rev_row: Self::RevRow,
+        ident_row: Option<Self::IdentRow>,
+    ) -> Result<Self> {
+        let (state, ident_id, redirect_id) = match ident_row {
+            Some(i) => (
+                Some(i.state().unwrap().shortname()),
+                Some(FatCatId::from_uuid(&i.id).to_string()),
+                i.redirect_id.map(|u| FatCatId::from_uuid(&u).to_string()),
+            ),
+            None => (None, None, None),
+        };
+
+        Ok(WorkEntity {
+            state: state,
+            ident: ident_id,
+            revision: Some(rev_row.id.to_string()),
+            redirect: redirect_id,
+            editgroup_id: None,
+            extra: rev_row.extra_json,
+        })
+    }
+
+    fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>> {
+        let rev_ids: Vec<Uuid> = insert_into(work_rev::table)
+            .values(
+                models
+                    .iter()
+                    .map(|model| WorkRevNewRow {
+                        extra_json: model.extra.clone(),
+                    })
+                    .collect::<Vec<WorkRevNewRow>>(),
+            )
+            .returning(work_rev::id)
+            .get_results(conn)?;
+        Ok(rev_ids)
+    }
+}
diff --git a/rust/src/database_models.rs b/rust/src/database_models.rs
index 47e00bcf..93e6a0fe 100644
--- a/rust/src/database_models.rs
+++ b/rust/src/database_models.rs
@@ -37,7 +37,14 @@ pub trait EntityEditRow {
 // Helper for constructing tables
 macro_rules! entity_structs {
-    ($edit_table:expr, $edit_struct:ident, $ident_table:expr, $ident_struct:ident) => {
+    (
+        $edit_table:expr,
+        $edit_struct:ident,
+        $edit_new_struct:ident,
+        $ident_table:expr,
+        $ident_struct:ident,
+        $ident_new_struct:ident
+    ) => {
         #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset, QueryableByName)]
         #[table_name = $edit_table]
         pub struct $edit_struct {
@@ -51,6 +58,17 @@ macro_rules! entity_structs {
             pub extra_json: Option<serde_json::Value>,
         }
+        #[derive(Debug, Associations, AsChangeset, QueryableByName, Insertable)]
+        #[table_name = $edit_table]
+        pub struct $edit_new_struct {
+            pub editgroup_id: Uuid,
+            pub ident_id: Uuid,
+            pub rev_id: Option<Uuid>,
+            pub redirect_id: Option<Uuid>,
+            pub prev_rev: Option<Uuid>,
+            pub extra_json: Option<serde_json::Value>,
+        }
+
         impl EntityEditRow for $edit_struct {
             /// Go from a row (SQL model) to an API model
             fn into_model(self) -> Result<EntityEdit> {
@@ -75,6 +93,14 @@ macro_rules! entity_structs {
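// (Annotation: entity_structs! now takes six arguments, adding an Insertable
// *EditNewRow and *IdentNewRow companion struct for each entity type.)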
             pub redirect_id: Option<Uuid>,
         }
+        #[derive(Debug, Associations, AsChangeset, Insertable)]
+        #[table_name = $ident_table]
+        pub struct $ident_new_struct {
+            pub is_live: bool,
+            pub rev_id: Option<Uuid>,
+            pub redirect_id: Option<Uuid>,
+        }
+
         impl EntityIdentRow for $ident_struct {
             fn state(&self) -> Result<EntityState> {
                 if !self.is_live {
@@ -104,11 +130,25 @@ pub struct ContainerRevRow {
     pub coden: Option<String>,
 }
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "container_rev"]
+pub struct ContainerRevNewRow {
+    pub extra_json: Option<serde_json::Value>,
+    pub name: String,
+    pub publisher: Option<String>,
+    pub issnl: Option<String>,
+    pub wikidata_qid: Option<String>,
+    pub abbrev: Option<String>,
+    pub coden: Option<String>,
+}
+
 entity_structs!(
     "container_edit",
     ContainerEditRow,
+    ContainerEditNewRow,
     "container_ident",
-    ContainerIdentRow
+    ContainerIdentRow,
+    ContainerIdentNewRow
 );

 #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
@@ -123,11 +163,24 @@ pub struct CreatorRevRow {
     pub wikidata_qid: Option<String>,
 }
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "creator_rev"]
+pub struct CreatorRevNewRow {
+    pub extra_json: Option<serde_json::Value>,
+    pub display_name: String,
+    pub given_name: Option<String>,
+    pub surname: Option<String>,
+    pub orcid: Option<String>,
+    pub wikidata_qid: Option<String>,
+}
+
 entity_structs!(
     "creator_edit",
     CreatorEditRow,
+    CreatorEditNewRow,
     "creator_ident",
-    CreatorIdentRow
+    CreatorIdentRow,
+    CreatorIdentNewRow
 );

 #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
@@ -159,7 +212,25 @@ pub struct FileRevRow {
     pub mimetype: Option<String>,
 }
-entity_structs!("file_edit", FileEditRow, "file_ident", FileIdentRow);
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "file_rev"]
+pub struct FileRevNewRow {
+    pub extra_json: Option<serde_json::Value>,
+    pub size: Option<i64>,
+    pub sha1: Option<String>,
+    pub sha256: Option<String>,
+    pub md5: Option<String>,
+    pub mimetype: Option<String>,
+}
+
+entity_structs!(
+    "file_edit",
+    FileEditRow,
+    FileEditNewRow,
+    "file_ident",
+    FileIdentRow,
+    FileIdentNewRow
+);

 #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
 #[table_name = "release_rev"]
@@ -185,11 +256,36 @@ pub struct ReleaseRevRow {
     pub language: Option<String>,
 }
+#[derive(Debug, Associations, AsChangeset, Insertable)]
+#[table_name = "release_rev"]
+pub struct ReleaseRevNewRow {
+    pub extra_json: Option<serde_json::Value>,
+    pub work_ident_id: Uuid,
+    pub container_ident_id: Option<Uuid>,
+    pub title: String,
+    pub release_type: Option<String>,
+    pub release_status: Option<String>,
+    pub release_date: Option<chrono::NaiveDate>,
+    pub doi: Option<String>,
+    pub pmid: Option<String>,
+    pub pmcid: Option<String>,
+    pub wikidata_qid: Option<String>,
+    pub isbn13: Option<String>,
+    pub core_id: Option<String>,
+    pub volume: Option<String>,
+    pub issue: Option<String>,
+    pub pages: Option<String>,
+    pub publisher: Option<String>,
+    pub language: Option<String>,
+}
+
 entity_structs!(
     "release_edit",
     ReleaseEditRow,
+    ReleaseEditNewRow,
     "release_ident",
-    ReleaseIdentRow
+    ReleaseIdentRow,
+    ReleaseIdentNewRow
 );

 #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
@@ -199,7 +295,20 @@ pub struct WorkRevRow {
     pub extra_json: Option<serde_json::Value>,
 }
-entity_structs!("work_edit", WorkEditRow, "work_ident", WorkIdentRow);
+#[derive(Debug, Associations, AsChangeset, Insertable)]
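// (Annotation: same Insertable-companion pattern as the other *RevNewRow structs
// above; work revisions carry no columns beyond extra_json.)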
+#[table_name = "work_rev"]
+pub struct WorkRevNewRow {
+    pub extra_json: Option<serde_json::Value>,
+}
+
+entity_structs!(
+    "work_edit",
+    WorkEditRow,
+    WorkEditNewRow,
+    "work_ident",
+    WorkIdentRow,
+    WorkIdentNewRow
+);

 #[derive(Debug, Queryable, Identifiable, Associations, AsChangeset)]
 #[table_name = "release_rev_abstract"]
diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 356084a5..2236d602 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -25,6 +25,7 @@ extern crate sha1;
 pub mod api_helpers;
 pub mod api_server;
 pub mod api_wrappers;
+pub mod database_entity_crud;
 pub mod database_models;
 pub mod database_schema;
@@ -47,6 +48,10 @@ mod errors {
                 description("external identifier doesn't match required pattern")
                 display("external identifier doesn't match required pattern")
             }
+            EditgroupAlreadyAccepted(id: String) {
+                description("editgroup was already accepted")
+                display("attempted to accept an editgroup which was already accepted: {}", id)
+            }
         }
     }
 }
diff --git a/rust/tests/test_api_server.rs b/rust/tests/test_api_server.rs
index 02c77413..54639228 100644
--- a/rust/tests/test_api_server.rs
+++ b/rust/tests/test_api_server.rs
@@ -469,6 +469,71 @@ fn test_post_work() {
 }

 #[test]
+fn test_update_work() {
+    let (headers, router, conn) = setup();
+
+    check_response(
+        request::post(
+            "http://localhost:9411/v0/work",
+            headers.clone(),
+            r#"{
+                "extra": { "source": "other speculation" }
+            }"#,
+            &router,
+        ),
+        status::Created,
+        None,
+    );
+
+    let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001").unwrap();
+    let editgroup_id = get_or_create_editgroup(editor_id, &conn).unwrap();
+    check_response(
+        request::post(
+            &format!(
+                "http://localhost:9411/v0/editgroup/{}/accept",
+                uuid2fcid(&editgroup_id)
+            ),
+            headers.clone(),
+            "",
+            &router,
+        ),
+        status::Ok,
+        None,
+    );
+}
+
+#[test]
+fn test_delete_work() {
+    let (headers, router, conn) = setup();
+
+    check_response(
+        request::delete(
+            "http://localhost:9411/v0/work/aaaaaaaaaaaaavkvaaaaaaaaai",
+            headers.clone(),
+            &router,
+        ),
+        status::Ok,
+        None,
+    );
+
+    let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001").unwrap();
+    let editgroup_id = get_or_create_editgroup(editor_id, &conn).unwrap();
+    check_response(
+        request::post(
+            &format!(
+                "http://localhost:9411/v0/editgroup/{}/accept",
+                uuid2fcid(&editgroup_id)
+            ),
+            headers.clone(),
+            "",
+            &router,
+        ),
+        status::Ok,
+        None,
+    );
+}
+
+#[test]
 fn test_accept_editgroup() {
     let (headers, router, conn) = setup();
@@ -918,3 +983,44 @@ fn test_contribs() {
         None,
     );
 }
+
+#[test]
+fn test_post_batch_autoaccept() {
+    let (headers, router, _conn) = setup();
+
+    // "true"
+    check_response(
+        request::post(
+            "http://localhost:9411/v0/container/batch?autoaccept=true",
+            headers.clone(),
+            r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
+            &router,
+        ),
+        status::Created,
+        None,
+    );
+
+    // "n"
+    check_response(
+        request::post(
+            "http://localhost:9411/v0/container/batch?autoaccept=n",
+            headers.clone(),
+            r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
+            &router,
+        ),
+        status::Created,
+        None,
+    );
+
+    // editgroup
+    check_response(
+        request::post(
+            "http://localhost:9411/v0/container/batch?autoaccept=yes&editgroup=asdf",
+            headers.clone(),
+            r#"[{"name": "test journal"}, {"name": "another test journal"}]"#,
+            &router,
+        ),
+        status::BadRequest,
+        None,
+    );
+}
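// (Annotation, not part of this patch: a hypothetical companion case combining
// `autoaccept` with a real editgroup, in the same style as the tests above.
// Assumes `conn` is bound as in the other tests; the expected Created status is
// an inference from the endpoint description, not verified behavior:
//
//     let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001").unwrap();
//     let editgroup_id = get_or_create_editgroup(editor_id, &conn).unwrap();
//     check_response(
//         request::post(
//             &format!(
//                 "http://localhost:9411/v0/container/batch?autoaccept=true&editgroup={}",
//                 uuid2fcid(&editgroup_id)
//             ),
//             headers.clone(),
//             r#"[{"name": "test journal"}]"#,
//             &router,
//         ),
//         status::Created,
//         None,
//     );
// )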