author     Bryan Newbold <bnewbold@robocracy.org>    2018-09-10 19:37:36 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>    2018-09-10 19:37:36 -0700
commit     dd0dbc45a0c0aad0819203431d374cbd93cce58f (patch)
tree       f9a8fb6e8eea82d54f561fb89472d16a4f287505
parent     1aa49b2c613178f17a2cb6e85a22f183a1c81947 (diff)
parent     60b070103e80a83e062a57cefd0ba0a84fc3a4c0 (diff)
Merge branch 'master' into cockroach
Manually resolve conflicts in:
rust/migrations/2018-05-12-001226_init/up.sql
-rw-r--r--  TODO                                          |   7
-rw-r--r--  fatcat-openapi2.yml                           |  20
-rw-r--r--  notes/import_timing_20180908.txt              | 249
-rw-r--r--  python/fatcat/importer_common.py              |   2
-rw-r--r--  python/tests/files/0000-0003-3953-765X.json   |   1
-rw-r--r--  python/tests/importer.py                      |   1
-rw-r--r--  python/tests/orcid.py                         |   6
-rw-r--r--  rust/HACKING.md                               |   4
-rw-r--r--  rust/TODO                                     |  10
-rw-r--r--  rust/fatcat-api/README.md                     |   2
-rw-r--r--  rust/fatcat-api/api.yaml                      |  20
-rw-r--r--  rust/fatcat-api/api/swagger.yaml              |  54
-rw-r--r--  rust/fatcat-api/src/client.rs                 |  26
-rw-r--r--  rust/fatcat-api/src/lib.rs                    |  28
-rw-r--r--  rust/fatcat-api/src/mimetypes.rs              |  20
-rw-r--r--  rust/fatcat-api/src/server.rs                 |  44
-rw-r--r--  rust/migrations/2018-05-12-001226_init/up.sql |  28
-rw-r--r--  rust/src/api_entity_crud.rs (renamed from rust/src/database_entity_crud.rs) | 92
-rw-r--r--  rust/src/api_helpers.rs                       |  58
-rw-r--r--  rust/src/api_server.rs                        | 309
-rw-r--r--  rust/src/api_wrappers.rs                      | 206
-rw-r--r--  rust/src/lib.rs                               |   2
22 files changed, 634 insertions, 555 deletions
@@ -76,6 +76,13 @@ name ref: https://www.w3.org/International/questions/qa-personal-names - hydrate entities in API ? "expand" query param +- don't include abstracts by default? +- "stub" mode for lookups, returning only the ident (or maybe whole row)? + +## Database + +- test using hash indexes for some UUID column indexes, or at least sha1 and + other hashes (abstracts, file lookups) ## Other diff --git a/fatcat-openapi2.yml b/fatcat-openapi2.yml index a8919216..2b0615d2 100644 --- a/fatcat-openapi2.yml +++ b/fatcat-openapi2.yml @@ -671,7 +671,7 @@ paths: operationId: "get_creator_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -939,7 +939,7 @@ paths: operationId: "get_release_files" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1081,7 +1081,7 @@ paths: operationId: "get_work_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1097,9 +1097,13 @@ paths: operationId: "get_editor" responses: 200: - description: Found Editor + description: Found schema: $ref: "#/definitions/editor" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1118,11 +1122,15 @@ paths: operationId: "get_editor_changelog" responses: 200: - description: Found Merged Changes + description: Found schema: type: array items: $ref: "#/definitions/changelog_entry" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1163,7 +1171,7 @@ paths: operationId: "get_editgroup" responses: 200: - description: Found Entity + description: Found schema: $ref: "#/definitions/editgroup" 400: diff --git a/notes/import_timing_20180908.txt b/notes/import_timing_20180908.txt new file mode 100644 index 00000000..3091e4fa --- /dev/null +++ b/notes/import_timing_20180908.txt @@ -0,0 +1,249 @@ + +Changes since last time: +- auto-accept flag (potentially no need to UPDATE/vacuum database) +- large CRUD code refactor (but not complete removal of api_server CRUD yet) +- removed "single query insert per entity", but added row-based inserts + +## Laptop Rough Timing + +HTTP-VERB branch 7354899493f6448bed5698ad6ade1dbebcf39379: +=> note: in this branch, importer is sitll creating editgroups in separate request + + time ./fatcat_import.py import-issn /home/bnewbold/code/oa-journals-analysis/upload-2018-04-05/journal_extra_metadata.csv + real 0m28.779s + user 0m5.716s + sys 0m0.208s + + cat /data/crossref/crossref-works.2018-01-21.badsample_100k.json | time parallel -j4 --round-robin --pipe ./fatcat_import.py import-crossref - /data/issn/20180216.ISSN-to-ISSN-L.txt + + 89.83user 4.58system 1:46.67elapsed 88%CPU (0avgtext+0avgdata 386184maxresident)k + 78632inputs+385408outputs (1major+579693minor)pagefaults 0swaps + # 1:46.67 elapsed + + # running again + time ./fatcat_import.py import-issn /home/bnewbold/code/oa-journals-analysis/upload-2018-04-05/journal_extra_metadata.csv + real 0m29.714s + user 0m6.048s + sys 0m0.212s + + cat /data/crossref/crossref-works.2018-01-21.badsample_100k.json | time parallel -j4 --round-robin --pipe ./fatcat_import.py import-crossref - /data/issn/20180216.ISSN-to-ISSN-L.txt + 89.07user 4.52system 1:48.11elapsed 86%CPU (0avgtext+0avgdata 385940maxresident)k + 0inputs+377456outputs (0major+583532minor)pagefaults 0swaps + # 1:48.11 elapsed + +MASTER branch 0053d133f8ff96aa4dedc1ff7e2754812ddfc79a + + time ./fatcat_import.py 
import-issn /home/bnewbold/code/oa-journals-analysis/upload-2018-04-05/journal_extra_metadata.csv + real 1m8.061s + user 0m7.384s + sys 0m0.280s + + cat /data/crossref/crossref-works.2018-01-21.badsample_100k.json | time parallel -j4 --round-robin --pipe ./fatcat_import.py import-crossref - /data/issn/20180216.ISSN-to-ISSN-L.txt + 96.68user 5.25system 3:23.05elapsed 50%CPU (0avgtext+0avgdata 385516maxresident)k + 0inputs+389168outputs (0major+586810minor)pagefaults 0swaps + # 3:23.05 elapsed + +AUTO-ACCEPT branch 8cccbcdef11e7ddc761ec494cb894a8d49a0d510 + + time ./fatcat_import.py import-issn /home/bnewbold/code/oa-journals-analysis/upload-2018-04-05/journal_extra_metadata.csv + real 1m7.757s + user 0m6.240s + sys 0m0.228s + + cat /data/crossref/crossref-works.2018-01-21.badsample_100k.json | time parallel -j4 --round-robin --pipe ./fatcat_import.py import-crossref - /data/issn/20180216.ISSN-to-ISSN-L.txt + 91.73user 4.65system 3:26.61elapsed 46%CPU (0avgtext+0avgdata 385512maxresident)k + 0inputs+384976outputs (0major+580544minor)pagefaults 0swaps + +So, annecdotally, seems like autoaccept didn't do much here (not +vacuum-limited?), but the row inserts were more than a factor of 2x performance +improvement. Great! Could try experimenting with even larger batch sizes... + + +## Production Import + +http-verb branch 7354899493f6448bed5698ad6ade1dbebcf39379 + + time ./fatcat_import.py import-issn /srv/datasets/journal_extra_metadata.csv + + real 0m30.845s + user 0m8.884s + sys 0m0.312s + + + time parallel --bar --pipepart -j8 -a /srv/datasets/public_profiles_1_2_json.all.json ./fatcat_import.py import-orcid - + # TPS: 1181 + real 6m54.019s => down from 22m! + user 25m33.980s + sys 1m30.480s + + xzcat /srv/datasets/crossref-works.2018-01-21.json.xz | time parallel -j20 --round-robin --pipe ./fatcat_import.py import-crossref - /srv/datasets/20180216.ISSN-to-ISSN-L.txt + # fatcatd at 200% CPU (2 full cores); many PIDs/workers, but only one so busy (must be diesel/db driver?) + # parallel at ~80% + # postgres pretty busy; looks like doing PARSE on ever request? some idle in transaction + # postgres still does a vacuum to analyze; good! + # ~600-1000 TPS near start (large variance) + # left to run overnight... + # slowed down to ~50-80 TPS about 10 hours later + # lots of IOWAIT + # only at 40309165 rows in the morning; this could take a long time + + # PostgreSQL 10.4 - wbgrp-svc500.us.archive.org - postgres@localhost:5432/postgres - Ref.: 2s + # Size: 153.71G - 1.90M/s | TPS: 77 + # Mem.: 68.70% - 22.60G/49.14G | IO Max: 20448/s + # Swap: 3.80% - 1.88G/50.00G | Read : 10.17M/s - 2603/s + # Load: 11.71 11.80 12.01 | Write: 4.00M/s - 1024/s + + 92530.17user 2891.76system 35:45:15elapsed 74%CPU (0avgtext+0avgdata 463520maxresident)k + 1093736inputs+302588448outputs (52568major+36405225minor)pagefaults 0swaps + # 35:45:15 elapsed + + time ./fatcat_import.py import-manifest /srv/datasets/idents_files_urls.sqlite + +## Perf Tweaks + + + + SELECT + relname, + seq_scan - idx_scan AS too_much_seq, + CASE + WHEN + seq_scan - coalesce(idx_scan, 0) > 0 + THEN + 'Missing Index?' 
+ ELSE + 'OK' + END, + pg_relation_size(relname::regclass) AS rel_size, seq_scan, idx_scan + FROM + pg_stat_all_tables + WHERE + schemaname = 'public' + AND pg_relation_size(relname::regclass) > 80000 + ORDER BY + too_much_seq DESC; + + relname | too_much_seq | case | rel_size | seq_scan | idx_scan +----------------------+--------------+------+--------------+----------+----------- + file_rev_url | -1 | OK | 163323904 | 2 | 3 + file_release | -3 | OK | 24461312 | 2 | 5 + release_edit | -10265 | OK | 6080495616 | 2 | 10267 + container_edit | -10265 | OK | 31170560 | 2 | 10267 + work_edit | -10265 | OK | 6080364544 | 2 | 10267 + file_edit | -10265 | OK | 49111040 | 2 | 10267 + creator_edit | -10265 | OK | 330924032 | 2 | 10267 + changelog | -11692 | OK | 106668032 | 2 | 11694 + release_ref | -374197 | OK | 125437673472 | 3 | 374200 + release_contrib | -374202 | OK | 16835354624 | 3 | 374205 + release_rev_abstract | -374919 | OK | 162250752 | 3 | 374922 + file_ident | -1047172 | OK | 41926656 | 2 | 1047174 + container_rev | -1378943 | OK | 50356224 | 724612 | 2103555 <= + file_rev | -2407647 | OK | 68493312 | 4 | 2407651 + abstracts | -2765610 | OK | 1450901504 | 1 | 2765611 + creator_ident | -7127467 | OK | 242688000 | 2 | 7127469 + creator_rev | -7943007 | OK | 353370112 | 839155 | 8782162 <= + container_ident | -57488245 | OK | 23060480 | 2 | 57488247 + release_ident | -66085389 | OK | 4459159552 | 14 | 66085403 + work_rev | -130613391 | OK | 2892775424 | 1 | 130613392 + work_ident | -130633923 | OK | 4459192320 | 2 | 130633925 + editgroup | -136775970 | OK | 120561664 | 1 | 136775971 + release_rev | -718337215 | OK | 36850507776 | 8 | 718337223 + + +Slowest queries: + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = $1 AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = $2 AND "creator_ident"."redirect_id" IS NULL LIMIT $3 + + SELECT "container_ident"."id", "container_ident"."is_live", "container_ident"."rev_id", "container_ident"."redirect_id", "container_rev"."id", "container_rev"."extra_json", "container_rev"."name", "container_rev"."publisher", "container_rev"."issnl", "container_rev"."wikidata_qid", "container_rev"."abbrev", "container_rev"."coden" FROM ("container_ident" INNER JOIN "container_rev" ON "container_ident"."rev_id" = "container_rev"."id") WHERE "container_rev"."issnl" = $1 AND "container_rev"."issnl" IS NOT NULL AND "container_ident"."is_live" = $2 AND "container_ident"."redirect_id" IS NULL LIMIT $3 + + SELECT $2 FROM ONLY "public"."release_rev" x WHERE "id" OPERATOR(pg_catalog.=) $1 FOR KEY SHARE OF x:...skipping... 
+ + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = $1 AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = $2 AND "creator_ident"."redirect_id" IS NULL LIMIT $3 + + +DEBUG: (while a file import is running) + +Creator lookup where row exists: + + time curl localhost:9411/v0/creator/lookup?orcid=0000-0002-8867-1663 + real 0m0.020s + user 0m0.004s + sys 0m0.004s + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-1663' AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1; + => (1 row) + Time: 0.988 ms + +Creator lookup where row doesn't exist: + + bnewbold@wbgrp-svc500$ time curl localhost:9411/v0/creator/lookup?orcid=0000-0002-8867-166X + {"message":"Not found: 0000-0002-8867-166X"} + real 0m1.282s + user 0m0.008s + sys 0m0.000s + Sep 10 21:50:49 wbgrp-svc500.us.archive.org fatcat-api[25327]: Sep 10 21:50:49.231 INFO GET http://localhost:9411/v0/creator/lookup?orcid=0000-0002-8867-166X 404 Not Found (1282 ms) + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-166X' AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1; + => (0 rows) + Time: 0.810 ms + +------- + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-1663' AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1; + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-1663' AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1; + SELECT "creator_ident"."id", 
"creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-166X' AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT 1; + +from logs: + + execute __diesel_stmt_5: SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = $1 AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = $2 AND "creator_ident"."redirect_id" IS NULL LIMIT $3 + +NOTE: have been doing *full* postgres logs this whole time! probably a ton of disk churn. :( + + + exact logs: + SET TIME ZONE 'UTC' + SET CLIENT_ENCODING TO 'UTF8' + SELECT 1 + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = $1 AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = $2 AND "creator_ident"."redirect_id" IS NULL LIMIT $3 + parameters: $1 = '0000-0002-8867-166X', $2 = 't', $3 = '1' + + SELECT "creator_ident"."id", "creator_ident"."is_live", "creator_ident"."rev_id", "creator_ident"."redirect_id", "creator_rev"."id", "creator_rev"."extra_json", "creator_rev"."display_name", "creator_rev"."given_name", "creator_rev"."surname", "creator_rev"."orcid", "creator_rev"."wikidata_qid" FROM ("creator_ident" INNER JOIN "creator_rev" ON "creator_ident"."rev_id" = "creator_rev"."id") WHERE "creator_rev"."orcid" = '0000-0002-8867-166X' AND "creator_rev"."orcid" IS NOT NULL AND "creator_ident"."is_live" = 't' AND "creator_ident"."redirect_id" IS NULL LIMIT '1'; + +CHANGES: +- restart fatcat-api +- restart postgresql +- select pg_stat_reset(); + +seems like the fetch involves a index scans on creator_rev and creator_ident, *and* a scan on creator_rev. 
+ + + table_name | table_size | indexes_size | total_size +--------------------------------------------------------------+------------+--------------+------------ + "public"."release_ref" | 117 GB | 34 GB | 151 GB + "public"."release_rev" | 34 GB | 9202 MB | 43 GB + "public"."release_contrib" | 16 GB | 17 GB | 33 GB + "public"."work_edit" | 5800 MB | 4033 MB | 9833 MB + "public"."release_edit" | 5800 MB | 4032 MB | 9833 MB + "public"."work_ident" | 4254 MB | 5102 MB | 9356 MB + "public"."release_ident" | 4254 MB | 5095 MB | 9349 MB + "public"."work_rev" | 2759 MB | 2545 MB | 5304 MB + "public"."abstracts" | 1417 MB | 114 MB | 1530 MB + "public"."creator_rev" | 337 MB | 277 MB | 614 MB + "public"."creator_edit" | 316 MB | 221 MB | 536 MB + "public"."creator_ident" | 232 MB | 279 MB | 511 MB + "public"."file_rev_url" | 260 MB | 101 MB | 361 MB + "public"."release_rev_abstract" | 155 MB | 200 MB | 355 MB + "public"."file_rev" | 109 MB | 124 MB | 233 MB + "public"."changelog" | 102 MB | 106 MB | 208 MB + "public"."editgroup" | 116 MB | 69 MB | 184 MB + "public"."file_ident" | 66 MB | 70 MB | 136 MB + "public"."file_edit" | 78 MB | 53 MB | 131 MB + "public"."file_release" | 38 MB | 64 MB | 103 MB + "public"."container_rev" | 48 MB | 26 MB | 74 MB + "public"."container_edit" | 30 MB | 20 MB | 50 MB + "public"."container_ident" | 22 MB | 26 MB | 48 MB + +CHANGES: +- remove "IS NOT NULL" from creator_rev, that seemed to be a significant speedup for the "row not found" case. + diff --git a/python/fatcat/importer_common.py b/python/fatcat/importer_common.py index 0b02d175..74a57ac1 100644 --- a/python/fatcat/importer_common.py +++ b/python/fatcat/importer_common.py @@ -23,7 +23,7 @@ class FatcatImporter: self._orcid_id_map = dict() self._doi_id_map = dict() self._issn_issnl_map = None - self._orcid_regex = re.compile("^\\d{4}-\\d{4}-\\d{4}-\\d{4}$") + self._orcid_regex = re.compile("^\\d{4}-\\d{4}-\\d{4}-\\d{3}[\\dX]$") if issn_map_file: self.read_issn_map_file(issn_map_file) diff --git a/python/tests/files/0000-0003-3953-765X.json b/python/tests/files/0000-0003-3953-765X.json new file mode 100644 index 00000000..9699d1bf --- /dev/null +++ b/python/tests/files/0000-0003-3953-765X.json @@ -0,0 +1 @@ 
+{"orcid-identifier":{"uri":"http://orcid.org/0000-0003-3953-765X","path":"0000-0003-3953-765X","host":"orcid.org"},"preferences":{"locale":"en"},"history":{"creation-method":"Member-referred","completion-date":null,"submission-date":{"value":1407501041999},"last-modified-date":{"value":1465949566770},"claimed":true,"source":null,"deactivation-date":null,"verified-email":true,"verified-primary-email":true},"person":{"last-modified-date":null,"name":{"created-date":{"value":1460755375159},"last-modified-date":{"value":1460755375159},"given-names":{"value":"Man-Hui"},"family-name":{"value":"Li"},"credit-name":null,"source":null,"visibility":"public","path":"0000-0003-3953-765X"},"other-names":{"last-modified-date":null,"other-name":null,"path":"/0000-0003-3953-765X/other-names"},"biography":{"created-date":{"value":1460755375161},"last-modified-date":{"value":1460755375161},"content":null,"visibility":"public","path":"/0000-0003-3953-765X/biography"},"researcher-urls":{"last-modified-date":null,"researcher-url":null,"path":"/0000-0003-3953-765X/researcher-urls"},"emails":{"last-modified-date":null,"email":null,"path":"/0000-0003-3953-765X/email"},"addresses":{"last-modified-date":null,"address":null,"path":"/0000-0003-3953-765X/address"},"keywords":{"last-modified-date":null,"keyword":null,"path":"/0000-0003-3953-765X/keywords"},"external-identifiers":{"last-modified-date":null,"external-identifier":null,"path":"/0000-0003-3953-765X/external-identifiers"},"path":"/0000-0003-3953-765X/person"},"activities-summary":{"last-modified-date":null,"educations":{"last-modified-date":null,"education-summary":null,"path":"/0000-0003-3953-765X/educations"},"employments":{"last-modified-date":null,"employment-summary":null,"path":"/0000-0003-3953-765X/employments"},"fundings":{"last-modified-date":null,"group":null,"path":"/0000-0003-3953-765X/fundings"},"peer-reviews":{"last-modified-date":null,"group":null,"path":"/0000-0003-3953-765X/peer-reviews"},"works":{"last-modified-date":null,"group":null,"path":"/0000-0003-3953-765X/works"},"path":"/0000-0003-3953-765X/activities"},"path":"/0000-0003-3953-765X"} diff --git a/python/tests/importer.py b/python/tests/importer.py index 4d49e794..22af37ed 100644 --- a/python/tests/importer.py +++ b/python/tests/importer.py @@ -29,6 +29,7 @@ def test_identifiers(): assert fi.is_doi("10.1234_56789") == False assert fi.is_orcid("0000-0003-3118-6591") == True + assert fi.is_orcid("0000-0003-3953-765X") == True assert fi.is_orcid("0000-00x3-3118-659") == False assert fi.is_orcid("0000-00033118-659") == False assert fi.is_orcid("0000-0003-3118-659.") == False diff --git a/python/tests/orcid.py b/python/tests/orcid.py index e07583ac..ae3d0d0b 100644 --- a/python/tests/orcid.py +++ b/python/tests/orcid.py @@ -21,6 +21,12 @@ def test_orcid_importer(orcid_importer): with open('tests/files/0000-0001-8254-7103.json', 'r') as f: orcid_importer.process_source(f) +def test_orcid_importer_x(orcid_importer): + with open('tests/files/0000-0003-3953-765X.json', 'r') as f: + orcid_importer.process_source(f) + c = orcid_importer.api.lookup_creator(orcid="0000-0003-3953-765X") + assert c is not None + def test_orcid_dict_parse(orcid_importer): with open('tests/files/0000-0001-8254-7103.json', 'r') as f: raw = json.loads(f.readline()) diff --git a/rust/HACKING.md b/rust/HACKING.md index 622a4b5a..0dde5058 100644 --- a/rust/HACKING.md +++ b/rust/HACKING.md @@ -8,11 +8,11 @@ swagger API spec on one side and the SQL schema on the other. 
- `./src/database_schema.rs`: autogenerated per-table Diesel schemas - `./src/database_models.rs`: hand- and macro-generated Rust structs matching Diesel schemas, and a small number of row-level helpers -- `./src/database_entity_crud.rs`: "struct-relational-mapping"; trait +- `./src/api_entity_crud.rs`: "struct-relational-mapping"; trait implementations of CRUD (create, read, update, delete) actions for each entity model, hitting the database (and building on `database_model` structs) - `./src/api_server.rs`: one function for each API endpoint, with rust-style - arguments and return types. mostly calls in to `database_entity_crud`. + arguments and return types. mostly calls in to `api_entity_crud`. - `./src/api_wrappers.rs`: hand- and macro-generated wrapper functions, one per API endpoint, that map between API request and return types, and rust-idiomatic request and return types (plus API models). @@ -1,16 +1,10 @@ -finish refactor: -- database_entity_crud -> api_entity_crud -x merge autoaccept branch in with http-verbs branch -- direct CRUD calls from api_wrappers (except, maybe, batch?) - => generally, standardize "edit" actions -- FatCatId and edit context between wrappers and handlers -- review editgroup accept code - verbs: - enforce "previous_rev" required in updates +- review editgroup accept code (?) - fatcat_api -> fatcat_api_schema (or spec? models? types?) +- generally, standardize "edit" actions - fatcat -> fatcat-api-server - editgroup param to update => also for creation? for consistency diff --git a/rust/fatcat-api/README.md b/rust/fatcat-api/README.md index c971b88c..1b566766 100644 --- a/rust/fatcat-api/README.md +++ b/rust/fatcat-api/README.md @@ -13,7 +13,7 @@ To see how to make this your own, look here: [README](https://github.com/swagger-api/swagger-codegen/blob/master/README.md) - API version: 0.1.0 -- Build date: 2018-09-08T04:52:59.479Z +- Build date: 2018-09-11T02:27:08.863Z This autogenerated project defines an API crate `fatcat` which contains: * An `Api` trait defining the API in Rust. 
diff --git a/rust/fatcat-api/api.yaml b/rust/fatcat-api/api.yaml index a8919216..2b0615d2 100644 --- a/rust/fatcat-api/api.yaml +++ b/rust/fatcat-api/api.yaml @@ -671,7 +671,7 @@ paths: operationId: "get_creator_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -939,7 +939,7 @@ paths: operationId: "get_release_files" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1081,7 +1081,7 @@ paths: operationId: "get_work_releases" responses: 200: - description: Found Entity + description: Found schema: type: array items: @@ -1097,9 +1097,13 @@ paths: operationId: "get_editor" responses: 200: - description: Found Editor + description: Found schema: $ref: "#/definitions/editor" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1118,11 +1122,15 @@ paths: operationId: "get_editor_changelog" responses: 200: - description: Found Merged Changes + description: Found schema: type: array items: $ref: "#/definitions/changelog_entry" + 400: + description: Bad Request + schema: + $ref: "#/definitions/error_response" 404: description: Not Found schema: @@ -1163,7 +1171,7 @@ paths: operationId: "get_editgroup" responses: 200: - description: Found Entity + description: Found schema: $ref: "#/definitions/editgroup" 400: diff --git a/rust/fatcat-api/api/swagger.yaml b/rust/fatcat-api/api/swagger.yaml index 0b1ca88a..9bc84351 100644 --- a/rust/fatcat-api/api/swagger.yaml +++ b/rust/fatcat-api/api/swagger.yaml @@ -834,13 +834,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/release_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_CREATOR_RELEASES" uppercase_data_type: "VEC<RELEASEENTITY>" producesJson: true @@ -1749,13 +1749,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/file_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_RELEASE_FILES" uppercase_data_type: "VEC<FILEENTITY>" producesJson: true @@ -2232,13 +2232,13 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: type: "array" items: $ref: "#/definitions/release_entity" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_WORK_RELEASES" uppercase_data_type: "VEC<RELEASEENTITY>" producesJson: true @@ -2286,14 +2286,23 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Editor" + description: "Found" schema: $ref: "#/definitions/editor" - x-responseId: "FoundEditor" - x-uppercaseResponseId: "FOUND_EDITOR" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITOR" uppercase_data_type: "EDITOR" producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "GET_EDITOR" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true 404: description: "Not Found" 
schema: @@ -2329,16 +2338,25 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Merged Changes" + description: "Found" schema: type: "array" items: $ref: "#/definitions/changelog_entry" - x-responseId: "FoundMergedChanges" - x-uppercaseResponseId: "FOUND_MERGED_CHANGES" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITOR_CHANGELOG" uppercase_data_type: "VEC<CHANGELOGENTRY>" producesJson: true + 400: + description: "Bad Request" + schema: + $ref: "#/definitions/error_response" + x-responseId: "BadRequest" + x-uppercaseResponseId: "BAD_REQUEST" + uppercase_operation_id: "GET_EDITOR_CHANGELOG" + uppercase_data_type: "ERRORRESPONSE" + producesJson: true 404: description: "Not Found" schema: @@ -2428,11 +2446,11 @@ paths: example: "\"id_example\".to_string()" responses: 200: - description: "Found Entity" + description: "Found" schema: $ref: "#/definitions/editgroup" - x-responseId: "FoundEntity" - x-uppercaseResponseId: "FOUND_ENTITY" + x-responseId: "Found" + x-uppercaseResponseId: "FOUND" uppercase_operation_id: "GET_EDITGROUP" uppercase_data_type: "EDITGROUP" producesJson: true diff --git a/rust/fatcat-api/src/client.rs b/rust/fatcat-api/src/client.rs index 6f61f773..a08e3cfe 100644 --- a/rust/fatcat-api/src/client.rs +++ b/rust/fatcat-api/src/client.rs @@ -1749,7 +1749,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<Vec<models::ReleaseEntity>>(&buf)?; - Ok(GetCreatorReleasesResponse::FoundEntity(body)) + Ok(GetCreatorReleasesResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -1809,7 +1809,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<models::Editgroup>(&buf)?; - Ok(GetEditgroupResponse::FoundEntity(body)) + Ok(GetEditgroupResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -1869,7 +1869,14 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<models::Editor>(&buf)?; - Ok(GetEditorResponse::FoundEditor(body)) + Ok(GetEditorResponse::Found(body)) + } + 400 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; + + Ok(GetEditorResponse::BadRequest(body)) } 404 => { let mut buf = String::new(); @@ -1922,7 +1929,14 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<Vec<models::ChangelogEntry>>(&buf)?; - Ok(GetEditorChangelogResponse::FoundMergedChanges(body)) + Ok(GetEditorChangelogResponse::Found(body)) + } + 400 => { + let mut buf = String::new(); + response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; + let body = serde_json::from_str::<models::ErrorResponse>(&buf)?; + + Ok(GetEditorChangelogResponse::BadRequest(body)) } 404 => { let mut buf = String::new(); @@ -2179,7 +2193,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<Vec<models::FileEntity>>(&buf)?; - Ok(GetReleaseFilesResponse::FoundEntity(body)) + 
Ok(GetReleaseFilesResponse::Found(body)) } 400 => { let mut buf = String::new(); @@ -2492,7 +2506,7 @@ impl Api for Client { response.read_to_string(&mut buf).map_err(|e| ApiError(format!("Response was not valid UTF8: {}", e)))?; let body = serde_json::from_str::<Vec<models::ReleaseEntity>>(&buf)?; - Ok(GetWorkReleasesResponse::FoundEntity(body)) + Ok(GetWorkReleasesResponse::Found(body)) } 400 => { let mut buf = String::new(); diff --git a/rust/fatcat-api/src/lib.rs b/rust/fatcat-api/src/lib.rs index fc1ae2a1..a08c6e04 100644 --- a/rust/fatcat-api/src/lib.rs +++ b/rust/fatcat-api/src/lib.rs @@ -304,8 +304,8 @@ pub enum GetCreatorHistoryResponse { #[derive(Debug, PartialEq)] pub enum GetCreatorReleasesResponse { - /// Found Entity - FoundEntity(Vec<models::ReleaseEntity>), + /// Found + Found(Vec<models::ReleaseEntity>), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -316,8 +316,8 @@ pub enum GetCreatorReleasesResponse { #[derive(Debug, PartialEq)] pub enum GetEditgroupResponse { - /// Found Entity - FoundEntity(models::Editgroup), + /// Found + Found(models::Editgroup), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -328,8 +328,10 @@ pub enum GetEditgroupResponse { #[derive(Debug, PartialEq)] pub enum GetEditorResponse { - /// Found Editor - FoundEditor(models::Editor), + /// Found + Found(models::Editor), + /// Bad Request + BadRequest(models::ErrorResponse), /// Not Found NotFound(models::ErrorResponse), /// Generic Error @@ -338,8 +340,10 @@ pub enum GetEditorResponse { #[derive(Debug, PartialEq)] pub enum GetEditorChangelogResponse { - /// Found Merged Changes - FoundMergedChanges(Vec<models::ChangelogEntry>), + /// Found + Found(Vec<models::ChangelogEntry>), + /// Bad Request + BadRequest(models::ErrorResponse), /// Not Found NotFound(models::ErrorResponse), /// Generic Error @@ -384,8 +388,8 @@ pub enum GetReleaseResponse { #[derive(Debug, PartialEq)] pub enum GetReleaseFilesResponse { - /// Found Entity - FoundEntity(Vec<models::FileEntity>), + /// Found + Found(Vec<models::FileEntity>), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found @@ -440,8 +444,8 @@ pub enum GetWorkHistoryResponse { #[derive(Debug, PartialEq)] pub enum GetWorkReleasesResponse { - /// Found Entity - FoundEntity(Vec<models::ReleaseEntity>), + /// Found + Found(Vec<models::ReleaseEntity>), /// Bad Request BadRequest(models::ErrorResponse), /// Not Found diff --git a/rust/fatcat-api/src/mimetypes.rs b/rust/fatcat-api/src/mimetypes.rs index 2c54a313..ff2c12ce 100644 --- a/rust/fatcat-api/src/mimetypes.rs +++ b/rust/fatcat-api/src/mimetypes.rs @@ -362,7 +362,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetCreatorReleases lazy_static! { - pub static ref GET_CREATOR_RELEASES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_CREATOR_RELEASES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetCreatorReleases lazy_static! { @@ -378,7 +378,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditgroup lazy_static! { - pub static ref GET_EDITGROUP_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_EDITGROUP_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditgroup lazy_static! { @@ -394,7 +394,11 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditor lazy_static! 
{ - pub static ref GET_EDITOR_FOUND_EDITOR: Mime = mime!(Application / Json); + pub static ref GET_EDITOR_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for GetEditor + lazy_static! { + pub static ref GET_EDITOR_BAD_REQUEST: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditor lazy_static! { @@ -406,7 +410,11 @@ pub mod responses { } /// Create Mime objects for the response content types for GetEditorChangelog lazy_static! { - pub static ref GET_EDITOR_CHANGELOG_FOUND_MERGED_CHANGES: Mime = mime!(Application / Json); + pub static ref GET_EDITOR_CHANGELOG_FOUND: Mime = mime!(Application / Json); + } + /// Create Mime objects for the response content types for GetEditorChangelog + lazy_static! { + pub static ref GET_EDITOR_CHANGELOG_BAD_REQUEST: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetEditorChangelog lazy_static! { @@ -466,7 +474,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetReleaseFiles lazy_static! { - pub static ref GET_RELEASE_FILES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_RELEASE_FILES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetReleaseFiles lazy_static! { @@ -538,7 +546,7 @@ pub mod responses { } /// Create Mime objects for the response content types for GetWorkReleases lazy_static! { - pub static ref GET_WORK_RELEASES_FOUND_ENTITY: Mime = mime!(Application / Json); + pub static ref GET_WORK_RELEASES_FOUND: Mime = mime!(Application / Json); } /// Create Mime objects for the response content types for GetWorkReleases lazy_static! { diff --git a/rust/fatcat-api/src/server.rs b/rust/fatcat-api/src/server.rs index 04d10e14..dfc94a81 100644 --- a/rust/fatcat-api/src/server.rs +++ b/rust/fatcat-api/src/server.rs @@ -2361,11 +2361,11 @@ where match api.get_creator_releases(param_id, context).wait() { Ok(rsp) => match rsp { - GetCreatorReleasesResponse::FoundEntity(body) => { + GetCreatorReleasesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_CREATOR_RELEASES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_CREATOR_RELEASES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2449,11 +2449,11 @@ where match api.get_editgroup(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditgroupResponse::FoundEntity(body) => { + GetEditgroupResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITGROUP_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITGROUP_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2537,11 +2537,21 @@ where match api.get_editor(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditorResponse::FoundEditor(body) => { + GetEditorResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = 
Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_FOUND_EDITOR.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + GetEditorResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_BAD_REQUEST.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2615,11 +2625,21 @@ where match api.get_editor_changelog(param_id, context).wait() { Ok(rsp) => match rsp { - GetEditorChangelogResponse::FoundMergedChanges(body) => { + GetEditorChangelogResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_FOUND_MERGED_CHANGES.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_FOUND.clone())); + + context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); + + Ok(response) + } + GetEditorChangelogResponse::BadRequest(body) => { + let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); + + let mut response = Response::with((status::Status::from_u16(400), body_string)); + response.headers.set(ContentType(mimetypes::responses::GET_EDITOR_CHANGELOG_BAD_REQUEST.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -2969,11 +2989,11 @@ where match api.get_release_files(param_id, context).wait() { Ok(rsp) => match rsp { - GetReleaseFilesResponse::FoundEntity(body) => { + GetReleaseFilesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_RELEASE_FILES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_RELEASE_FILES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); @@ -3391,11 +3411,11 @@ where match api.get_work_releases(param_id, context).wait() { Ok(rsp) => match rsp { - GetWorkReleasesResponse::FoundEntity(body) => { + GetWorkReleasesResponse::Found(body) => { let body_string = serde_json::to_string(&body).expect("impossible to fail to serialize"); let mut response = Response::with((status::Status::from_u16(200), body_string)); - response.headers.set(ContentType(mimetypes::responses::GET_WORK_RELEASES_FOUND_ENTITY.clone())); + response.headers.set(ContentType(mimetypes::responses::GET_WORK_RELEASES_FOUND.clone())); context.x_span_id.as_ref().map(|header| response.headers.set(XSpanId(header.clone()))); diff --git a/rust/migrations/2018-05-12-001226_init/up.sql b/rust/migrations/2018-05-12-001226_init/up.sql index 3ca954ca..bb62ba75 100644 --- a/rust/migrations/2018-05-12-001226_init/up.sql +++ b/rust/migrations/2018-05-12-001226_init/up.sql @@ -63,8 +63,8 @@ CREATE TABLE creator_rev ( -- Could denormalize a "is_live" flag into revision tables, to make indices -- more 
efficient -CREATE INDEX creator_rev_orcid_idx ON creator_rev(orcid); -- WHERE orcid IS NOT NULL; -CREATE INDEX creator_rev_wikidata_idx ON creator_rev(wikidata_qid); -- WHERE wikidata_qid IS NOT NULL; +CREATE INDEX creator_rev_orcid_idx ON creator_rev(orcid); +CREATE INDEX creator_rev_wikidata_idx ON creator_rev(wikidata_qid); CREATE TABLE creator_ident ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), @@ -101,8 +101,8 @@ CREATE TABLE container_rev ( coden TEXT ); -CREATE INDEX container_rev_issnl_idx ON container_rev(issnl); -- WHERE issnl IS NOT NULL; -CREATE INDEX container_rev_wikidata_idx ON container_rev(wikidata_qid); -- WHERE wikidata_qid IS NOT NULL; +CREATE INDEX container_rev_issnl_idx ON container_rev(issnl); +CREATE INDEX container_rev_wikidata_idx ON container_rev(wikidata_qid); CREATE TABLE container_ident ( id UUID PRIMARY KEY DEFAULT gen_random_uuid(), @@ -138,9 +138,9 @@ CREATE TABLE file_rev ( mimetype TEXT ); -CREATE INDEX file_rev_sha1_idx ON file_rev(sha1); -- WHERE sha1 IS NOT NULL; -CREATE INDEX file_rev_md5_idx ON file_rev(md5); -- WHERE md5 IS NOT NULL; -CREATE INDEX file_rev_sha256_idx ON file_rev(sha256); -- WHERE sha256 IS NOT NULL; +CREATE INDEX file_rev_sha1_idx ON file_rev(sha1); +CREATE INDEX file_rev_md5_idx ON file_rev(md5); +CREATE INDEX file_rev_sha256_idx ON file_rev(sha256); CREATE TABLE file_rev_url ( id BIGSERIAL PRIMARY KEY, @@ -199,13 +199,13 @@ CREATE TABLE release_rev ( -- TODO: identifier table? ); -CREATE INDEX release_rev_doi_idx ON release_rev(doi); -- WHERE doi IS NOT NULL; -CREATE INDEX release_rev_pmid_idx ON release_rev(pmid); -- WHERE pmid IS NOT NULL; -CREATE INDEX release_rev_pmcid_idx ON release_rev(pmcid); -- WHERE pmcid IS NOT NULL; -CREATE INDEX release_rev_wikidata_idx ON release_rev(wikidata_qid); -- WHERE wikidata_qid IS NOT NULL; -CREATE INDEX release_rev_isbn13_idx ON release_rev(isbn13); -- WHERE isbn13 IS NOT NULL; -CREATE INDEX release_rev_core_idx ON release_rev(core_id); -- WHERE core_id IS NOT NULL; -CREATE INDEX release_rev_work_idx ON release_rev(work_ident_id); -- WHERE work_ident_id IS NOT NULL; +CREATE INDEX release_rev_doi_idx ON release_rev(doi); +CREATE INDEX release_rev_pmid_idx ON release_rev(pmid); +CREATE INDEX release_rev_pmcid_idx ON release_rev(pmcid); +CREATE INDEX release_rev_wikidata_idx ON release_rev(wikidata_qid); +CREATE INDEX release_rev_isbn13_idx ON release_rev(isbn13); +CREATE INDEX release_rev_core_idx ON release_rev(core_id); +CREATE INDEX release_rev_work_idx ON release_rev(work_ident_id); CREATE TABLE release_rev_abstract ( id BIGSERIAL PRIMARY KEY, diff --git a/rust/src/database_entity_crud.rs b/rust/src/api_entity_crud.rs index 8b29ff28..2d5ea93d 100644 --- a/rust/src/database_entity_crud.rs +++ b/rust/src/api_entity_crud.rs @@ -6,19 +6,11 @@ use diesel::prelude::*; use diesel::{self, insert_into}; use errors::*; use fatcat_api::models::*; -use serde_json; use sha1::Sha1; use std::marker::Sized; use std::str::FromStr; use uuid::Uuid; -pub struct EditContext { - pub editor_id: FatCatId, - pub editgroup_id: FatCatId, - pub extra_json: Option<serde_json::Value>, - pub autoaccept: bool, -} - /* One goal here is to abstract the non-entity-specific bits into generic traits or functions, * instead of macros. 
* @@ -51,19 +43,9 @@ where fn parse_editgroup_id(&self) -> Result<Option<FatCatId>>; // Generic Methods - fn db_get( - conn: &DbConn, - ident: FatCatId - ) -> Result<Self>; - fn db_get_rev( - conn: &DbConn, - rev_id: Uuid - ) -> Result<Self>; - fn db_create( - &self, - conn: &DbConn, - edit_context: &EditContext - ) -> Result<Self::EditRow>; + fn db_get(conn: &DbConn, ident: FatCatId) -> Result<Self>; + fn db_get_rev(conn: &DbConn, rev_id: Uuid) -> Result<Self>; + fn db_create(&self, conn: &DbConn, edit_context: &EditContext) -> Result<Self::EditRow>; fn db_create_batch( conn: &DbConn, edit_context: &EditContext, @@ -85,10 +67,7 @@ where ident: FatCatId, limit: Option<i64>, ) -> Result<Vec<EntityHistoryEntry>>; - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId - ) -> Result<u64>; + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result<u64>; // Entity-specific Methods fn db_from_row( @@ -96,14 +75,8 @@ where rev_row: Self::RevRow, ident_row: Option<Self::IdentRow>, ) -> Result<Self>; - fn db_insert_rev( - &self, - conn: &DbConn - ) -> Result<Uuid>; - fn db_insert_revs( - conn: &DbConn, - models: &[&Self] - ) -> Result<Vec<Uuid>>; + fn db_insert_rev(&self, conn: &DbConn) -> Result<Uuid>; + fn db_insert_revs(conn: &DbConn, models: &[&Self]) -> Result<Vec<Uuid>>; } // TODO: this could be a separate trait on all entities @@ -333,13 +306,10 @@ macro_rules! generic_db_get_history { // UPDATE FROM version: single query for many rows // Works with Postgres, not Cockroach +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_batch { ($entity_name_str:expr) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result<u64> { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result<u64> { let count = diesel::sql_query(format!( " UPDATE {entity}_ident @@ -351,23 +321,20 @@ macro_rules! generic_db_accept_edits_batch { WHERE {entity}_ident.id = {entity}_edit.ident_id AND {entity}_edit.editgroup_id = $1", - entity = $entity_name_str + entity = $entity_name_str )).bind::<diesel::sql_types::Uuid, _>(editgroup_id.to_uuid()) .execute(conn)?; Ok(count as u64) } - } + }; } // UPDATE ROW version: single query per row // CockroachDB version (slow, single query per row) +#[allow(unused_macros)] macro_rules! generic_db_accept_edits_each { ($ident_table:ident, $edit_table:ident) => { - fn db_accept_edits( - conn: &DbConn, - editgroup_id: FatCatId, - ) -> Result<u64> { - + fn db_accept_edits(conn: &DbConn, editgroup_id: FatCatId) -> Result<u64> { // 1. select edit rows (in sql) let edit_rows: Vec<Self::EditRow> = $edit_table::table .filter($edit_table::editgroup_id.eq(&editgroup_id.to_uuid())) @@ -375,31 +342,26 @@ macro_rules! generic_db_accept_edits_each { // 2. create ident rows (in rust) let ident_rows: Vec<Self::IdentRow> = edit_rows .iter() - .map(|edit| - Self::IdentRow { - id: edit.ident_id, - is_live: true, - rev_id: edit.rev_id, - redirect_id: edit.redirect_id, - - } - ) + .map(|edit| Self::IdentRow { + id: edit.ident_id, + is_live: true, + rev_id: edit.rev_id, + redirect_id: edit.redirect_id, + }) .collect(); /* - // 3. upsert ident rows (in sql) - let count: u64 = diesel::insert_into($ident_table::table) - .values(ident_rows) - .on_conflict() - .do_update() - .set(ident_rows) - .execute(conn)?; - */ + // 3. upsert ident rows (in sql) + let count: u64 = diesel::insert_into($ident_table::table) + .values(ident_rows) + .on_conflict() + .do_update() + .set(ident_rows) + .execute(conn)?; + */ // 3. 
update every row individually let count = ident_rows.len() as u64; for row in ident_rows { - diesel::update(&row) - .set(&row) - .execute(conn)?; + diesel::update(&row).set(&row).execute(conn)?; } Ok(count) } diff --git a/rust/src/api_helpers.rs b/rust/src/api_helpers.rs index 8ab9dcb3..6c214223 100644 --- a/rust/src/api_helpers.rs +++ b/rust/src/api_helpers.rs @@ -1,18 +1,51 @@ +use api_entity_crud::EntityCrud; use data_encoding::BASE32_NOPAD; use database_models::*; use database_schema::*; -use fatcat_api::models::*; use diesel; use diesel::prelude::*; use errors::*; +use fatcat_api::models::*; use regex::Regex; +use serde_json; use std::str::FromStr; use uuid::Uuid; -use database_entity_crud::EntityCrud; pub type DbConn = diesel::r2d2::PooledConnection<diesel::r2d2::ConnectionManager<diesel::PgConnection>>; +pub struct EditContext { + pub editor_id: FatCatId, + pub editgroup_id: FatCatId, + pub extra_json: Option<serde_json::Value>, + pub autoaccept: bool, +} + +pub fn make_edit_context( + conn: &DbConn, + editgroup_id: Option<FatCatId>, + autoaccept: bool, +) -> Result<EditContext> { + let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth + let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { + (Some(eg), _) => eg, + // If autoaccept and no editgroup_id passed, always create a new one for this transaction + (None, true) => { + let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) + .values((editgroup::editor_id.eq(editor_id),)) + .get_result(conn)?; + FatCatId::from_uuid(&eg_row.id) + } + (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), + }; + Ok(EditContext { + editor_id: FatCatId::from_uuid(&editor_id), + editgroup_id: editgroup_id, + extra_json: None, + autoaccept: autoaccept, + }) +} + /// This function should always be run within a transaction pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result<Uuid> { // check for current active @@ -32,34 +65,33 @@ pub fn get_or_create_editgroup(editor_id: Uuid, conn: &DbConn) -> Result<Uuid> { } /// This function should always be run within a transaction -pub fn accept_editgroup(editgroup_id: Uuid, conn: &DbConn) -> Result<ChangelogRow> { +pub fn accept_editgroup(editgroup_id: FatCatId, conn: &DbConn) -> Result<ChangelogRow> { // check that we haven't accepted already (in changelog) // NB: could leave this to a UNIQUE constraint let count: i64 = changelog::table - .filter(changelog::editgroup_id.eq(editgroup_id)) + .filter(changelog::editgroup_id.eq(editgroup_id.to_uuid())) .count() .get_result(conn)?; if count > 0 { - return Err(ErrorKind::EditgroupAlreadyAccepted(uuid2fcid(&editgroup_id)).into()); + return Err(ErrorKind::EditgroupAlreadyAccepted(editgroup_id.to_string()).into()); } // copy edit columns to ident table - let eg_id = FatCatId::from_uuid(&editgroup_id); - ContainerEntity::db_accept_edits(conn, eg_id)?; - CreatorEntity::db_accept_edits(conn, eg_id)?; - FileEntity::db_accept_edits(conn, eg_id)?; - ReleaseEntity::db_accept_edits(conn, eg_id)?; - WorkEntity::db_accept_edits(conn, eg_id)?; + ContainerEntity::db_accept_edits(conn, editgroup_id)?; + CreatorEntity::db_accept_edits(conn, editgroup_id)?; + FileEntity::db_accept_edits(conn, editgroup_id)?; + ReleaseEntity::db_accept_edits(conn, editgroup_id)?; + WorkEntity::db_accept_edits(conn, editgroup_id)?; // append log/changelog row let entry: ChangelogRow = diesel::insert_into(changelog::table) - .values((changelog::editgroup_id.eq(editgroup_id),)) + 
.values((changelog::editgroup_id.eq(editgroup_id.to_uuid()),)) .get_result(conn)?; // update any editor's active editgroup let no_active: Option<Uuid> = None; diesel::update(editor::table) - .filter(editor::active_editgroup_id.eq(editgroup_id)) + .filter(editor::active_editgroup_id.eq(editgroup_id.to_uuid())) .set(editor::active_editgroup_id.eq(no_active)) .execute(conn)?; Ok(entry) diff --git a/rust/src/api_server.rs b/rust/src/api_server.rs index 5fce8e64..076bc085 100644 --- a/rust/src/api_server.rs +++ b/rust/src/api_server.rs @@ -1,8 +1,8 @@ //! API endpoint handlers +use api_entity_crud::EntityCrud; use api_helpers::*; use chrono; -use database_entity_crud::{EditContext, EntityCrud}; use database_models::*; use database_schema::*; use diesel::prelude::*; @@ -11,7 +11,6 @@ use errors::*; use fatcat_api::models; use fatcat_api::models::*; use std::str::FromStr; -use uuid::Uuid; use ConnectionPool; macro_rules! entity_batch_handler { @@ -53,27 +52,6 @@ macro_rules! count_entity { }}; } -fn make_edit_context(conn: &DbConn, editgroup_id: Option<FatCatId>, autoaccept: bool) -> Result<EditContext> { - let editor_id = Uuid::parse_str("00000000-0000-0000-AAAA-000000000001")?; // TODO: auth - let editgroup_id: FatCatId = match (editgroup_id, autoaccept) { - (Some(eg), _) => eg, - // If autoaccept and no editgroup_id passed, always create a new one for this transaction - (None, true) => { - let eg_row: EditgroupRow = diesel::insert_into(editgroup::table) - .values((editgroup::editor_id.eq(editor_id),)) - .get_result(conn)?; - FatCatId::from_uuid(&eg_row.id) - }, - (None, false) => FatCatId::from_uuid(&get_or_create_editgroup(editor_id, conn)?), - }; - Ok(EditContext { - editor_id: FatCatId::from_uuid(&editor_id), - editgroup_id: editgroup_id, - extra_json: None, - autoaccept: autoaccept, - }) -} - #[derive(Clone)] pub struct Server { pub db_pool: ConnectionPool, @@ -82,11 +60,11 @@ pub struct Server { impl Server { pub fn get_container_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option<String>, conn: &DbConn, ) -> Result<ContainerEntity> { - ContainerEntity::db_get(conn, FatCatId::from_uuid(id)) + ContainerEntity::db_get(conn, id) } pub fn lookup_container_handler(&self, issnl: &str, conn: &DbConn) -> Result<ContainerEntity> { @@ -106,11 +84,11 @@ impl Server { pub fn get_creator_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option<String>, conn: &DbConn, ) -> Result<CreatorEntity> { - CreatorEntity::db_get(conn, FatCatId::from_uuid(id)) + CreatorEntity::db_get(conn, id) } pub fn lookup_creator_handler(&self, orcid: &str, conn: &DbConn) -> Result<CreatorEntity> { @@ -130,16 +108,14 @@ impl Server { pub fn get_creator_releases_handler( &self, - id: &str, + id: FatCatId, conn: &DbConn, ) -> Result<Vec<ReleaseEntity>> { - let id = fcid2uuid(&id)?; - // TODO: some kind of unique or group-by? 
let rows: Vec<(ReleaseRevRow, ReleaseIdentRow, ReleaseContribRow)> = release_rev::table .inner_join(release_ident::table) .inner_join(release_contrib::table) - .filter(release_contrib::creator_ident_id.eq(&id)) + .filter(release_contrib::creator_ident_id.eq(&id.to_uuid())) .filter(release_ident::is_live.eq(true)) .filter(release_ident::redirect_id.is_null()) .load(conn)?; @@ -152,11 +128,11 @@ impl Server { pub fn get_file_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option<String>, conn: &DbConn, ) -> Result<FileEntity> { - FileEntity::db_get(conn, FatCatId::from_uuid(id)) + FileEntity::db_get(conn, id) } pub fn lookup_file_handler(&self, sha1: &str, conn: &DbConn) -> Result<FileEntity> { @@ -175,19 +151,18 @@ impl Server { pub fn get_release_handler( &self, - id: &Uuid, + id: FatCatId, expand: Option<String>, conn: &DbConn, ) -> Result<ReleaseEntity> { - let mut release = ReleaseEntity::db_get(conn, FatCatId::from_uuid(id))?; + let mut release = ReleaseEntity::db_get(conn, id)?; // For now, if there is any expand param we do them all if expand.is_some() { - release.files = - Some(self.get_release_files_handler(&release.ident.clone().unwrap(), conn)?); + release.files = Some(self.get_release_files_handler(id, conn)?); if let Some(ref cid) = release.container_id { release.container = - Some(self.get_container_handler(&fcid2uuid(&cid)?, None, conn)?); + Some(self.get_container_handler(FatCatId::from_str(&cid)?, None, conn)?); } } Ok(release) @@ -208,13 +183,15 @@ impl Server { ReleaseEntity::db_from_row(conn, rev, Some(ident)) } - pub fn get_release_files_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<FileEntity>> { - let ident = FatCatId::from_str(id)?; - + pub fn get_release_files_handler( + &self, + id: FatCatId, + conn: &DbConn, + ) -> Result<Vec<FileEntity>> { let rows: Vec<(FileRevRow, FileIdentRow, FileReleaseRow)> = file_rev::table .inner_join(file_ident::table) .inner_join(file_release::table) - .filter(file_release::target_release_ident_id.eq(&ident.to_uuid())) + .filter(file_release::target_release_ident_id.eq(&id.to_uuid())) .filter(file_ident::is_live.eq(true)) .filter(file_ident::redirect_id.is_null()) .load(conn)?; @@ -226,19 +203,21 @@ impl Server { pub fn get_work_handler( &self, - id: &Uuid, + id: FatCatId, _expand: Option<String>, conn: &DbConn, ) -> Result<WorkEntity> { - WorkEntity::db_get(conn, FatCatId::from_uuid(id)) + WorkEntity::db_get(conn, id) } - pub fn get_work_releases_handler(&self, id: &str, conn: &DbConn) -> Result<Vec<ReleaseEntity>> { - let id = fcid2uuid(&id)?; - + pub fn get_work_releases_handler( + &self, + id: FatCatId, + conn: &DbConn, + ) -> Result<Vec<ReleaseEntity>> { let rows: Vec<(ReleaseRevRow, ReleaseIdentRow)> = release_rev::table .inner_join(release_ident::table) - .filter(release_rev::work_ident_id.eq(&id)) + .filter(release_rev::work_ident_id.eq(&id.to_uuid())) .filter(release_ident::is_live.eq(true)) .filter(release_ident::redirect_id.is_null()) .load(conn)?; @@ -248,165 +227,8 @@ impl Server { .collect() } - pub fn create_container_handler( - &self, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_container_handler( - &self, - id: &Uuid, - entity: models::ContainerEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = 
entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_container_handler( - &self, - id: &Uuid, - editgroup_id: Option<Uuid>, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ContainerEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_creator_handler( - &self, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_creator_handler( - &self, - id: &Uuid, - entity: models::CreatorEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_creator_handler( - &self, - id: &Uuid, - editgroup_id: Option<Uuid>, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = CreatorEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_file_handler( - &self, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_file_handler( - &self, - id: &Uuid, - entity: models::FileEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_file_handler( - &self, - id: &Uuid, - editgroup_id: Option<Uuid>, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = FileEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_release_handler( - &self, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn update_release_handler( - &self, - id: &Uuid, - entity: models::ReleaseEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - pub fn delete_release_handler( - &self, - id: &Uuid, - editgroup_id: Option<Uuid>, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = ReleaseEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn create_work_handler( - &self, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_create(conn, &edit_context)?; - edit.into_model() - } - - pub fn 
update_work_handler( - &self, - id: &Uuid, - entity: models::WorkEntity, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, entity.parse_editgroup_id()?, false)?; - let edit = entity.db_update(conn, &edit_context, FatCatId::from_uuid(id))?; - edit.into_model() - } - - pub fn delete_work_handler( - &self, - id: &Uuid, - editgroup_id: Option<Uuid>, - conn: &DbConn, - ) -> Result<EntityEdit> { - let edit_context = make_edit_context(conn, editgroup_id.map(|u| FatCatId::from_uuid(&u)), false)?; - let edit = WorkEntity::db_delete(conn, &edit_context, FatCatId::from_uuid(id))?; - - edit.into_model() - } - - pub fn accept_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<()> { - accept_editgroup(fcid2uuid(id)?, conn)?; + pub fn accept_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result<()> { + accept_editgroup(id, conn)?; Ok(()) } @@ -417,7 +239,7 @@ impl Server { ) -> Result<Editgroup> { let row: EditgroupRow = insert_into(editgroup::table) .values(( - editgroup::editor_id.eq(fcid2uuid(&entity.editor_id)?), + editgroup::editor_id.eq(FatCatId::from_str(&entity.editor_id)?.to_uuid()), editgroup::description.eq(entity.description), editgroup::extra_json.eq(entity.extra), )) @@ -432,14 +254,13 @@ impl Server { }) } - pub fn get_editgroup_handler(&self, id: &str, conn: &DbConn) -> Result<Editgroup> { - let id = fcid2uuid(id)?; - let row: EditgroupRow = editgroup::table.find(id).first(conn)?; + pub fn get_editgroup_handler(&self, id: FatCatId, conn: &DbConn) -> Result<Editgroup> { + let row: EditgroupRow = editgroup::table.find(id.to_uuid()).first(conn)?; let edits = EditgroupEdits { containers: Some( container_edit::table - .filter(container_edit::editgroup_id.eq(id)) + .filter(container_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: ContainerEditRow| e.into_model().unwrap()) @@ -447,7 +268,7 @@ impl Server { ), creators: Some( creator_edit::table - .filter(creator_edit::editgroup_id.eq(id)) + .filter(creator_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: CreatorEditRow| e.into_model().unwrap()) @@ -455,7 +276,7 @@ impl Server { ), files: Some( file_edit::table - .filter(file_edit::editgroup_id.eq(id)) + .filter(file_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: FileEditRow| e.into_model().unwrap()) @@ -463,7 +284,7 @@ impl Server { ), releases: Some( release_edit::table - .filter(release_edit::editgroup_id.eq(id)) + .filter(release_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? .into_iter() .map(|e: ReleaseEditRow| e.into_model().unwrap()) @@ -471,7 +292,7 @@ impl Server { ), works: Some( work_edit::table - .filter(work_edit::editgroup_id.eq(id)) + .filter(work_edit::editgroup_id.eq(id.to_uuid())) .get_results(conn)? 
.into_iter() .map(|e: WorkEditRow| e.into_model().unwrap()) @@ -489,9 +310,8 @@ impl Server { Ok(eg) } - pub fn get_editor_handler(&self, id: &str, conn: &DbConn) -> Result<Editor> { - let id = fcid2uuid(id)?; - let row: EditorRow = editor::table.find(id).first(conn)?; + pub fn get_editor_handler(&self, id: FatCatId, conn: &DbConn) -> Result<Editor> { + let row: EditorRow = editor::table.find(id.to_uuid()).first(conn)?; let ed = Editor { id: Some(uuid2fcid(&row.id)), @@ -500,14 +320,13 @@ impl Server { Ok(ed) } - pub fn editor_changelog_get_handler( + pub fn get_editor_changelog_handler( &self, - id: &str, + id: FatCatId, conn: &DbConn, ) -> Result<Vec<ChangelogEntry>> { - let id = fcid2uuid(id)?; // TODO: single query - let editor: EditorRow = editor::table.find(id).first(conn)?; + let editor: EditorRow = editor::table.find(id.to_uuid()).first(conn)?; let changes: Vec<(ChangelogRow, EditgroupRow)> = changelog::table .inner_join(editgroup::table) .filter(editgroup::editor_id.eq(editor.id)) @@ -552,7 +371,8 @@ impl Server { pub fn get_changelog_entry_handler(&self, id: i64, conn: &DbConn) -> Result<ChangelogEntry> { let cl_row: ChangelogRow = changelog::table.find(id).first(conn)?; - let editgroup = self.get_editgroup_handler(&uuid2fcid(&cl_row.editgroup_id), conn)?; + let editgroup = + self.get_editgroup_handler(FatCatId::from_uuid(&cl_row.editgroup_id), conn)?; let mut entry = cl_row.into_model(); entry.editgroup = Some(editgroup); @@ -638,45 +458,4 @@ impl Server { entity_batch_handler!(create_file_batch_handler, FileEntity); entity_batch_handler!(create_release_batch_handler, ReleaseEntity); entity_batch_handler!(create_work_batch_handler, WorkEntity); - - pub fn get_container_history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - ContainerEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_creator_history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - CreatorEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_file_history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - FileEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_release_history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - ReleaseEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } - pub fn get_work_history_handler( - &self, - id: &Uuid, - limit: Option<i64>, - conn: &DbConn, - ) -> Result<Vec<EntityHistoryEntry>> { - WorkEntity::db_get_history(conn, FatCatId::from_uuid(id), limit) - } } diff --git a/rust/src/api_wrappers.rs b/rust/src/api_wrappers.rs index faafe984..f6425c8c 100644 --- a/rust/src/api_wrappers.rs +++ b/rust/src/api_wrappers.rs @@ -1,13 +1,16 @@ //! API endpoint handlers -use api_helpers::fcid2uuid; +use api_entity_crud::EntityCrud; +use api_helpers::*; use api_server::Server; +use database_models::EntityEditRow; use diesel::Connection; use errors::*; use fatcat_api::models; use fatcat_api::models::*; use fatcat_api::*; use futures::{self, Future}; +use std::str::FromStr; /// Helper for generating wrappers (which return "Box::new(futures::done(Ok(BLAH)))" like the /// codegen fatcat-api code wants) that call through to actual helpers (which have simple Result<> @@ -17,11 +20,11 @@ macro_rules! 
wrap_entity_handlers { // stable doesn't have a mechanism to "concat" or generate new identifiers in macros, at least // in the context of defining new functions. // The only stable approach I know of would be: https://github.com/dtolnay/mashup - ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_handler:ident, + ($get_fn:ident, $get_handler:ident, $get_resp:ident, $post_fn:ident, $post_resp:ident, $post_batch_fn:ident, $post_batch_handler:ident, - $post_batch_resp:ident, $update_fn:ident, $update_handler:ident, $update_resp:ident, - $delete_fn:ident, $delete_handler:ident, $delete_resp:ident, $get_history_fn:ident, - $get_history_handler:ident, $get_history_resp:ident, $model:ident) => { + $post_batch_resp:ident, $update_fn:ident, $update_resp:ident, + $delete_fn:ident, $delete_resp:ident, $get_history_fn:ident, + $get_history_resp:ident, $model:ident) => { fn $get_fn( &self, @@ -29,13 +32,12 @@ macro_rules! wrap_entity_handlers { expand: Option<String>, _context: &Context, ) -> Box<Future<Item = $get_resp, Error = ApiError> + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($get_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); // No transaction for GET - let ret = match self.$get_handler(&id, expand, &conn) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + self.$get_handler(entity_id, expand, &conn) + }) { Ok(entity) => $get_resp::FoundEntity(entity), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -61,7 +63,10 @@ macro_rules! wrap_entity_handlers { _context: &Context, ) -> Box<Future<Item = $post_resp, Error = ApiError> + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$post_handler(entity, &conn)) { + let ret = match conn.transaction(|| { + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_create(&conn, &edit_context)?.into_model()?) + }) { Ok(edit) => $post_resp::CreatedEntity(edit), Err(Error(ErrorKind::Diesel(e), _)) => @@ -115,12 +120,12 @@ macro_rules! wrap_entity_handlers { entity: models::$model, _context: &Context, ) -> Box<Future<Item = $update_resp, Error = ApiError> + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($update_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$update_handler(&id, entity, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let edit_context = make_edit_context(&conn, entity.parse_editgroup_id()?, false)?; + Ok(entity.db_update(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $update_resp::UpdatedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -148,19 +153,16 @@ macro_rules! 
wrap_entity_handlers { editgroup_id: Option<String>, _context: &Context, ) -> Box<Future<Item = $delete_resp, Error = ApiError> + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; - let editgroup_id = match editgroup_id { - Some(raw) => if let Ok(parsed) = fcid2uuid(&raw) { Some(parsed) } else { - return Box::new(futures::done(Ok($delete_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(raw).to_string() })))) - } - None => None - }; let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.$delete_handler(&id, editgroup_id, &conn)) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + let editgroup_id: Option<FatCatId> = match editgroup_id { + Some(s) => Some(FatCatId::from_str(&s)?), + None => None, + }; + let edit_context = make_edit_context(&conn, editgroup_id, false)?; + Ok($model::db_delete(&conn, &edit_context, entity_id)?.into_model()?) + }) { Ok(edit) => $delete_resp::DeletedEntity(edit), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -188,13 +190,12 @@ macro_rules! wrap_entity_handlers { limit: Option<i64>, _context: &Context, ) -> Box<Future<Item = $get_history_resp, Error = ApiError> + Send> { - let id = if let Ok(parsed) = fcid2uuid(&id) { parsed } else { - return Box::new(futures::done(Ok($get_history_resp::BadRequest(ErrorResponse { - message: ErrorKind::InvalidFatcatId(id).to_string() })))); - }; let conn = self.db_pool.get().expect("db_pool error"); // No transaction for GET - let ret = match self.$get_history_handler(&id, limit, &conn) { + let ret = match conn.transaction(|| { + let entity_id = FatCatId::from_str(&id)?; + Ok($model::db_get_history(&conn, entity_id, limit)?) + }) { Ok(history) => $get_history_resp::FoundEntityHistory(history), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => @@ -240,25 +241,50 @@ macro_rules! wrap_lookup_handler { } } +macro_rules! 
wrap_fcid_handler { + ($get_fn:ident, $get_handler:ident, $get_resp:ident) => { + fn $get_fn( + &self, + id: String, + _context: &Context, + ) -> Box<Future<Item = $get_resp, Error = ApiError> + Send> { + let conn = self.db_pool.get().expect("db_pool error"); + // No transaction for GET + let ret = match (|| { + let fcid = FatCatId::from_str(&id)?; + self.$get_handler(fcid, &conn) + })() { + Ok(entity) => + $get_resp::Found(entity), + Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => + $get_resp::NotFound(ErrorResponse { message: format!("Not found: {}", id) }), + Err(Error(ErrorKind::MalformedExternalId(e), _)) => + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }), + Err(e) => { + error!("{}", e); + $get_resp::BadRequest(ErrorResponse { message: e.to_string() }) + }, + }; + Box::new(futures::done(Ok(ret))) + } + } +} + impl Api for Server { wrap_entity_handlers!( get_container, get_container_handler, GetContainerResponse, create_container, - create_container_handler, CreateContainerResponse, create_container_batch, create_container_batch_handler, CreateContainerBatchResponse, update_container, - update_container_handler, UpdateContainerResponse, delete_container, - delete_container_handler, DeleteContainerResponse, get_container_history, - get_container_history_handler, GetContainerHistoryResponse, ContainerEntity ); @@ -268,19 +294,15 @@ impl Api for Server { get_creator_handler, GetCreatorResponse, create_creator, - create_creator_handler, CreateCreatorResponse, create_creator_batch, create_creator_batch_handler, CreateCreatorBatchResponse, update_creator, - update_creator_handler, UpdateCreatorResponse, delete_creator, - delete_creator_handler, DeleteCreatorResponse, get_creator_history, - get_creator_history_handler, GetCreatorHistoryResponse, CreatorEntity ); @@ -289,19 +311,15 @@ impl Api for Server { get_file_handler, GetFileResponse, create_file, - create_file_handler, CreateFileResponse, create_file_batch, create_file_batch_handler, CreateFileBatchResponse, update_file, - update_file_handler, UpdateFileResponse, delete_file, - delete_file_handler, DeleteFileResponse, get_file_history, - get_file_history_handler, GetFileHistoryResponse, FileEntity ); @@ -310,19 +328,15 @@ impl Api for Server { get_release_handler, GetReleaseResponse, create_release, - create_release_handler, CreateReleaseResponse, create_release_batch, create_release_batch_handler, CreateReleaseBatchResponse, update_release, - update_release_handler, UpdateReleaseResponse, delete_release, - delete_release_handler, DeleteReleaseResponse, get_release_history, - get_release_history_handler, GetReleaseHistoryResponse, ReleaseEntity ); @@ -331,19 +345,15 @@ impl Api for Server { get_work_handler, GetWorkResponse, create_work, - create_work_handler, CreateWorkResponse, create_work_batch, create_work_batch_handler, CreateWorkBatchResponse, update_work, - update_work_handler, UpdateWorkResponse, delete_work, - delete_work_handler, DeleteWorkResponse, get_work_history, - get_work_history_handler, GetWorkHistoryResponse, WorkEntity ); @@ -377,26 +387,26 @@ impl Api for Server { String ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_release_files, get_release_files_handler, - GetReleaseFilesResponse, - id, - String + GetReleaseFilesResponse ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_work_releases, get_work_releases_handler, - GetWorkReleasesResponse, - id, - String + GetWorkReleasesResponse ); - wrap_lookup_handler!( + wrap_fcid_handler!( get_creator_releases, 
get_creator_releases_handler, - GetCreatorReleasesResponse, - id, - String + GetCreatorReleasesResponse + ); + wrap_fcid_handler!(get_editor, get_editor_handler, GetEditorResponse); + wrap_fcid_handler!( + get_editor_changelog, + get_editor_changelog_handler, + GetEditorChangelogResponse ); fn accept_editgroup( @@ -405,7 +415,10 @@ impl Api for Server { _context: &Context, ) -> Box<Future<Item = AcceptEditgroupResponse, Error = ApiError> + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.accept_editgroup_handler(&id, &conn)) { + let ret = match conn.transaction(|| { + let id = FatCatId::from_str(&id)?; + self.accept_editgroup_handler(id, &conn) + }) { Ok(()) => AcceptEditgroupResponse::MergedSuccessfully(Success { message: "horray!".to_string(), }), @@ -432,9 +445,12 @@ impl Api for Server { _context: &Context, ) -> Box<Future<Item = GetEditgroupResponse, Error = ApiError> + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.get_editgroup_handler(&id, &conn)) { + let ret = match conn.transaction(|| { + let id = FatCatId::from_str(&id)?; + self.get_editgroup_handler(id, &conn) + }) { Ok(entity) => - GetEditgroupResponse::FoundEntity(entity), + GetEditgroupResponse::Found(entity), Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => GetEditgroupResponse::NotFound( ErrorResponse { message: format!("No such editgroup: {}", id) }), @@ -452,7 +468,9 @@ impl Api for Server { _context: &Context, ) -> Box<Future<Item = CreateEditgroupResponse, Error = ApiError> + Send> { let conn = self.db_pool.get().expect("db_pool error"); - let ret = match conn.transaction(|| self.create_editgroup_handler(entity, &conn)) { + let ret = match conn.transaction(|| + self.create_editgroup_handler(entity, &conn) + ) { Ok(eg) => CreateEditgroupResponse::SuccessfullyCreated(eg), Err(e) => @@ -463,56 +481,6 @@ impl Api for Server { Box::new(futures::done(Ok(ret))) } - fn get_editor_changelog( - &self, - username: String, - _context: &Context, - ) -> Box<Future<Item = GetEditorChangelogResponse, Error = ApiError> + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.editor_changelog_get_handler(&username, &conn) { - Ok(entries) => GetEditorChangelogResponse::FoundMergedChanges(entries), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetEditorChangelogResponse::NotFound(ErrorResponse { - message: format!("No such editor: {}", username.clone()), - }) - } - Err(e) => { - // TODO: dig in to error type here - error!("{}", e); - GetEditorChangelogResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - - fn get_editor( - &self, - username: String, - _context: &Context, - ) -> Box<Future<Item = GetEditorResponse, Error = ApiError> + Send> { - let conn = self.db_pool.get().expect("db_pool error"); - // No transaction for GET - let ret = match self.get_editor_handler(&username, &conn) { - Ok(entity) => GetEditorResponse::FoundEditor(entity), - Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) => { - GetEditorResponse::NotFound(ErrorResponse { - message: format!("No such editor: {}", username.clone()), - }) - } - Err(e) => { - // TODO: dig in to error type here - error!("{}", e); - GetEditorResponse::GenericError(ErrorResponse { - message: e.to_string(), - }) - } - }; - Box::new(futures::done(Ok(ret))) - } - fn get_changelog( &self, 
         limit: Option<i64>,

diff --git a/rust/src/lib.rs b/rust/src/lib.rs
index 2236d602..b4b9e3c7 100644
--- a/rust/src/lib.rs
+++ b/rust/src/lib.rs
@@ -22,10 +22,10 @@
 extern crate regex;
 extern crate lazy_static;
 extern crate sha1;
 
+pub mod api_entity_crud;
 pub mod api_helpers;
 pub mod api_server;
 pub mod api_wrappers;
-pub mod database_entity_crud;
 pub mod database_models;
 pub mod database_schema;
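
Note on the new wrapper pattern: the wrap_fcid_handler! macro introduced in api_wrappers.rs replaces the hand-written wrappers for fcid-keyed GET endpoints (get_editor, get_editor_changelog, get_creator_releases, get_release_files, get_work_releases). The sketch below is a rough hand-expansion of what that macro generates for get_editor, included only as a reading aid; it uses this crate's own types (Server, DbConn, FatCatId, GetEditorResponse, ErrorResponse, the error-chain Error/ErrorKind) and is not standalone code. The point of the pattern is that FatCatId::from_str runs inside the same closure as the database lookup, so a malformed id and a missing row both come back as Result errors and map onto BadRequest/NotFound response variants, instead of being pre-checked with fcid2uuid before the handler is called.

    fn get_editor(
        &self,
        id: String,
        _context: &Context,
    ) -> Box<Future<Item = GetEditorResponse, Error = ApiError> + Send> {
        let conn = self.db_pool.get().expect("db_pool error");
        // No transaction for GET; parse the fcid and do the lookup in one closure
        let ret = match (|| {
            let fcid = FatCatId::from_str(&id)?;    // fails with MalformedExternalId on bad base32
            self.get_editor_handler(fcid, &conn)    // fails with diesel NotFound if no such row
        })() {
            Ok(editor) =>
                GetEditorResponse::Found(editor),
            Err(Error(ErrorKind::Diesel(::diesel::result::Error::NotFound), _)) =>
                GetEditorResponse::NotFound(ErrorResponse { message: format!("Not found: {}", id) }),
            Err(Error(ErrorKind::MalformedExternalId(e), _)) =>
                GetEditorResponse::BadRequest(ErrorResponse { message: e.to_string() }),
            Err(e) => {
                error!("{}", e);
                GetEditorResponse::BadRequest(ErrorResponse { message: e.to_string() })
            },
        };
        Box::new(futures::done(Ok(ret)))
    }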