Diffstat (limited to 'python')
-rw-r--r--  python/Pipfile                                                 28
-rw-r--r--  python/Pipfile.lock                                           602
-rw-r--r--  python/README.md                                               27
-rw-r--r--  python/TODO                                                    80
-rw-r--r--  python/config.py                                               13
-rw-r--r--  python/fatcat/__init__.py                                      15
-rw-r--r--  python/fatcat/api.py                                          280
-rw-r--r--  python/fatcat/api_client.py                                   175
-rw-r--r--  python/fatcat/dummy.py                                        135
-rw-r--r--  python/fatcat/models.py                                       429
-rw-r--r--  python/fatcat/routes.py                                       129
-rw-r--r--  python/fatcat/sql.py                                          150
-rw-r--r--  python/fatcat/static/robots.txt                                 1
-rw-r--r--  python/fatcat/templates/404.html                                6
-rw-r--r--  python/fatcat/templates/about.html                            161
-rw-r--r--  python/fatcat/templates/base.html                              70
-rw-r--r--  python/fatcat/templates/container_add.html                    168
-rw-r--r--  python/fatcat/templates/container_view.html                    14
-rw-r--r--  python/fatcat/templates/creator_view.html                      10
-rw-r--r--  python/fatcat/templates/editgroup_view.html                    49
-rw-r--r--  python/fatcat/templates/editor_changelog.html                  17
-rw-r--r--  python/fatcat/templates/editor_view.html                        9
-rw-r--r--  python/fatcat/templates/file_view.html                         10
-rw-r--r--  python/fatcat/templates/home.html                              29
-rw-r--r--  python/fatcat/templates/release_changelog.html                 17
-rw-r--r--  python/fatcat/templates/release_view.html                      31
-rw-r--r--  python/fatcat/templates/work_add.html                         215
-rw-r--r--  python/fatcat/templates/work_view.html                         37
-rwxr-xr-x  python/fatcat_client.py                                        41
-rw-r--r--  python/pytest.ini                                               8
-rwxr-xr-x  python/run.py                                                  38
-rw-r--r--  python/schema.sql                                            1078
-rw-r--r--  python/tests/api.py                                           308
-rw-r--r--  python/tests/api_client.py                                     14
-rw-r--r--  python/tests/entity_lifecycle.py                               80
-rw-r--r--  python/tests/files/crossref-works.2018-01-21.badsample.json    10
-rw-r--r--  python/tests/fixtures.py                                      169
-rw-r--r--  python/tests/models.py                                         87
-rw-r--r--  python/tests/routes.py                                         67
-rw-r--r--  python/tests/test_fixtures.py                                  29
40 files changed, 4836 insertions(+), 0 deletions(-)
diff --git a/python/Pipfile b/python/Pipfile
new file mode 100644
index 00000000..d60bf48f
--- /dev/null
+++ b/python/Pipfile
@@ -0,0 +1,28 @@
+[[source]]
+url = "https://pypi.python.org/simple"
+verify_ssl = true
+name = "pypi"
+
+[dev-packages]
+pytest = "*"
+pytest-pythonpath = "*"
+ipython = "*"
+responses = "*"
+pytest-cov = "*"
+pylint = "*"
+pg-view = "*"
+flask-debugtoolbar = "*"
+
+[packages]
+Flask = "*"
+SQLAlchemy = "*"
+requests = "*"
+Flask-SQLAlchemy = "*"
+raven = "*"
+flask-sqlalchemy = "*"
+marshmallow-sqlalchemy = "*"
+flask-marshmallow = "*"
+"psycopg2" = "*"
+
+[requires]
+python_version = "3.5"
diff --git a/python/Pipfile.lock b/python/Pipfile.lock
new file mode 100644
index 00000000..677cc5eb
--- /dev/null
+++ b/python/Pipfile.lock
@@ -0,0 +1,602 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "ee7c7ec92727ff77576a3244fb2d9317e1c16a15eae26fdc1f21c8c8e3dcfa3a"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.5"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.python.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {
+ "certifi": {
+ "hashes": [
+ "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7",
+ "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0"
+ ],
+ "version": "==2018.4.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "click": {
+ "hashes": [
+ "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d",
+ "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
+ ],
+ "version": "==6.7"
+ },
+ "flask": {
+ "hashes": [
+ "sha256:0749df235e3ff61ac108f69ac178c9770caeaccad2509cb762ce1f65570a8856",
+ "sha256:49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1"
+ ],
+ "index": "pypi",
+ "version": "==0.12.2"
+ },
+ "flask-marshmallow": {
+ "hashes": [
+ "sha256:7fefe98ba8088437e3cd940c813d2f71661da00f041826456cc1ae7c5250bf34",
+ "sha256:d1457a8ee4cda662b121baacb22267774f3f7834e4adafae606b259bfbeb46b9"
+ ],
+ "index": "pypi",
+ "version": "==0.8.0"
+ },
+ "flask-sqlalchemy": {
+ "hashes": [
+ "sha256:3bc0fac969dd8c0ace01b32060f0c729565293302f0c4269beed154b46bec50b",
+ "sha256:5971b9852b5888655f11db634e87725a9031e170f37c0ce7851cf83497f56e53"
+ ],
+ "index": "pypi",
+ "version": "==2.3.2"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "version": "==2.6"
+ },
+ "itsdangerous": {
+ "hashes": [
+ "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519"
+ ],
+ "version": "==0.24"
+ },
+ "jinja2": {
+ "hashes": [
+ "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
+ "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
+ ],
+ "version": "==2.10"
+ },
+ "markupsafe": {
+ "hashes": [
+ "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
+ ],
+ "version": "==1.0"
+ },
+ "marshmallow": {
+ "hashes": [
+ "sha256:8740ada95f47fa19f905772aa4932dc5512226a90c30da5672d6d6bf3dd791a7",
+ "sha256:d3f31fe7be2106b1d783cbd0765ef4e1c6615505514695f33082805f929dd584"
+ ],
+ "version": "==2.15.0"
+ },
+ "marshmallow-sqlalchemy": {
+ "hashes": [
+ "sha256:3e034964d09d1af15f6868a36cc26ec22d41a703e576928ba086996ae4287e59",
+ "sha256:9804ef2829f781f469a06528d107c2a763f109c687266ab8b1f000f9684184ae"
+ ],
+ "index": "pypi",
+ "version": "==0.13.2"
+ },
+ "psycopg2": {
+ "hashes": [
+ "sha256:027ae518d0e3b8fff41990e598bc7774c3d08a3a20e9ecc0b59fb2aaaf152f7f",
+ "sha256:092a80da1b052a181b6e6c765849c9b32d46c5dac3b81bf8c9b83e697f3cdbe8",
+ "sha256:0b9851e798bae024ed1a2a6377a8dab4b8a128a56ed406f572f9f06194e4b275",
+ "sha256:179c52eb870110a8c1b460c86d4f696d58510ea025602cd3f81453746fccb94f",
+ "sha256:19983b77ec1fc2a210092aa0333ee48811fd9fb5f194c6cd5b927ed409aea5f8",
+ "sha256:1d90379d01d0dc50ae9b40c863933d87ff82d51dd7d52cea5d1cb7019afd72cd",
+ "sha256:27467fd5af1dcc0a82d72927113b8f92da8f44b2efbdb8906bd76face95b596d",
+ "sha256:32702e3bd8bfe12b36226ba9846ed9e22336fc4bd710039d594b36bd432ae255",
+ "sha256:33f9e1032095e1436fa9ec424abcbd4c170da934fb70e391c5d78275d0307c75",
+ "sha256:36030ca7f4b4519ee4f52a74edc4ec73c75abfb6ea1d80ac7480953d1c0aa3c3",
+ "sha256:363fbbf4189722fc46779be1fad2597e2c40b3f577dc618f353a46391cf5d235",
+ "sha256:6f302c486132f8dd11f143e919e236ea4467d53bf18c451cac577e6988ecbd05",
+ "sha256:733166464598c239323142c071fa4c9b91c14359176e5ae7e202db6bcc1d2eb5",
+ "sha256:7cbc3b21ce2f681ca9ad2d8c0901090b23a30c955e980ebf1006d41f37068a95",
+ "sha256:888bba7841116e529f407f15c6d28fe3ef0760df8c45257442ec2f14f161c871",
+ "sha256:8966829cb0d21a08a3c5ac971a2eb67c3927ae27c247300a8476554cc0ce2ae8",
+ "sha256:8bf51191d60f6987482ef0cfe8511bbf4877a5aa7f313d7b488b53189cf26209",
+ "sha256:8eb94c0625c529215b53c08fb4e461546e2f3fc96a49c13d5474b5ad7aeab6cf",
+ "sha256:8ebba5314c609a05c6955e5773c7e0e57b8dd817e4f751f30de729be58fa5e78",
+ "sha256:932a4c101af007cb3132b1f8a9ffef23386acc53dad46536dc5ba43a3235ae02",
+ "sha256:ad75fe10bea19ad2188c5cb5fc4cdf53ee808d9b44578c94a3cd1e9fc2beb656",
+ "sha256:aeaba399254ca79c299d9fe6aa811d3c3eac61458dee10270de7f4e71c624998",
+ "sha256:b178e0923c93393e16646155794521e063ec17b7cc9f943f15b7d4b39776ea2c",
+ "sha256:b68e89bb086a9476fa85298caab43f92d0a6af135a5f433d1f6b6d82cafa7b55",
+ "sha256:d74cf9234ba76426add5e123449be08993a9b13ff434c6efa3a07caa305a619f",
+ "sha256:f3d3a88128f0c219bdc5b2d9ccd496517199660cea021c560a3252116df91cbd",
+ "sha256:fe6a7f87356116f5ea840c65b032af17deef0e1a5c34013a2962dd6f99b860dd"
+ ],
+ "index": "pypi",
+ "version": "==2.7.4"
+ },
+ "raven": {
+ "hashes": [
+ "sha256:e4edf648829a64234800a10ed94ca08e0b38592f7449fa5e70931db62f5cd851",
+ "sha256:f908e9b39f02580e7f822030d119ed3b2e8d32300a2fec6373e5827d588bbae7"
+ ],
+ "index": "pypi",
+ "version": "==6.7.0"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "version": "==1.11.0"
+ },
+ "sqlalchemy": {
+ "hashes": [
+ "sha256:d6cda03b0187d6ed796ff70e87c9a7dce2c2c9650a7bc3c022cd331416853c31"
+ ],
+ "index": "pypi",
+ "version": "==1.2.7"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "version": "==1.22"
+ },
+ "werkzeug": {
+ "hashes": [
+ "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c",
+ "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b"
+ ],
+ "version": "==0.14.1"
+ }
+ },
+ "develop": {
+ "astroid": {
+ "hashes": [
+ "sha256:35cfae47aac19c7b407b7095410e895e836f2285ccf1220336afba744cc4c5f2",
+ "sha256:38186e481b65877fd8b1f9acc33e922109e983eb7b6e487bd4c71002134ad331"
+ ],
+ "version": "==1.6.3"
+ },
+ "attrs": {
+ "hashes": [
+ "sha256:1c7960ccfd6a005cd9f7ba884e6316b5e430a3f1a6c37c5f87d8b43f83b54ec9",
+ "sha256:a17a9573a6f475c99b551c0e0a812707ddda1ec9653bed04c13841404ed6f450"
+ ],
+ "version": "==17.4.0"
+ },
+ "backcall": {
+ "hashes": [
+ "sha256:38ecd85be2c1e78f77fd91700c76e14667dc21e2713b63876c0eb901196e01e4",
+ "sha256:bbbf4b1e5cd2bdb08f915895b51081c041bac22394fdfcfdfbe9f14b77c08bf2"
+ ],
+ "version": "==0.1.0"
+ },
+ "blinker": {
+ "hashes": [
+ "sha256:471aee25f3992bd325afa3772f1063dbdbbca947a041b8b89466dc00d606f8b6"
+ ],
+ "version": "==1.4"
+ },
+ "certifi": {
+ "hashes": [
+ "sha256:13e698f54293db9f89122b0581843a782ad0934a4fe0172d2a980ba77fc61bb7",
+ "sha256:9fa520c1bacfb634fa7af20a76bcbd3d5fb390481724c597da32c719a7dca4b0"
+ ],
+ "version": "==2018.4.16"
+ },
+ "chardet": {
+ "hashes": [
+ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
+ "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
+ ],
+ "version": "==3.0.4"
+ },
+ "click": {
+ "hashes": [
+ "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d",
+ "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
+ ],
+ "version": "==6.7"
+ },
+ "cookies": {
+ "hashes": [
+ "sha256:15bee753002dff684987b8df8c235288eb8d45f8191ae056254812dfd42c81d3",
+ "sha256:d6b698788cae4cfa4e62ef8643a9ca332b79bd96cb314294b864ae8d7eb3ee8e"
+ ],
+ "version": "==2.2.1"
+ },
+ "coverage": {
+ "hashes": [
+ "sha256:03481e81d558d30d230bc12999e3edffe392d244349a90f4ef9b88425fac74ba",
+ "sha256:0b136648de27201056c1869a6c0d4e23f464750fd9a9ba9750b8336a244429ed",
+ "sha256:104ab3934abaf5be871a583541e8829d6c19ce7bde2923b2751e0d3ca44db60a",
+ "sha256:15b111b6a0f46ee1a485414a52a7ad1d703bdf984e9ed3c288a4414d3871dcbd",
+ "sha256:198626739a79b09fa0a2f06e083ffd12eb55449b5f8bfdbeed1df4910b2ca640",
+ "sha256:1c383d2ef13ade2acc636556fd544dba6e14fa30755f26812f54300e401f98f2",
+ "sha256:28b2191e7283f4f3568962e373b47ef7f0392993bb6660d079c62bd50fe9d162",
+ "sha256:2eb564bbf7816a9d68dd3369a510be3327f1c618d2357fa6b1216994c2e3d508",
+ "sha256:337ded681dd2ef9ca04ef5d93cfc87e52e09db2594c296b4a0a3662cb1b41249",
+ "sha256:3a2184c6d797a125dca8367878d3b9a178b6fdd05fdc2d35d758c3006a1cd694",
+ "sha256:3c79a6f7b95751cdebcd9037e4d06f8d5a9b60e4ed0cd231342aa8ad7124882a",
+ "sha256:3d72c20bd105022d29b14a7d628462ebdc61de2f303322c0212a054352f3b287",
+ "sha256:3eb42bf89a6be7deb64116dd1cc4b08171734d721e7a7e57ad64cc4ef29ed2f1",
+ "sha256:4635a184d0bbe537aa185a34193898eee409332a8ccb27eea36f262566585000",
+ "sha256:56e448f051a201c5ebbaa86a5efd0ca90d327204d8b059ab25ad0f35fbfd79f1",
+ "sha256:5a13ea7911ff5e1796b6d5e4fbbf6952381a611209b736d48e675c2756f3f74e",
+ "sha256:69bf008a06b76619d3c3f3b1983f5145c75a305a0fea513aca094cae5c40a8f5",
+ "sha256:6bc583dc18d5979dc0f6cec26a8603129de0304d5ae1f17e57a12834e7235062",
+ "sha256:701cd6093d63e6b8ad7009d8a92425428bc4d6e7ab8d75efbb665c806c1d79ba",
+ "sha256:7608a3dd5d73cb06c531b8925e0ef8d3de31fed2544a7de6c63960a1e73ea4bc",
+ "sha256:76ecd006d1d8f739430ec50cc872889af1f9c1b6b8f48e29941814b09b0fd3cc",
+ "sha256:7aa36d2b844a3e4a4b356708d79fd2c260281a7390d678a10b91ca595ddc9e99",
+ "sha256:7d3f553904b0c5c016d1dad058a7554c7ac4c91a789fca496e7d8347ad040653",
+ "sha256:7e1fe19bd6dce69d9fd159d8e4a80a8f52101380d5d3a4d374b6d3eae0e5de9c",
+ "sha256:8c3cb8c35ec4d9506979b4cf90ee9918bc2e49f84189d9bf5c36c0c1119c6558",
+ "sha256:9d6dd10d49e01571bf6e147d3b505141ffc093a06756c60b053a859cb2128b1f",
+ "sha256:9e112fcbe0148a6fa4f0a02e8d58e94470fc6cb82a5481618fea901699bf34c4",
+ "sha256:ac4fef68da01116a5c117eba4dd46f2e06847a497de5ed1d64bb99a5fda1ef91",
+ "sha256:b8815995e050764c8610dbc82641807d196927c3dbed207f0a079833ffcf588d",
+ "sha256:be6cfcd8053d13f5f5eeb284aa8a814220c3da1b0078fa859011c7fffd86dab9",
+ "sha256:c1bb572fab8208c400adaf06a8133ac0712179a334c09224fb11393e920abcdd",
+ "sha256:de4418dadaa1c01d497e539210cb6baa015965526ff5afc078c57ca69160108d",
+ "sha256:e05cb4d9aad6233d67e0541caa7e511fa4047ed7750ec2510d466e806e0255d6",
+ "sha256:e4d96c07229f58cb686120f168276e434660e4358cc9cf3b0464210b04913e77",
+ "sha256:f3f501f345f24383c0000395b26b726e46758b71393267aeae0bd36f8b3ade80",
+ "sha256:f8a923a85cb099422ad5a2e345fe877bbc89a8a8b23235824a93488150e45f6e"
+ ],
+ "version": "==4.5.1"
+ },
+ "decorator": {
+ "hashes": [
+ "sha256:2c51dff8ef3c447388fe5e4453d24a2bf128d3a4c32af3fabef1f01c6851ab82",
+ "sha256:c39efa13fbdeb4506c476c9b3babf6a718da943dab7811c206005a4a956c080c"
+ ],
+ "version": "==4.3.0"
+ },
+ "flask": {
+ "hashes": [
+ "sha256:0749df235e3ff61ac108f69ac178c9770caeaccad2509cb762ce1f65570a8856",
+ "sha256:49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1"
+ ],
+ "index": "pypi",
+ "version": "==0.12.2"
+ },
+ "flask-debugtoolbar": {
+ "hashes": [
+ "sha256:3d9657bc0c3633ace429e3ff451742bb59d1b7a7b95c9eb23a65ac9be2812959",
+ "sha256:ec810083123aae0632eb32ba11e1cb4cdace81e7ce6c5009dd06c5204afbce52"
+ ],
+ "index": "pypi",
+ "version": "==0.10.1"
+ },
+ "idna": {
+ "hashes": [
+ "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f",
+ "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4"
+ ],
+ "version": "==2.6"
+ },
+ "ipython": {
+ "hashes": [
+ "sha256:85882f97d75122ff8cdfe129215a408085a26039527110c8d4a2b8a5e45b7639",
+ "sha256:a6ac981381b3f5f604b37a293369963485200e3639fb0404fa76092383c10c41"
+ ],
+ "index": "pypi",
+ "version": "==6.3.1"
+ },
+ "ipython-genutils": {
+ "hashes": [
+ "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8",
+ "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"
+ ],
+ "version": "==0.2.0"
+ },
+ "isort": {
+ "hashes": [
+ "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af",
+ "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8",
+ "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"
+ ],
+ "version": "==4.3.4"
+ },
+ "itsdangerous": {
+ "hashes": [
+ "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519"
+ ],
+ "version": "==0.24"
+ },
+ "jedi": {
+ "hashes": [
+ "sha256:1972f694c6bc66a2fac8718299e2ab73011d653a6d8059790c3476d2353b99ad",
+ "sha256:5861f6dc0c16e024cbb0044999f9cf8013b292c05f287df06d3d991a87a4eb89"
+ ],
+ "version": "==0.12.0"
+ },
+ "jinja2": {
+ "hashes": [
+ "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd",
+ "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4"
+ ],
+ "version": "==2.10"
+ },
+ "lazy-object-proxy": {
+ "hashes": [
+ "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33",
+ "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39",
+ "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019",
+ "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088",
+ "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b",
+ "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e",
+ "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6",
+ "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b",
+ "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5",
+ "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff",
+ "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd",
+ "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7",
+ "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff",
+ "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d",
+ "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2",
+ "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35",
+ "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4",
+ "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514",
+ "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252",
+ "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109",
+ "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f",
+ "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c",
+ "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92",
+ "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577",
+ "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d",
+ "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d",
+ "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f",
+ "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a",
+ "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"
+ ],
+ "version": "==1.3.1"
+ },
+ "markupsafe": {
+ "hashes": [
+ "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665"
+ ],
+ "version": "==1.0"
+ },
+ "mccabe": {
+ "hashes": [
+ "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42",
+ "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"
+ ],
+ "version": "==0.6.1"
+ },
+ "more-itertools": {
+ "hashes": [
+ "sha256:0dd8f72eeab0d2c3bd489025bb2f6a1b8342f9b198f6fc37b52d15cfa4531fea",
+ "sha256:11a625025954c20145b37ff6309cd54e39ca94f72f6bb9576d1195db6fa2442e",
+ "sha256:c9ce7eccdcb901a2c75d326ea134e0886abfbea5f93e91cc95de9507c0816c44"
+ ],
+ "version": "==4.1.0"
+ },
+ "parso": {
+ "hashes": [
+ "sha256:62bd6bf7f04ab5c817704ff513ef175328676471bdef3629d4bdd46626f75551",
+ "sha256:a75a304d7090d2c67bd298091c14ef9d3d560e3c53de1c239617889f61d1d307"
+ ],
+ "version": "==0.2.0"
+ },
+ "pexpect": {
+ "hashes": [
+ "sha256:9783f4644a3ef8528a6f20374eeb434431a650c797ca6d8df0d81e30fffdfa24",
+ "sha256:9f8eb3277716a01faafaba553d629d3d60a1a624c7cf45daa600d2148c30020c"
+ ],
+ "markers": "sys_platform != 'win32'",
+ "version": "==4.5.0"
+ },
+ "pg-view": {
+ "hashes": [
+ "sha256:841115a46e1cc672ee3e2c240c78f99981c4c33db68dff748a51bf30f9b8536c"
+ ],
+ "index": "pypi",
+ "version": "==1.4.1"
+ },
+ "pickleshare": {
+ "hashes": [
+ "sha256:84a9257227dfdd6fe1b4be1319096c20eb85ff1e82c7932f36efccfe1b09737b",
+ "sha256:c9a2541f25aeabc070f12f452e1f2a8eae2abd51e1cd19e8430402bdf4c1d8b5"
+ ],
+ "version": "==0.7.4"
+ },
+ "pluggy": {
+ "hashes": [
+ "sha256:7f8ae7f5bdf75671a718d2daf0a64b7885f74510bcd98b1a0bb420eb9a9d0cff",
+ "sha256:d345c8fe681115900d6da8d048ba67c25df42973bda370783cd58826442dcd7c",
+ "sha256:e160a7fcf25762bb60efc7e171d4497ff1d8d2d75a3d0df7a21b76821ecbf5c5"
+ ],
+ "version": "==0.6.0"
+ },
+ "prompt-toolkit": {
+ "hashes": [
+ "sha256:1df952620eccb399c53ebb359cc7d9a8d3a9538cb34c5a1344bdbeb29fbcc381",
+ "sha256:3f473ae040ddaa52b52f97f6b4a493cfa9f5920c255a12dc56a7d34397a398a4",
+ "sha256:858588f1983ca497f1cf4ffde01d978a3ea02b01c8a26a8bbc5cd2e66d816917"
+ ],
+ "version": "==1.0.15"
+ },
+ "psycopg2": {
+ "hashes": [
+ "sha256:027ae518d0e3b8fff41990e598bc7774c3d08a3a20e9ecc0b59fb2aaaf152f7f",
+ "sha256:092a80da1b052a181b6e6c765849c9b32d46c5dac3b81bf8c9b83e697f3cdbe8",
+ "sha256:0b9851e798bae024ed1a2a6377a8dab4b8a128a56ed406f572f9f06194e4b275",
+ "sha256:179c52eb870110a8c1b460c86d4f696d58510ea025602cd3f81453746fccb94f",
+ "sha256:19983b77ec1fc2a210092aa0333ee48811fd9fb5f194c6cd5b927ed409aea5f8",
+ "sha256:1d90379d01d0dc50ae9b40c863933d87ff82d51dd7d52cea5d1cb7019afd72cd",
+ "sha256:27467fd5af1dcc0a82d72927113b8f92da8f44b2efbdb8906bd76face95b596d",
+ "sha256:32702e3bd8bfe12b36226ba9846ed9e22336fc4bd710039d594b36bd432ae255",
+ "sha256:33f9e1032095e1436fa9ec424abcbd4c170da934fb70e391c5d78275d0307c75",
+ "sha256:36030ca7f4b4519ee4f52a74edc4ec73c75abfb6ea1d80ac7480953d1c0aa3c3",
+ "sha256:363fbbf4189722fc46779be1fad2597e2c40b3f577dc618f353a46391cf5d235",
+ "sha256:6f302c486132f8dd11f143e919e236ea4467d53bf18c451cac577e6988ecbd05",
+ "sha256:733166464598c239323142c071fa4c9b91c14359176e5ae7e202db6bcc1d2eb5",
+ "sha256:7cbc3b21ce2f681ca9ad2d8c0901090b23a30c955e980ebf1006d41f37068a95",
+ "sha256:888bba7841116e529f407f15c6d28fe3ef0760df8c45257442ec2f14f161c871",
+ "sha256:8966829cb0d21a08a3c5ac971a2eb67c3927ae27c247300a8476554cc0ce2ae8",
+ "sha256:8bf51191d60f6987482ef0cfe8511bbf4877a5aa7f313d7b488b53189cf26209",
+ "sha256:8eb94c0625c529215b53c08fb4e461546e2f3fc96a49c13d5474b5ad7aeab6cf",
+ "sha256:8ebba5314c609a05c6955e5773c7e0e57b8dd817e4f751f30de729be58fa5e78",
+ "sha256:932a4c101af007cb3132b1f8a9ffef23386acc53dad46536dc5ba43a3235ae02",
+ "sha256:ad75fe10bea19ad2188c5cb5fc4cdf53ee808d9b44578c94a3cd1e9fc2beb656",
+ "sha256:aeaba399254ca79c299d9fe6aa811d3c3eac61458dee10270de7f4e71c624998",
+ "sha256:b178e0923c93393e16646155794521e063ec17b7cc9f943f15b7d4b39776ea2c",
+ "sha256:b68e89bb086a9476fa85298caab43f92d0a6af135a5f433d1f6b6d82cafa7b55",
+ "sha256:d74cf9234ba76426add5e123449be08993a9b13ff434c6efa3a07caa305a619f",
+ "sha256:f3d3a88128f0c219bdc5b2d9ccd496517199660cea021c560a3252116df91cbd",
+ "sha256:fe6a7f87356116f5ea840c65b032af17deef0e1a5c34013a2962dd6f99b860dd"
+ ],
+ "index": "pypi",
+ "version": "==2.7.4"
+ },
+ "ptyprocess": {
+ "hashes": [
+ "sha256:e64193f0047ad603b71f202332ab5527c5e52aa7c8b609704fc28c0dc20c4365",
+ "sha256:e8c43b5eee76b2083a9badde89fd1bbce6c8942d1045146e100b7b5e014f4f1a"
+ ],
+ "version": "==0.5.2"
+ },
+ "py": {
+ "hashes": [
+ "sha256:29c9fab495d7528e80ba1e343b958684f4ace687327e6f789a94bf3d1915f881",
+ "sha256:983f77f3331356039fdd792e9220b7b8ee1aa6bd2b25f567a963ff1de5a64f6a"
+ ],
+ "version": "==1.5.3"
+ },
+ "pygments": {
+ "hashes": [
+ "sha256:78f3f434bcc5d6ee09020f92ba487f95ba50f1e3ef83ae96b9d5ffa1bab25c5d",
+ "sha256:dbae1046def0efb574852fab9e90209b23f556367b5a320c0bcb871c77c3e8cc"
+ ],
+ "version": "==2.2.0"
+ },
+ "pylint": {
+ "hashes": [
+ "sha256:0b7e6b5d9f1d4e0b554b5d948f14ed7969e8cdf9a0120853e6e5af60813b18ab",
+ "sha256:34738a82ab33cbd3bb6cd4cef823dbcabdd2b6b48a4e3a3054a2bbbf0c712be9"
+ ],
+ "index": "pypi",
+ "version": "==1.8.4"
+ },
+ "pytest": {
+ "hashes": [
+ "sha256:54713b26c97538db6ff0703a12b19aeaeb60b5e599de542e7fca0ec83b9038e8",
+ "sha256:829230122facf05a5f81a6d4dfe6454a04978ea3746853b2b84567ecf8e5c526"
+ ],
+ "index": "pypi",
+ "version": "==3.5.1"
+ },
+ "pytest-cov": {
+ "hashes": [
+ "sha256:03aa752cf11db41d281ea1d807d954c4eda35cfa1b21d6971966cc041bbf6e2d",
+ "sha256:890fe5565400902b0c78b5357004aab1c814115894f4f21370e2433256a3eeec"
+ ],
+ "index": "pypi",
+ "version": "==2.5.1"
+ },
+ "pytest-pythonpath": {
+ "hashes": [
+ "sha256:f3d46b0a8276e856f7dc4f70ca97b88be6fbcf52d57ce36e35057d502388265e"
+ ],
+ "index": "pypi",
+ "version": "==0.7.2"
+ },
+ "requests": {
+ "hashes": [
+ "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b",
+ "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e"
+ ],
+ "index": "pypi",
+ "version": "==2.18.4"
+ },
+ "responses": {
+ "hashes": [
+ "sha256:c6082710f4abfb60793899ca5f21e7ceb25aabf321560cc0726f8b59006811c9",
+ "sha256:f23a29dca18b815d9d64a516b4a0abb1fbdccff6141d988ad8100facb81cf7b3"
+ ],
+ "index": "pypi",
+ "version": "==0.9.0"
+ },
+ "simplegeneric": {
+ "hashes": [
+ "sha256:dc972e06094b9af5b855b3df4a646395e43d1c9d0d39ed345b7393560d0b9173"
+ ],
+ "version": "==0.8.1"
+ },
+ "six": {
+ "hashes": [
+ "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9",
+ "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb"
+ ],
+ "version": "==1.11.0"
+ },
+ "traitlets": {
+ "hashes": [
+ "sha256:9c4bd2d267b7153df9152698efb1050a5d84982d3384a37b2c1f7723ba3e7835",
+ "sha256:c6cb5e6f57c5a9bdaa40fa71ce7b4af30298fbab9ece9815b5d995ab6217c7d9"
+ ],
+ "version": "==4.3.2"
+ },
+ "urllib3": {
+ "hashes": [
+ "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b",
+ "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"
+ ],
+ "version": "==1.22"
+ },
+ "wcwidth": {
+ "hashes": [
+ "sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
+ "sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
+ ],
+ "version": "==0.1.7"
+ },
+ "werkzeug": {
+ "hashes": [
+ "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c",
+ "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b"
+ ],
+ "version": "==0.14.1"
+ },
+ "wrapt": {
+ "hashes": [
+ "sha256:d4d560d479f2c21e1b5443bbd15fe7ec4b37fe7e53d335d3b9b0a7b1226fe3c6"
+ ],
+ "version": "==1.10.11"
+ }
+ }
+}
diff --git a/python/README.md b/python/README.md
new file mode 100644
index 00000000..dbf6aef7
--- /dev/null
+++ b/python/README.md
@@ -0,0 +1,27 @@
+
+ __ _ _
+ / _| __ _| |_ ___ __ _| |_
+ | |_ / _` | __/ __/ _` | __|
+ | _| (_| | || (_| (_| | |_
+ |_| \__,_|\__\___\__,_|\__|
+
+ ... catalog all the things!
+
+
+This is just a concept for now; see [rfc](./rfc).
+
+
+## Python Prototype
+
+Use `pipenv` (which you can install with `pip`).
+
+    pipenv run ./run.py --init-db
+    pipenv run ./run.py
+
+Run tests:
+
+ pipenv run pytest
+
+ # for coverage:
+ pipenv run pytest --cov --cov-report html
+
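
Editor's note: once the dev server is up, a quick smoke test is the `/health` endpoint that `FatCatApiClient` (added below in `fatcat/api_client.py`) relies on. A minimal sketch, assuming Flask's default port 5000:

    # Smoke test against a local dev server. The port is an assumption
    # (Flask's default); /health is the route the API client polls.
    from fatcat.api_client import FatCatApiClient

    client = FatCatApiClient("http://localhost:5000")
    print(client.health())  # raises AssertionError unless the server returns 200
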
diff --git a/python/TODO b/python/TODO
new file mode 100644
index 00000000..88186280
--- /dev/null
+++ b/python/TODO
@@ -0,0 +1,80 @@
+
+next/high-level:
+- quick python ORCID and ISSN import scripts
+- client export:
+ => one json-nl file per entity type
+- flask-apispec
+- swagger API docs?
+- naive API-based import scripts for: journals (norwegian), orcid, crossref
+- switch to marshmallow in create APIs (at least for revs)
+
+- kong or oauth2_proxy for auth, rate-limit, etc
+- "authn" microservice: https://keratin.tech/
+
+api:
+- PUT for mid-edit revisions
+/ use marshmallow in POST for all entities
+/ consider refactoring into method-method (classes)
+
+model:
+- 'parent rev' for revisions (vs. container parent)
+- "submit" status for editgroups?
+
+tests
+- full object fields actually getting passed e2e (for rich_app)
+- implicit editor.active_edit_group behavior
+- modify existing release via edit mechanism (and commit)
+- redirect a release to another (merge)
+- update (via edit) a redirect release
+- api: try to reuse an accepted edit group
+- api: try to modify an accepted release
+- api: multiple edits, same entity, same editgroup
+
+review
+- what does openlibrary API look like?
+- hydrate in files for releases... nested good enough?
+- add a 'live' (or 'immutable') flag to revision tables
+- how to encode proposed redirects? history goes in changelog
+ => proposed_ident_action table, which points to edits
+ => ident in edit as a partial solution (not redirects)
+ => extend edit object to have "to/from" info, and be per-entity
+
+views
+- oldest un-merged edits/edit-groups
+
+later:
+- switch extra_json to just be JSONB column
+- public IDs are UUID (sqlite hack, or just require postgres)
+
+## High-Level Priorities
+
+- bulk loading of releases, files, containers, creators
+- manual editing of containers and releases
+- accurate auto-matching of containers (eg, via ISSN)
+- full database dump and reload
+
+## Planning...
+
+before switching to golang:
+x swap extra_json to simple text field
+x profile slow bulk imports
+ client:
+ 78% waiting for POST
+ api:
+ 56% / 22ms api_release_create
+ 36% / 13ms api_work_create
+ 7% / 4ms container lookup
+- flesh out web interface (POST, etc)
+ x create release
+ => edit existing release
+ => edit editgroup (remove edits)
+ => approve editgroup
+- "model" issues above
+- look at "review" issues above
+- try cockroach
+
+after switching:
+- UUID identifiers
+- faster bulk importers (API client; parallel)
+- editor accounts
+
diff --git a/python/config.py b/python/config.py
new file mode 100644
index 00000000..a7ec4b50
--- /dev/null
+++ b/python/config.py
@@ -0,0 +1,13 @@
+
+import os
+basedir = os.path.abspath(os.path.dirname(__file__))
+
+class Config(object):
+ SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URI') or \
+ 'sqlite:///' + os.path.join(basedir, 'fatcat_dev.sqlite')
+ SQLALCHEMY_TRACK_MODIFICATIONS = False
+
+    # "Even more verbose" debug options. SECRET_KEY is bogus.
+ #SQLALCHEMY_ECHO = True
+ #SECRET_KEY = "kuhy0284hflskjhg01284"
+ #DEBUG = True
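
Editor's note: for illustration, a sketch of how `Config` resolves the database URI. The `DATABASE_URI` variable name comes from the code above; the postgres URI is just an example value:

    # DATABASE_URI must be set before Config is imported, since the
    # class attribute is evaluated at import time.
    import os
    os.environ['DATABASE_URI'] = 'postgresql:///fatcat_dev'
    from config import Config
    print(Config.SQLALCHEMY_DATABASE_URI)  # -> postgresql:///fatcat_dev
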
diff --git a/python/fatcat/__init__.py b/python/fatcat/__init__.py
new file mode 100644
index 00000000..a824d220
--- /dev/null
+++ b/python/fatcat/__init__.py
@@ -0,0 +1,15 @@
+
+from flask import Flask
+from flask_sqlalchemy import SQLAlchemy
+from flask_marshmallow import Marshmallow
+from flask_debugtoolbar import DebugToolbarExtension
+from config import Config
+
+toolbar = DebugToolbarExtension()
+app = Flask(__name__)
+app.config.from_object(Config)
+db = SQLAlchemy(app)
+ma = Marshmallow(app)
+toolbar = DebugToolbarExtension(app)
+
+from fatcat import routes, models, api, sql, dummy
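
Editor's note: because `fatcat/__init__.py` builds and configures `app` at import time, a runner only needs to import it. A hypothetical minimal equivalent of `run.py` (the real script is listed in the diffstat but not shown in this section):

    # Hypothetical minimal runner; db.create_all() stands in for
    # whatever the real run.py does behind its --init-db flag.
    from fatcat import app, db

    if __name__ == '__main__':
        db.create_all()
        app.run(debug=True)
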
diff --git a/python/fatcat/api.py b/python/fatcat/api.py
new file mode 100644
index 00000000..2c91533b
--- /dev/null
+++ b/python/fatcat/api.py
@@ -0,0 +1,280 @@
+
+from flask import Flask, render_template, send_from_directory, request, \
+ url_for, abort, g, redirect, jsonify, session
+from fatcat import app, db
+from fatcat.models import *
+from fatcat.sql import *
+
+
+### Helpers #################################################################
+
+def get_or_create_editgroup(param=None):
+    if param is not None:
+ editgroup = EditGroup.query.get_or_404(int(param))
+ return editgroup
+ editor = Editor.query.get_or_404(1)
+ if editor.active_editgroup:
+ return editor.active_editgroup
+
+ editgroup = EditGroup(editor=editor)
+ db.session.add(editgroup)
+ db.session.commit()
+ editor.active_editgroup = editgroup
+ db.session.add(editor)
+ db.session.commit()
+ return editgroup
+
+### Views ###################################################################
+
+@app.route('/v0/work/<int:ident>', methods=['GET'])
+def api_work_get(ident):
+ entity = WorkIdent.query.get_or_404(ident)
+ return work_schema.jsonify(entity)
+
+@app.route('/v0/work', methods=['POST'])
+def api_work_create(params=None):
+ # TODO: Special-case to pull out primary and create that?
+    if params is None:
+ params = request.get_json()
+ editgroup = get_or_create_editgroup(params.get('editgroup'))
+ rev = WorkRev(
+ title=params.get('title', None),
+ work_type=params.get('work_type', None),
+ )
+ ident = WorkIdent(is_live=False, rev=rev)
+ edit = WorkEdit(editgroup=editgroup, ident=ident, rev=rev)
+ if params.get('extra', None):
+        rev.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add_all([edit, ident, rev])
+ db.session.commit()
+ return work_schema.jsonify(ident)
+
+@app.route('/v0/work/random', methods=['GET'])
+def api_work_random():
+ entity = WorkIdent.query.order_by(db.func.random()).first()
+ return redirect('/v0/work/{}'.format(entity.id))
+
+
+@app.route('/v0/release/<int:ident>', methods=['GET'])
+def api_release_get(ident):
+ entity = ReleaseIdent.query.get_or_404(ident)
+ return release_schema.jsonify(entity)
+
+@app.route('/v0/release', methods=['POST'])
+def api_release_create(params=None):
+    if params is None:
+ params = request.get_json()
+ editgroup = get_or_create_editgroup(params.get('editgroup'))
+ creators = params.get('creators', [])
+ creators = [CreatorIdent.query.get_or_404(c) for c in creators]
+    targets = [ref['target'] for ref in params.get('refs', []) if ref.get('target') is not None]
+ targets = [ReleaseIdent.query.get_or_404(t) for t in targets]
+ work = params.get('work')
+ if work:
+ work = WorkIdent.query.get_or_404(work)
+ container = params.get('container')
+ if container:
+ container = ContainerIdent.query.get_or_404(container)
+ rev = ReleaseRev(
+ title=params.get('title', None),
+ release_type=params.get('release_type', None),
+ work=work,
+ container=container,
+ doi=params.get('doi', None),
+ )
+ contribs = [ReleaseContrib(release=rev, creator=c) for c in creators]
+ rev.creators = contribs
+ db.session.add_all(contribs)
+ refs = [ReleaseRef(release=rev, target=t) for t in targets]
+ rev.refs = refs
+ db.session.add_all(refs)
+ ident = ReleaseIdent(is_live=False, rev=rev)
+ edit = ReleaseEdit(editgroup=editgroup, ident=ident, rev=rev)
+ if params.get('extra', None):
+        rev.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add_all([edit, ident, rev])
+ db.session.commit()
+ return release_schema.jsonify(ident)
+
+@app.route('/v0/release/<int:ident>/changelog', methods=['GET'])
+def api_release_changelog(ident):
+ entries = ChangelogEntry.query\
+ .join(ReleaseEdit.editgroup)\
+ .filter(ReleaseEdit.ident_id==ident)\
+ .all()
+ return changelogentry_schema.jsonify(entries, many=True)
+
+@app.route('/v0/release/random', methods=['GET'])
+def api_release_random():
+ entity = ReleaseIdent.query.order_by(db.func.random()).first()
+ return redirect('/v0/release/{}'.format(entity.id))
+
+@app.route('/v0/release/lookup', methods=['GET'])
+def api_release_lookup():
+ params = request.get_json()
+ doi = params['doi'].strip().lower()
+ # TODO: proper regex
+ if not (doi.startswith("10.") and len(doi.split('/')) == 2):
+ abort(400)
+ entity = ReleaseIdent.query\
+ .join(ReleaseIdent.rev)\
+ .filter(ReleaseRev.doi==doi)\
+ .first_or_404()
+ return release_schema.jsonify(entity)
+
+
+@app.route('/v0/creator/<int:ident>', methods=['GET'])
+def api_creator_get(ident):
+ entity = CreatorIdent.query.get_or_404(ident)
+ return creator_schema.jsonify(entity)
+
+@app.route('/v0/creator', methods=['POST'])
+def api_creator_create(params=None):
+    if params is None:
+ params = request.get_json()
+ editgroup = get_or_create_editgroup(params.get('editgroup'))
+ rev = CreatorRev(
+ name=params.get('name', None),
+ orcid=params.get('orcid', None),
+ )
+ ident = CreatorIdent(is_live=False, rev=rev)
+ edit = CreatorEdit(editgroup=editgroup, ident=ident, rev=rev)
+ if params.get('extra', None):
+        rev.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add_all([edit, ident, rev])
+ db.session.commit()
+ return creator_schema.jsonify(ident)
+
+@app.route('/v0/creator/lookup', methods=['GET'])
+def api_creator_lookup():
+ params = request.get_json()
+ orcid = params['orcid'].strip()
+ # TODO: proper regex
+ if not (len(orcid) == len("0000-0002-1825-0097") and len(orcid.split('-')) == 4):
+ abort(400)
+ entity = CreatorIdent.query\
+ .join(CreatorIdent.rev)\
+ .filter(CreatorRev.orcid==orcid)\
+ .first_or_404()
+ return creator_schema.jsonify(entity)
+
+
+@app.route('/v0/container/<int:ident>', methods=['GET'])
+def api_container_get(ident):
+ entity = ContainerIdent.query.get_or_404(ident)
+ return container_schema.jsonify(entity)
+
+@app.route('/v0/container', methods=['POST'])
+def api_container_create(params=None):
+    if params is None:
+ params = request.get_json()
+ editgroup = get_or_create_editgroup(params.get('editgroup'))
+ rev = ContainerRev(
+ name=params.get('name', None),
+ publisher=params.get('publisher', None),
+ issn=params.get('issn', None),
+ )
+ ident = ContainerIdent(is_live=False, rev=rev)
+ edit = ContainerEdit(editgroup=editgroup, ident=ident, rev=rev)
+ if params.get('extra', None):
+        rev.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add_all([edit, ident, rev])
+ db.session.commit()
+ return container_schema.jsonify(ident)
+
+@app.route('/v0/container/lookup', methods=['GET'])
+def api_container_lookup():
+ params = request.get_json()
+ issn = params['issn'].strip()
+ # TODO: proper regex
+ if not (len(issn) == 9 and issn[0:4].isdigit() and issn[5:7].isdigit()):
+ abort(400)
+ entity = ContainerIdent.query\
+ .join(ContainerIdent.rev)\
+ .filter(ContainerRev.issn==issn)\
+ .first_or_404()
+ return container_schema.jsonify(entity)
+
+
+@app.route('/v0/file/<int:ident>', methods=['GET'])
+def api_file_get(ident):
+ entity = FileIdent.query.get_or_404(ident)
+ return file_schema.jsonify(entity)
+
+@app.route('/v0/file', methods=['POST'])
+def api_file_create(params=None):
+    if params is None:
+ params = request.get_json()
+ editgroup = get_or_create_editgroup(params.get('editgroup'))
+ releases = params.get('releases', [])
+ releases = [ReleaseIdent.query.get_or_404(r) for r in releases]
+ rev = FileRev(
+ sha1=params.get('sha1', None),
+ size=params.get('size', None),
+ url=params.get('url', None),
+ )
+ file_releases = [FileRelease(file=rev, release=r) for r in releases]
+ rev.releases = file_releases
+ db.session.add_all(file_releases)
+ ident = FileIdent(is_live=False, rev=rev)
+ edit = FileEdit(editgroup=editgroup, ident=ident, rev=rev)
+ if params.get('extra', None):
+        rev.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add_all([edit, ident, rev])
+ db.session.commit()
+ return file_schema.jsonify(ident)
+
+
+@app.route('/v0/editgroup/<int:ident>', methods=['GET'])
+def api_editgroup_get(ident):
+ entity = EditGroup.query\
+ .join(EditGroup.editor)\
+ .filter(EditGroup.id==ident)\
+ .first_or_404()
+ rv = editgroup_schema.dump(entity).data
+    rv['work_edits'] = work_edit_schema.dump(
+        WorkEdit.query.filter(WorkEdit.editgroup_id==ident).all(), many=True).data
+    rv['release_edits'] = release_edit_schema.dump(
+        ReleaseEdit.query.filter(ReleaseEdit.editgroup_id==ident).all(), many=True).data
+    rv['creator_edits'] = creator_edit_schema.dump(
+        CreatorEdit.query.filter(CreatorEdit.editgroup_id==ident).all(), many=True).data
+    rv['container_edits'] = container_edit_schema.dump(
+        ContainerEdit.query.filter(ContainerEdit.editgroup_id==ident).all(), many=True).data
+    rv['file_edits'] = file_edit_schema.dump(
+        FileEdit.query.filter(FileEdit.editgroup_id==ident).all(), many=True).data
+ return jsonify(rv)
+
+@app.route('/v0/editgroup', methods=['POST'])
+def api_editgroup_create(params=None):
+    if params is None:
+ params = request.get_json()
+ eg = EditGroup(
+ editor_id=1,
+ description=params.get('description', None),
+ )
+ if params.get('extra', None):
+        eg.extra_json = json.dumps(params['extra']).encode('utf-8')
+ db.session.add(eg)
+ db.session.commit()
+ return editgroup_schema.jsonify(eg)
+
+@app.route('/v0/editgroup/<int:ident>/accept', methods=['POST'])
+def api_editgroup_accept(ident):
+ entity = EditGroup.query.get_or_404(ident)
+ accept_editgroup(entity)
+ return jsonify({'success': True})
+
+
+@app.route('/v0/editor/<username>', methods=['GET'])
+def api_editor_get(username):
+ entity = Editor.query.filter(Editor.username==username).first_or_404()
+ return editor_schema.jsonify(entity)
+
+@app.route('/v0/editor/<username>/changelog', methods=['GET'])
+def api_editor_changelog(username):
+ entries = ChangelogEntry.query\
+ .join(ChangelogEntry.editgroup)\
+ .join(EditGroup.editor)\
+ .filter(Editor.username==username)\
+ .all()
+ return changelogentry_schema.jsonify(entries, many=True)
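
Editor's note: to make the request/response shapes concrete, one way to exercise the container endpoints above with plain `requests` (a sketch; host, port, and field values are assumptions):

    import requests

    BASE = "http://localhost:5000"

    # POST /v0/container creates a rev, an ident, and an edit in an editgroup
    rv = requests.post(BASE + "/v0/container", json={
        "name": "Journal of Examples",   # illustrative values only
        "issn": "1234-5678",
        "publisher": "Example House",
    })
    assert rv.status_code == 200
    ident = rv.json()["id"]

    # The lookup endpoints read their parameters from a JSON body, even on GET
    rv = requests.get(BASE + "/v0/container/lookup", json={"issn": "1234-5678"})
    assert rv.status_code == 200

Reading a JSON body on GET is unusual (some clients and proxies drop it), which may be worth revisiting.
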
diff --git a/python/fatcat/api_client.py b/python/fatcat/api_client.py
new file mode 100644
index 00000000..f2fd6a1d
--- /dev/null
+++ b/python/fatcat/api_client.py
@@ -0,0 +1,175 @@
+
+import sys
+import json
+import requests
+
+
+class FatCatApiClient:
+
+ def __init__(self, host_url):
+ self.host_url = host_url
+ self.session = requests.Session()
+ self._issn_map = dict()
+
+ def get(self, path, data=None):
+ headers = {"content-type": "application/json"}
+ return self.session.get(self.host_url + path, json=data,
+ headers=headers)
+
+ def post(self, path, data=None):
+ headers = {"content-type": "application/json"}
+ return self.session.post(self.host_url + path, json=data,
+ headers=headers)
+
+ def new_editgroup(self):
+ rv = self.post('/v0/editgroup', data=dict(
+ editor=1))
+ assert rv.status_code == 200
+ editgroup_id = rv.json()['id']
+ return editgroup_id
+
+ def accept_editgroup(self, eg):
+ rv = self.post('/v0/editgroup/{}/accept'.format(eg))
+ assert rv.status_code == 200
+ return rv
+
+ def lookup_issn(self, issn):
+ assert len(issn) == 9 and issn[4] == '-'
+ if issn in self._issn_map:
+ return self._issn_map[issn]
+ rv = self.get('/v0/container/lookup', data=dict(issn=issn))
+ container_id = None
+ if rv.status_code == 200:
+ container_id = rv.json()['id']
+ else:
+ # only other valid response is a 404; otherwise we had an error
+ assert rv.status_code == 404
+ self._issn_map[issn] = container_id
+ return container_id
+
+ def import_crossref_file(self, json_file, create_containers=False, batchsize=100):
+ eg = self.new_editgroup()
+ i = 0
+ with open(json_file, 'r') as file:
+ for line in file:
+ if i % batchsize == 0:
+ sys.stdout.write('\n{}: '.format(i))
+ if (i+1) % 20 == 0:
+ sys.stdout.write('.')
+ i = i + 1
+ obj = json.loads(line)
+ if not ("author" in obj and "title" in obj):
+ continue
+ try:
+ self.import_crossref_dict(obj, editgroup=eg,
+ create_containers=create_containers)
+ except Exception as e:
+ print("ERROR: {}".format(e))
+ if i % batchsize == 0:
+ self.accept_editgroup(eg)
+ eg = self.new_editgroup()
+ if i % batchsize != 0:
+ self.accept_editgroup(eg)
+ print("done!")
+
+ def import_crossref_dict(self, meta, editgroup=None,
+ create_containers=False):
+
+ # creators
+ creators = []
+ for am in meta['author']:
+ c = dict(name="{} {}".format(am['given'], am['family']),
+ sortname="{}, {}".format(am['family'], am['given']),
+ orcid=None)
+ creators.append(c)
+
+ # container
+ issn = meta.get('ISSN', [None])[0]
+ container_id = self.lookup_issn(issn)
+ container = dict(
+ issn=issn,
+ name=meta['container-title'][0],
+ container=container_id,
+ #sortname=meta['short-container-title'][0])
+ publisher=meta['publisher'])
+
+        if container_id is None and create_containers and issn is not None:
+ rv = self.post('/v0/container', data=dict(
+ issn=container['issn'],
+ publisher=container['publisher']))
+ assert rv.status_code == 200
+ container_id = rv.json()['id']
+ print("created container: {}".format(issn))
+ container['id'] = container_id
+ self._issn_map[issn] = container_id
+
+ # references
+ refs = []
+ for i, rm in enumerate(meta.get('reference', [])):
+ ref = dict(
+ doi=rm.get("DOI", None),
+ index=i+1,
+ # TODO: how to generate a proper stub here from k/v metadata?
+ stub="| ".join(rm.values()))
+ refs.append(ref)
+
+ # work and release
+ title = meta['title'][0]
+ rv = self.post('/v0/work',
+ data=dict(title=title, editgroup=editgroup)) #work_type="book"
+ assert rv.status_code == 200
+ work_id = rv.json()['id']
+
+ extra = dict(crossref={
+ 'links': meta.get('link', []),
+ 'subject': meta.get('subject'),
+ 'type': meta['type'],
+ 'alternative-id': meta.get('alternative-id', [])})
+
+ rv = self.post('/v0/release', data=dict(
+ title=title,
+ work=work_id,
+ # XXX: creators=creators,
+ # XXX: refs=refs,
+ # XXX: container=container_id,
+ release_type=meta['type'],
+ doi=meta['DOI'],
+ date=meta['created']['date-time'],
+ license=meta.get('license', [dict(URL=None)])[0]['URL'] or None,
+ issue=meta.get('issue', None),
+ volume=meta.get('volume', None),
+ pages=meta.get('page', None),
+ editgroup=editgroup,
+ extra=extra))
+ assert rv.status_code == 200
+ release_id = rv.json()['id']
+
+ def import_issn_file(self, json_file, create_containers=False, batchsize=100):
+ eg = self.new_editgroup()
+ i = 0
+ with open(json_file, 'r') as file:
+ for line in file:
+ if i % batchsize == 0:
+ sys.stdout.write('\n{}: '.format(i))
+ if (i+1) % 20 == 0:
+ sys.stdout.write('.')
+ i = i + 1
+ obj = json.loads(line)
+ if not ("author" in obj and "title" in obj):
+ continue
+ try:
+ self.import_crossref_dict(obj, editgroup=eg,
+ create_containers=create_containers)
+ except Exception as e:
+ print("ERROR: {}".format(e))
+ if i % batchsize == 0:
+ self.accept_editgroup(eg)
+ eg = self.new_editgroup()
+ if i % batchsize != 0:
+ self.accept_editgroup(eg)
+ print("done!")
+
+ def health(self):
+ rv = self.get("/health")
+ assert rv.status_code == 200
+ return rv.json()
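
Editor's note: a hypothetical end-to-end use of the client above, run from the `python/` directory against a local server (host and batch size are assumptions; the sample file ships in `tests/files/`):

    from fatcat.api_client import FatCatApiClient

    client = FatCatApiClient("http://localhost:5000")
    client.health()
    # Each batch is committed by accepting the current editgroup and
    # opening a fresh one for the next batch.
    client.import_crossref_file(
        "tests/files/crossref-works.2018-01-21.badsample.json",
        create_containers=True, batchsize=50)
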
diff --git a/python/fatcat/dummy.py b/python/fatcat/dummy.py
new file mode 100644
index 00000000..f22c4dcb
--- /dev/null
+++ b/python/fatcat/dummy.py
@@ -0,0 +1,135 @@
+
+import random
+import hashlib
+from fatcat import db
+from fatcat.models import *
+
+def insert_example_works():
+ """
+ TODO: doesn't create an edit trail (yet)
+ """
+
+ n_elkies = CreatorRev(
+ name="Noam D. Elkies",
+ sortname="Elkies, N",
+ orcid=None)
+ n_elkies_id = CreatorIdent(rev=n_elkies)
+ pi_work = WorkRev(
+ title="Why is π^2 so close to 10?",
+ work_type="journal-article")
+ pi_work_id = WorkIdent(rev=pi_work)
+ pi_release = ReleaseRev(
+ title=pi_work.title,
+ work_ident_id=pi_work.id,
+ release_type="journal-article")
+ pi_contrib = ReleaseContrib(creator=n_elkies_id)
+ pi_release.creators.append(pi_contrib)
+ pi_release_id = ReleaseIdent(rev=pi_release)
+ pi_work.primary_release = pi_release_id
+
+ # TODO:
+ #pi_file = File(
+ # sha1="efee52e46c86691e2b892dbeb212f3b92e92e9d3",
+ # url="http://www.math.harvard.edu/~elkies/Misc/pi10.pdf")
+ db.session.add_all([n_elkies, n_elkies_id, pi_work, pi_work_id, pi_release,
+ pi_release_id])
+
+ # TODO:
+ #ligo_collab = CreatorRev(name="LIGO Scientific Collaboration")
+ #ligo_paper = ReleaseRev(
+ # title="Full Band All-sky Search for Periodic Gravitational Waves in the O1 LIGO Data")
+ db.session.commit()
+
+
+def insert_random_works(count=100):
+ """
+ TODO: doesn't create an edit trail (yet)
+ """
+
+ first_names = ("Sarah", "Robin", "Halko", "Jefferson", "Max", "桃井",
+ "Koizumi", "Rex", "Billie", "Tenzin")
+ last_names = ("Headroom", "はるこ", "Jun'ichirō", "Wong", "Smith")
+
+ author_revs = []
+ author_ids = []
+ for _ in range(count):
+ first = random.choice(first_names)
+ last = random.choice(last_names)
+ ar = CreatorRev(
+ name="{} {}".format(first, last),
+ sortname="{}, {}".format(last, first[0]),
+ orcid=None)
+ author_revs.append(ar)
+ author_ids.append(CreatorIdent(rev=ar))
+
+ container_revs = []
+ container_ids = []
+ for _ in range(5):
+ cr = ContainerRev(
+ name="The Fake Journal of Stuff",
+ #container_id=None,
+ publisher="Big Paper",
+ sortname="Fake Journal of Stuff",
+ issn="1234-5678")
+ container_revs.append(cr)
+ container_ids.append(ContainerIdent(rev=cr))
+
+ title_start = ("All about ", "When I grow up I want to be",
+ "The final word on", "Infinity: ", "The end of")
+ title_ends = ("Humankind", "Bees", "Democracy", "Avocados", "«küßî»", "“ЌύБЇ”")
+ work_revs = []
+ work_ids = []
+ release_revs = []
+ release_ids = []
+ file_revs = []
+ file_ids = []
+ for _ in range(count):
+ title = "{} {}".format(random.choice(title_start), random.choice(title_ends))
+ work = WorkRev(title=title)
+ work_id = WorkIdent(rev=work)
+ authors = set(random.sample(author_ids, 5))
+ release = ReleaseRev(
+ title=work.title,
+ creators=[ReleaseContrib(creator=a) for a in list(authors)],
+ #work=work,
+ container=random.choice(container_ids))
+ release_id = ReleaseIdent(rev=release)
+ work.primary_release = release_id
+ authors.add(random.choice(author_ids))
+ release2 = ReleaseRev(
+ title=work.title + " (again)",
+ creators=[ReleaseContrib(creator=a) for a in list(authors)],
+ #work=work,
+ container=random.choice(container_ids))
+ release_id2 = ReleaseIdent(rev=release2)
+ work_revs.append(work)
+ work_ids.append(work_id)
+ release_revs.append(release)
+ release_revs.append(release2)
+ release_ids.append(release_id)
+ release_ids.append(release_id2)
+
+ file_content = str(random.random()) * random.randint(3,100)
+ file_sha = hashlib.sha1(file_content.encode('utf-8')).hexdigest()
+ file_rev = FileRev(
+ sha1=file_sha,
+ size=len(file_content),
+ url="http://archive.invalid/{}".format(file_sha),
+ releases=[FileRelease(release=release_id), FileRelease(release=release_id2)],
+ )
+ file_id = FileIdent(rev=file_rev)
+ file_revs.append(file_rev)
+ file_ids.append(file_id)
+
+ db.session.add_all(author_revs)
+ db.session.add_all(author_ids)
+ db.session.add_all(work_revs)
+ db.session.add_all(work_ids)
+ db.session.add_all(release_revs)
+ db.session.add_all(release_ids)
+ db.session.add_all(container_revs)
+ db.session.add_all(container_ids)
+ db.session.add_all(file_revs)
+ db.session.add_all(file_ids)
+
+ db.session.commit()
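
Editor's note: loading these fixtures into a fresh development database might look like the following (a sketch; assumes the tables exist, e.g. via `run.py --init-db`):

    from fatcat import db
    from fatcat.dummy import insert_example_works, insert_random_works

    db.create_all()   # harmless if --init-db already created the tables
    insert_example_works()
    insert_random_works(count=25)
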
diff --git a/python/fatcat/models.py b/python/fatcat/models.py
new file mode 100644
index 00000000..c35e541f
--- /dev/null
+++ b/python/fatcat/models.py
@@ -0,0 +1,429 @@
+
+"""
+states for identifiers:
+- pre-live: points to a rev (during edit/accept period)
+- live: points to a rev
+- redirect: live, points to upstream rev, also points to redirect id
+ => if live and redirect non-null, all other fields copied from redirect target
+- deleted: live, but doesn't point to a rev
+
+possible refactors:
+- '_rev' instead of '_rev'
+- use mixins for entities
+"""
+
+import json
+import hashlib
+from marshmallow import post_dump, pre_load
+from fatcat import db, ma
+
+
+### Inter-Entity Relationships ###############################################
+
+class ReleaseContrib(db.Model):
+ __tablename__ = "release_contrib"
+ release_rev = db.Column(db.ForeignKey('release_rev.id'), nullable=False, primary_key=True)
+ creator_ident_id = db.Column(db.ForeignKey('creator_ident.id'), nullable=False, primary_key=True)
+ stub = db.Column(db.String, nullable=True)
+ type = db.Column(db.String, nullable=True)
+ # TODO: index (int)?
+
+ creator = db.relationship("CreatorIdent")
+ release = db.relationship("ReleaseRev")
+
+class ReleaseRef(db.Model):
+ __tablename__ = "release_ref"
+ id = db.Column(db.Integer, primary_key=True, nullable=False)
+ release_rev = db.Column(db.ForeignKey('release_rev.id'), nullable=False)
+ target_release_ident_id = db.Column(db.ForeignKey('release_ident.id'), nullable=True)
+ index = db.Column(db.Integer, nullable=True)
+ stub = db.Column(db.String, nullable=True)
+ doi = db.Column(db.String, nullable=True)
+
+ release = db.relationship("ReleaseRev")
+ target = db.relationship("ReleaseIdent")
+
+class FileRelease(db.Model):
+ __tablename__ = "file_release"
+ id = db.Column(db.Integer, primary_key=True, nullable=False)
+    file_rev = db.Column(db.ForeignKey('file_rev.id'), nullable=False)
+ release_ident_id = db.Column(db.ForeignKey('release_ident.id'), nullable=False)
+
+ release = db.relationship("ReleaseIdent")
+ file = db.relationship("FileRev")
+
+
+### Entities #################################################################
+
+class WorkRev(db.Model):
+ __tablename__ = 'work_rev'
+ id = db.Column(db.Integer, primary_key=True)
+ extra_json = db.Column(db.String, nullable=True)
+
+ title = db.Column(db.String)
+ work_type = db.Column(db.String)
+ primary_release_id = db.Column(db.ForeignKey('release_ident.id'), nullable=True)
+ primary_release = db.relationship('ReleaseIdent')
+
+class WorkIdent(db.Model):
+ """
+ If rev_id is null, this was deleted.
+ If redirect_id is not null, this has been merged with the given id. In this
+ case rev_id is a "cached" copy of the redirect's rev_id, as
+ an optimization. If the merged work is "deleted", rev_id can be
+ null and redirect_id not-null.
+ """
+ __tablename__ = 'work_ident'
+ id = db.Column(db.Integer, primary_key=True, nullable=False)
+ is_live = db.Column(db.Boolean, nullable=False, default=False)
+ rev_id = db.Column(db.ForeignKey('work_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('work_ident.id'), nullable=True)
+ rev = db.relationship("WorkRev")
+
+class WorkEdit(db.Model):
+ __tablename__ = 'work_edit'
+ id = db.Column(db.Integer, primary_key=True)
+ ident_id = db.Column(db.ForeignKey('work_ident.id'), nullable=True)
+ rev_id = db.Column(db.ForeignKey('work_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('work_ident.id'), nullable=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'), nullable=True)
+ extra_json = db.Column(db.String, nullable=True)
+ ident = db.relationship("WorkIdent", foreign_keys="WorkEdit.ident_id")
+ rev = db.relationship("WorkRev")
+ editgroup = db.relationship("EditGroup")
+
+
+class ReleaseRev(db.Model):
+ __tablename__ = 'release_rev'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ extra_json = db.Column(db.String, nullable=True)
+
+ work_ident_id = db.Column(db.ForeignKey('work_ident.id', use_alter=True), nullable=True) # XXX: nullable=False
+ container_ident_id = db.Column(db.ForeignKey('container_ident.id'), nullable=True)
+ title = db.Column(db.String, nullable=False)
+ license = db.Column(db.String, nullable=True) # TODO: oa status foreign key
+ release_type = db.Column(db.String) # TODO: foreign key
+ date = db.Column(db.String, nullable=True) # TODO: datetime
+ doi = db.Column(db.String, nullable=True) # TODO: identifier table
+ volume = db.Column(db.String, nullable=True)
+ pages = db.Column(db.String, nullable=True)
+ issue = db.Column(db.String, nullable=True)
+
+ work = db.relationship("WorkIdent", lazy='subquery', foreign_keys="ReleaseRev.work_ident_id")
+ container = db.relationship("ContainerIdent", lazy='subquery')
+ creators = db.relationship('ReleaseContrib', lazy='subquery')
+ refs = db.relationship('ReleaseRef', lazy='subquery')
+
+class ReleaseIdent(db.Model):
+ __tablename__ = 'release_ident'
+ id = db.Column(db.Integer, primary_key=True)
+ is_live = db.Column(db.Boolean, nullable=False, default=False)
+ rev_id = db.Column(db.ForeignKey('release_rev.id'))
+ redirect_id = db.Column(db.ForeignKey('release_ident.id'), nullable=True)
+ rev = db.relationship("ReleaseRev")
+
+class ReleaseEdit(db.Model):
+ __tablename__ = 'release_edit'
+ id = db.Column(db.Integer, primary_key=True)
+ ident_id = db.Column(db.ForeignKey('release_ident.id'), nullable=True)
+ rev_id = db.Column(db.ForeignKey('release_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('release_ident.id'), nullable=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'), nullable=True)
+ extra_json = db.Column(db.String, nullable=True)
+ ident = db.relationship("ReleaseIdent", foreign_keys="ReleaseEdit.ident_id")
+ rev = db.relationship("ReleaseRev")
+ editgroup = db.relationship("EditGroup")
+
+
+class CreatorRev(db.Model):
+ __tablename__ = 'creator_rev'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ extra_json = db.Column(db.String, nullable=True)
+
+ name = db.Column(db.String)
+ sortname = db.Column(db.String)
+ orcid = db.Column(db.String) # TODO: identifier table
+
+class CreatorIdent(db.Model):
+ __tablename__ = 'creator_ident'
+ id = db.Column(db.Integer, primary_key=True)
+ is_live = db.Column(db.Boolean, nullable=False, default=False)
+ rev_id = db.Column(db.ForeignKey('creator_rev.id'))
+ redirect_id = db.Column(db.ForeignKey('creator_ident.id'), nullable=True)
+ rev = db.relationship("CreatorRev")
+
+class CreatorEdit(db.Model):
+ __tablename__ = 'creator_edit'
+ id = db.Column(db.Integer, primary_key=True)
+ ident_id = db.Column(db.ForeignKey('creator_ident.id'), nullable=True)
+ rev_id = db.Column(db.ForeignKey('creator_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('creator_ident.id'), nullable=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'), nullable=True)
+ extra_json = db.Column(db.String, nullable=True)
+ ident = db.relationship("CreatorIdent", foreign_keys="CreatorEdit.ident_id")
+ rev = db.relationship("CreatorRev")
+ editgroup = db.relationship("EditGroup")
+
+
+class ContainerRev(db.Model):
+ __tablename__ = 'container_rev'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ extra_json = db.Column(db.String, nullable=True)
+
+ name = db.Column(db.String)
+ parent_id = db.Column(db.ForeignKey('container_ident.id', use_alter=True))
+ publisher = db.Column(db.String) # TODO: foreign key
+ sortname = db.Column(db.String)
+ issn = db.Column(db.String) # TODO: identifier table
+ parent = db.relationship("ContainerIdent", foreign_keys="ContainerRev.parent_id")
+
+class ContainerIdent(db.Model):
+ __tablename__ = 'container_ident'
+ id = db.Column(db.Integer, primary_key=True)
+ is_live = db.Column(db.Boolean, nullable=False, default=False)
+ rev_id = db.Column(db.ForeignKey('container_rev.id'))
+ redirect_id = db.Column(db.ForeignKey('container_ident.id'), nullable=True)
+ rev = db.relationship("ContainerRev", foreign_keys="ContainerIdent.rev_id")
+
+class ContainerEdit(db.Model):
+ __tablename__ = 'container_edit'
+ id = db.Column(db.Integer, primary_key=True)
+ ident_id = db.Column(db.ForeignKey('container_ident.id'), nullable=True)
+ rev_id = db.Column(db.ForeignKey('container_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('container_ident.id'), nullable=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'), nullable=True)
+ extra_json = db.Column(db.String, nullable=True)
+ ident = db.relationship("ContainerIdent", foreign_keys="ContainerEdit.ident_id")
+ rev = db.relationship("ContainerRev")
+ editgroup = db.relationship("EditGroup")
+
+
+class FileRev(db.Model):
+ __tablename__ = 'file_rev'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ extra_json = db.Column(db.String, nullable=True)
+
+ size = db.Column(db.Integer)
+ sha1 = db.Column(db.String) # TODO: hash table... only or in addition?
+ url = db.Column(db.Integer) # TODO: URL table
+ releases = db.relationship('FileRelease', lazy='subquery')
+
+class FileIdent(db.Model):
+ __tablename__ = 'file_ident'
+ id = db.Column(db.Integer, primary_key=True)
+ is_live = db.Column(db.Boolean, nullable=False, default=False)
+ rev_id = db.Column(db.ForeignKey('file_rev.id'))
+ redirect_id = db.Column(db.ForeignKey('file_ident.id'), nullable=True)
+ rev = db.relationship("FileRev")
+
+class FileEdit(db.Model):
+ __tablename__ = 'file_edit'
+ id = db.Column(db.Integer, primary_key=True)
+ ident_id = db.Column(db.ForeignKey('file_ident.id'), nullable=True)
+ rev_id = db.Column(db.ForeignKey('file_rev.id'), nullable=True)
+ redirect_id = db.Column(db.ForeignKey('file_ident.id'), nullable=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'), nullable=True)
+ extra_json = db.Column(db.String, nullable=True)
+ ident = db.relationship("FileIdent", foreign_keys="FileEdit.ident_id")
+ rev = db.relationship("FileRev")
+ editgroup = db.relationship("EditGroup")
+
+
+### Editing #################################################################
+
+class EditGroup(db.Model):
+ __tablename__ = 'editgroup'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ editor_id = db.Column(db.ForeignKey('editor.id'), nullable=False)
+ description = db.Column(db.String)
+ extra_json = db.Column(db.String, nullable=True)
+
+ editor = db.relationship("Editor", foreign_keys="EditGroup.editor_id")
+
+class Editor(db.Model):
+ __tablename__ = 'editor'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ username = db.Column(db.String, nullable=False, unique=True)
+ is_admin = db.Column(db.Boolean, nullable=False, default=False)
+ active_editgroup_id = db.Column(db.ForeignKey('editgroup.id', use_alter=True))
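+    # use_alter breaks the circular foreign key dependency between editor and
+    # editgroup (editgroup.editor_id <-> editor.active_editgroup_id)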
+ active_editgroup = db.relationship('EditGroup', foreign_keys='Editor.active_editgroup_id')
+
+class ChangelogEntry(db.Model):
+ __tablename__= 'changelog'
+ id = db.Column(db.Integer, primary_key=True, autoincrement=True)
+ editgroup_id = db.Column(db.ForeignKey('editgroup.id'))
+ timestamp = db.Column(db.Integer)
+ editgroup = db.relationship("EditGroup")
+
+
+### Marshmallow Wrappers ####################################################
+
+class ExtraJsonSchema(ma.ModelSchema):
+
+    @post_dump(pass_many=False)
+    def json_unflatten(self, data):
+        extra = data.pop('extra_json', None)
+        if extra is not None:
+            data['extra'] = json.loads(extra)
+        return data
+
+    @pre_load(pass_many=False)
+    def json_flatten(self, data):
+        extra = data.pop('extra', None)
+        if extra is not None:
+            extra = json.dumps(extra)
+        data['extra_json'] = extra
+        return data
+
+class EntitySchema(ExtraJsonSchema):
+
+    @post_dump(pass_many=False)
+    def merge_rev(self, data):
+        if data.get('rev') is not None:
+            rev_id = data['rev'].pop('id')
+            data.update(data['rev'])
+            data['rev'] = rev_id
+        else:
+            data['rev'] = None
+        return data
+
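+# Rough sketch of what EntitySchema.merge_rev does to serialized output
+# (shapes are illustrative only): an ident that would serialize as
+#   {"id": 15, "is_live": true, "rev": {"id": 42, "title": "Some Work"}}
+# gets flattened to
+#   {"id": 15, "is_live": true, "rev": 42, "title": "Some Work"}
+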
+class ReleaseContribSchema(ma.ModelSchema):
+ class Meta:
+ model = ReleaseContrib
+ creator = db.relationship("CreatorIdent")
+ release = db.relationship("ReleaseRev")
+
+class ReleaseRefSchema(ma.ModelSchema):
+ class Meta:
+ model = ReleaseRef
+ release = db.relationship("ReleaseRev")
+ target = db.relationship("ReleaseIdent")
+
+class FileReleaseSchema(ma.ModelSchema):
+ class Meta:
+ model = FileRelease
+ release = db.relationship("ReleaseIdent")
+ file = db.relationship("FileRev")
+
+class WorkRevSchema(ma.ModelSchema):
+ class Meta:
+ model = WorkRev
+ include_fk = True
+
+class WorkSchema(EntitySchema):
+ class Meta:
+ model = WorkIdent
+ include_fk = True
+ rev = ma.Nested(WorkRevSchema)
+
+class WorkEditSchema(ma.ModelSchema):
+ class Meta:
+ model = WorkEdit
+
+work_rev_schema = WorkRevSchema()
+work_schema = WorkSchema()
+work_edit_schema = WorkEditSchema()
+
+
+class ReleaseRevSchema(ma.ModelSchema):
+ class Meta:
+ model = ReleaseRev
+ include_fk = True
+ work = ma.Nested('WorkSchema')
+ container = ma.Nested('ContainerSchema')
+ creators = ma.Nested(ReleaseContribSchema, many=True)
+ refs = ma.Nested(ReleaseRefSchema, many=True)
+
+class ReleaseSchema(EntitySchema):
+ class Meta:
+ model = ReleaseIdent
+ include_fk = True
+ rev = ma.Nested(ReleaseRevSchema)
+ # XXX: files = ma.Nested('FileSchema', many=True)
+
+class ReleaseEditSchema(ma.ModelSchema):
+ class Meta:
+ model = ReleaseEdit
+
+release_rev_schema = ReleaseRevSchema()
+release_schema = ReleaseSchema()
+release_edit_schema = ReleaseEditSchema()
+
+
+class CreatorRevSchema(ma.ModelSchema):
+ class Meta:
+ model = CreatorRev
+ include_fk = True
+
+class CreatorSchema(EntitySchema):
+ class Meta:
+ model = CreatorIdent
+ include_fk = True
+ rev = ma.Nested(CreatorRevSchema)
+
+class CreatorEditSchema(ma.ModelSchema):
+ class Meta:
+ model = CreatorEdit
+
+creator_rev_schema = CreatorRevSchema()
+creator_schema = CreatorSchema()
+creator_edit_schema = CreatorEditSchema()
+
+
+class ContainerRevSchema(ma.ModelSchema):
+ class Meta:
+ model = ContainerRev
+ include_fk = True
+
+class ContainerSchema(EntitySchema):
+ class Meta:
+ model = ContainerIdent
+ include_fk = True
+ rev = ma.Nested(ContainerRevSchema)
+
+class ContainerEditSchema(ma.ModelSchema):
+ class Meta:
+ model = ContainerEdit
+
+container_rev_schema = ContainerRevSchema()
+container_schema = ContainerSchema()
+container_edit_schema = ContainerEditSchema()
+
+
+class FileRevSchema(ma.ModelSchema):
+ class Meta:
+ model = FileRev
+ include_fk = True
+
+ releases = ma.Nested(FileReleaseSchema, many=True)
+
+class FileSchema(EntitySchema):
+ class Meta:
+ model = FileIdent
+ include_fk = True
+ rev = ma.Nested(FileRevSchema)
+
+class FileEditSchema(ma.ModelSchema):
+ class Meta:
+ model = FileEdit
+
+file_rev_schema = FileRevSchema()
+file_schema = FileSchema()
+file_edit_schema = FileEditSchema()
+
+
+class EditorSchema(ma.ModelSchema):
+ class Meta:
+ model = Editor
+
+class EditGroupSchema(ma.ModelSchema):
+ class Meta:
+ model = EditGroup
+ editor = ma.Nested(EditorSchema)
+
+editor_schema = EditorSchema()
+editgroup_schema = EditGroupSchema()
+
+class ChangelogEntrySchema(ma.ModelSchema):
+ class Meta:
+ model = ChangelogEntry
+
+changelogentry_schema = ChangelogEntrySchema()
diff --git a/python/fatcat/routes.py b/python/fatcat/routes.py
new file mode 100644
index 00000000..0c86bd78
--- /dev/null
+++ b/python/fatcat/routes.py
@@ -0,0 +1,129 @@
+
+import os
+import json
+from flask import Flask, render_template, send_from_directory, request, \
+ url_for, abort, g, redirect, jsonify, session
+from fatcat import app, db, api
+
+
+### Views ###################################################################
+
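+# These views call the in-process API handlers in fatcat.api and decode their
+# JSON responses before rendering templates.
+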
+@app.route('/work/create', methods=['GET'])
+def work_create():
+ return render_template('work_add.html')
+
+@app.route('/work/random', methods=['GET'])
+def work_random():
+ rv = api.api_work_random()
+ ident = rv.location.split('/')[-1]
+ return redirect("/work/{}".format(ident))
+
+@app.route('/work/<int:ident>', methods=['GET'])
+def work_view(ident):
+ rv = api.api_work_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('work_view.html', work=entity)
+
+@app.route('/release/<int:ident>', methods=['GET'])
+def release_view(ident):
+ rv = api.api_release_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('release_view.html', release=entity)
+
+@app.route('/release/<int:ident>/changelog', methods=['GET'])
+def release_changelog(ident):
+ rv = api.api_release_get(ident)
+ release = json.loads(rv.data.decode('utf-8'))
+ rv = api.api_release_changelog(ident)
+ changelog_entries = json.loads(rv.data.decode('utf-8'))
+ return render_template('release_changelog.html', release=release,
+ changelog_entries=changelog_entries)
+
+@app.route('/release/random', methods=['GET'])
+def release_random():
+ rv = api.api_release_random()
+ ident = rv.location.split('/')[-1]
+ return redirect("/release/{}".format(ident))
+
+@app.route('/container/create', methods=['GET'])
+def container_create_view():
+ return render_template('container_add.html')
+
+@app.route('/container/create', methods=['POST'])
+def container_create():
+ params = dict()
+ for k in request.form:
+ if k.startswith('container_'):
+ params[k[10:]] = request.form[k]
+ rv = api.api_container_create(params=params)
+ container = json.loads(rv.data.decode('utf-8'))
+ return redirect("/container/{}".format(container['id']))
+
+@app.route('/creator/<int:ident>', methods=['GET'])
+def creator_view(ident):
+ rv = api.api_creator_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('creator_view.html', creator=entity)
+
+@app.route('/container/<int:ident>', methods=['GET'])
+def container_view(ident):
+ rv = api.api_container_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('container_view.html', container=entity)
+
+@app.route('/file/<int:ident>', methods=['GET'])
+def file_view(ident):
+ rv = api.api_file_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('file_view.html', file=entity)
+
+@app.route('/editgroup/<int:ident>', methods=['GET'])
+def editgroup_view(ident):
+ rv = api.api_editgroup_get(ident)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('editgroup_view.html', editgroup=entity)
+
+@app.route('/editgroup/current', methods=['GET'])
+def editgroup_current():
+ eg = api.get_or_create_editgroup()
+ return redirect('/editgroup/{}'.format(eg.id))
+
+@app.route('/editor/<username>', methods=['GET'])
+def editor_view(username):
+ rv = api.api_editor_get(username)
+ entity = json.loads(rv.data.decode('utf-8'))
+ return render_template('editor_view.html', editor=entity)
+
+@app.route('/editor/<username>/changelog', methods=['GET'])
+def editor_changelog(username):
+ rv = api.api_editor_get(username)
+ editor = json.loads(rv.data.decode('utf-8'))
+ rv = api.api_editor_changelog(username)
+ changelog_entries = json.loads(rv.data.decode('utf-8'))
+ return render_template('editor_changelog.html', editor=editor,
+ changelog_entries=changelog_entries)
+
+
+### Static Routes ###########################################################
+
+@app.errorhandler(404)
+def page_not_found(e):
+ return render_template('404.html'), 404
+
+@app.route('/', methods=['GET'])
+def homepage():
+ return render_template('home.html')
+
+@app.route('/about', methods=['GET'])
+def aboutpage():
+ return render_template('about.html')
+
+@app.route('/robots.txt', methods=['GET'])
+def robots():
+ return send_from_directory(os.path.join(app.root_path, 'static'),
+ 'robots.txt',
+ mimetype='text/plain')
+
+@app.route('/health', methods=['GET'])
+def health():
+ return jsonify({'ok': True})
diff --git a/python/fatcat/sql.py b/python/fatcat/sql.py
new file mode 100644
index 00000000..9b1922ba
--- /dev/null
+++ b/python/fatcat/sql.py
@@ -0,0 +1,150 @@
+
+import json
+import time
+import random
+import hashlib
+from sqlalchemy.orm.session import make_transient
+from fatcat import db
+import fatcat.api
+from fatcat.models import *
+
+def populate_db():
+ admin_editor = Editor(id=1, username="admin", is_admin=True)
+ db.session.add(admin_editor)
+ db.session.commit()
+
+def add_crossref_via_model(meta):
+
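+    # `meta` is expected to be a single Crossref work record; abbreviated,
+    # illustrative shape:
+    #   {"title": [...], "author": [{"given": ..., "family": ...}],
+    #    "ISSN": [...], "container-title": [...], "publisher": ...,
+    #    "DOI": ..., "type": ..., "created": {"date-time": ...}}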
+ title = meta['title'][0]
+
+ # authors
+ author_revs = []
+ author_ids = []
+ for am in meta['author']:
+ ar = CreatorRev(
+ name="{} {}".format(am['given'], am['family']),
+ sortname="{}, {}".format(am['family'], am['given']),
+ orcid=None)
+ author_revs.append(ar)
+ author_ids.append(CreatorIdent(rev=ar))
+
+ # container
+ container = ContainerRev(
+ issn=meta['ISSN'][0],
+ name=meta['container-title'][0],
+ #container_id=None,
+ publisher=meta['publisher'],
+ sortname=meta['short-container-title'][0])
+ container_id = ContainerIdent(rev=container)
+
+ # work and release
+ work = WorkRev(title=title)
+ work_id = WorkIdent(rev=work)
+ release = ReleaseRev(
+ title=title,
+ creators=[ReleaseContrib(creator=a) for a in author_ids],
+ # XXX: work=work,
+ container=container_id,
+ release_type=meta['type'],
+ doi=meta['DOI'],
+ date=meta['created']['date-time'],
+ license=meta.get('license', [dict(URL=None)])[0]['URL'] or None,
+ issue=meta.get('issue', None),
+ volume=meta.get('volume', None),
+ pages=meta.get('page', None))
+ release_id = ReleaseIdent(rev=release)
+ work.primary_release = release_id
+    # extra_json column is a String, so store the JSON as str (not bytes)
+    release.extra_json = json.dumps({
+        'crossref': {
+            'links': meta.get('link', []),
+            'subject': meta.get('subject', []),
+            'type': meta['type'],
+            'alternative-id': meta.get('alternative-id', []),
+        }
+    }, indent=None)
+
+ # references
+ for i, rm in enumerate(meta.get('reference', [])):
+ ref = ReleaseRef(
+ release_rev=release,
+ doi=rm.get("DOI", None),
+ index=i+1,
+ # TODO: how to generate a proper stub here from k/v metadata?
+ stub="| ".join(rm.values()))
+ release.refs.append(ref)
+
+ db.session.add_all([work, work_id, release, release_id, container,
+ container_id])
+ db.session.add_all(author_revs)
+ db.session.add_all(author_ids)
+ db.session.commit()
+
+def accept_editgroup(eg):
+
+ # check if already accepted
+ # XXX: add a test for this
+ assert ChangelogEntry.query.filter(ChangelogEntry.editgroup_id==eg.id).count() == 0
+
+ # start transaction (TODO: explicitly?)
+
+ # for each entity type:
+ for cls in (WorkEdit, ReleaseEdit, CreatorEdit, ContainerEdit, FileEdit):
+ edits = cls.query.filter(cls.editgroup_id==eg.id).all()
+ # for each entity edit->ident:
+ for edit in edits:
+ # update entity ident state (activate, redirect, delete)
+ edit.ident.is_live = True
+ edit.ident.rev_id = edit.rev_id
+ edit.ident.redirect_id = edit.redirect_id
+ db.session.add(edit.ident)
+
+ # append log/changelog row
+ cle = ChangelogEntry(
+ editgroup_id=eg.id,
+ # TODO: is this UTC?
+ timestamp=int(time.time()))
+ db.session.add(cle)
+
+ # update edit group state
+ db.session.add(eg)
+
+ # no longer "active"
+ eg.editor.active_editgroup = None
+ db.session.add(eg.editor)
+
+ db.session.commit()
+
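+# Example usage (hypothetical identifier):
+#
+#   eg = EditGroup.query.get(editgroup_id)
+#   accept_editgroup(eg)
+#
+# afterwards, every ident touched by the group is live and a ChangelogEntry
+# row records the acceptance.
+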
+def merge_works(left_id, right_id, editgroup=None):
+ """Helper to merge two works together."""
+ left = WorkIdent.query.get_or_404(left_id)
+ right = WorkIdent.query.get_or_404(right_id)
+ assert left.is_live and right.is_live
+ assert left.rev and right.rev
+ assert (left.redirect_id is None) and (right.redirect_id is None)
+
+ if editgroup is None:
+ editgroup = fatcat.api.get_or_create_editgroup()
+
+ releases = ReleaseIdent.query\
+ .join(ReleaseIdent.rev)\
+ .filter(ReleaseRev.work_ident_id==right_id)\
+ .filter(ReleaseIdent.is_live==True)\
+ .all()
+
+ # update all right releases to point to left
+ for release_ident in releases:
+ rev = release_ident.rev
+ old_id = rev.id
+ db.session.expunge(rev)
+ make_transient(rev)
+ rev.id = None
+ rev.parent = old_id
+ rev.work_ident_id = left.id
+ re = ReleaseEdit(editgroup=editgroup, ident=release_ident, rev=rev)
+ db.session.add_all([rev, re])
+
+ # redirect right id to left (via editgroup)
+    work_edit = WorkEdit(editgroup=editgroup, ident=right,
+                         rev=left.rev, redirect_id=left.id)
+    db.session.add(work_edit)
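+
+# Note: merge_works only stages edits under the given (or active) editgroup
+# and does not commit; the merge takes effect when the editgroup is accepted
+# (see accept_editgroup above).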
diff --git a/python/fatcat/static/robots.txt b/python/fatcat/static/robots.txt
new file mode 100644
index 00000000..a168f11b
--- /dev/null
+++ b/python/fatcat/static/robots.txt
@@ -0,0 +1 @@
+# Hello friends!
diff --git a/python/fatcat/templates/404.html b/python/fatcat/templates/404.html
new file mode 100644
index 00000000..c8fbfeac
--- /dev/null
+++ b/python/fatcat/templates/404.html
@@ -0,0 +1,6 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>404: Not Found</h1>
+
+{% endblock %}
diff --git a/python/fatcat/templates/about.html b/python/fatcat/templates/about.html
new file mode 100644
index 00000000..ce194099
--- /dev/null
+++ b/python/fatcat/templates/about.html
@@ -0,0 +1,161 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>About fatcat!</h1>
+
+<p>fatcat is a half-baked idea to build an open, independent, collaboratively editable bibliographic database of most written works, with a focus on published research outputs like journal articles, pre-prints, and conference proceedings.</p>
+<h2 id="technical-architecture">Technical Architecture</h2>
+<p>The canonical backend datastore would be a very large transactional SQL server. A relatively simple and stable back-end daemon would expose an API (could be REST, GraphQL, gRPC, etc). As little &quot;application logic&quot; as possible would be embedded in this back-end; as much as possible would be pushed to bots which could be authored and operated by anybody. A separate web interface project would talk to the API backend and could be developed more rapidly.</p>
+<p>A cronjob would make periodic database dumps, both in &quot;full&quot; form (all tables and all edit history, removing only authentication credentials) and &quot;flat&quot; form (with only the most recent version of each entity, using only persistent IDs between entities).</p>
+<p>A goal is to be linked-data/RDF/JSON-LD/semantic-web &quot;compatible&quot;, but not necessarily &quot;first&quot;. It should be possible to export the database in a relatively clean RDF form, and to fetch data in a variety of formats, but internally fatcat would not be backed by a triple-store, and would not be bound to a specific third party ontology or schema.</p>
+<p>Microservice daemons should be able to proxy between the primary API and standard protocols like ResourceSync and OAI-PMH, and bots could consume external databases in those formats.</p>
+<h2 id="licensing">Licensing</h2>
+<p>The core fatcat database should only contain verifiable factual statements (which isn't to say that all statements are &quot;true&quot;), not creative or derived content.</p>
+<p>The goal is to have a very permissively licensed database: CC-0 (no rights reserved) if possible. Under US law, it should be possible to scrape and pull in factual data from other corpuses without adopting their licenses. The goal here isn't to avoid all attribution (progeny information will be included, and a large sources and acknowledgements statement should be maintained), but trying to manage the intersection of all upstream source licenses seems untenable, and creates burdens for downstream users.</p>
+<p>Special care will need to be taken around copyright and original works. I would propose either not accepting abstracts at all, or including them in a partitioned database to prevent copyright contamination. Likewise, even simple user-created content like lists, reviews, ratings, comments, discussion, documentation, etc should go in separate services.</p>
+<h2 id="basic-editing-workflow-and-bots">Basic Editing Workflow and Bots</h2>
+<p>Both human editors and bots would have edits go through the same API, with humans using either the default web interface or arbitrary integrations or client software.</p>
+<p>The usual workflow would be to create edits (or creations, merges, deletions) to individual entities one at a time, all under a single &quot;edit group&quot; of related edits (eg, correcting authorship info for multiple works related to a single author). When ready, the editor would &quot;submit&quot; the edit group for review. During the review period, humans could vote (or veto/approve if they have higher permissions), and bots can perform automated checks. During this period the editor can make tweaks if necessary. After some fixed time period (72 hours?) with no changes and no blocking issues, the edit group would be auto-accepted, if no auto-resolvable merge-conflicts have arisen. This process balances editing labor (reviews are easy, but optional) against quality (cool-down period makes it easier to detect and prevent spam or out-of-control bots). Advanced permissions could allow some trusted human and bot editors to push through edits more rapidly.</p>
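+<p>As a rough sketch, the lifecycle of a typical edit group would be:</p>
+<pre><code>create edit group
+  -&gt; attach individual entity edits (creates, updates, merges, deletions)
+  -&gt; submit for review
+  -&gt; votes, automated bot checks, editor tweaks
+  -&gt; auto-accept after the cool-down period (72 hours?) if nothing blocks
+  -&gt; changelog entry written</code></pre>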
+<p>Bots would need to be tuned to have appropriate edit group sizes (eg, daily batches, instead of millions of works in a single edit) to make human QA and reverts possible.</p>
+<p>Data progeny and citation would be left to the edit history. In the case of importing external databases, the expectation would be that special-purpose bot accounts would be used. Human editors would leave edit messages to clarify their sources.</p>
+<p>A style guide (wiki), chat room, and discussion forum would be hosted as separate stand-alone services for editors to propose projects and debate process or scope changes. It would be best if these could use federated account authorization (oauth?) to have consistent account IDs across mediums.</p>
+<h2 id="edit-log">Edit Log</h2>
+<p>As part of the process of &quot;accepting&quot; an edit group, a row would be written to an immutable, append-only log table (which internally could be a SQL table) documenting each identifier change. This log establishes a monotonically increasing version number for the entire corpus, and should make interaction with other systems easier (eg, search engines, replicated databases, alternative storage backends, notification frameworks, etc).</p>
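+<p>For example (an illustrative sketch, mirroring the prototype schema), each log row might hold little more than:</p>
+<pre><code>changelog_entry
+    id (monotonically increasing version number)
+    editgroup_id
+    timestamp</code></pre>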
+<h2 id="itentifiers">Itentifiers</h2>
+<p>A fixed number of first class &quot;entities&quot; would be definied, with common behavior and schema layouts. These would all be semantic entities like &quot;work&quot;, &quot;release&quot;, &quot;container&quot;, and &quot;person&quot;.</p>
+<p>fatcat identifiers would be semanticly meaningless fixed length random numbers, usually represented in case-insensitive base32 format. Each entity type would have it's own identifier namespace. Eg, 96 bit identifiers would have 20 characters and look like:</p>
+<pre><code>fcwork_rzga5b9cd7efgh04iljk
+https://fatcat.org/work/rzga5b9cd7efgh04iljk</code></pre>
+<p>128-bit (UUID size) would have 26 characters:</p>
+<pre><code>fcwork_rzga5b9cd7efgh04iljk8f3jvz
+https://fatcat.org/work/rzga5b9cd7efgh04iljk8f3jvz</code></pre>
+<p>A 64-bit namespace is probably plenty though, and would work with most database Integer columns:</p>
+<pre><code>fcwork_rzga5b9cd7efg
+https://fatcat.org/work/rzga5b9cd7efg</code></pre>
+<p>The idea would be to only have fatcat identifiers be used to interlink between databases, <em>not</em> to supplant DOIs, ISBNs, handles, ARKs, and other &quot;registered&quot; persistent identifiers.</p>
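+<p>A minimal sketch of minting such an identifier in Python (illustrative only; note this uses the standard RFC 4648 base32 alphabet, while the examples above imply a Crockford-style variant):</p>
+<pre><code>import base64
+import os
+
+raw = os.urandom(8)  # 64 random bits
+ident = base64.b32encode(raw).decode('ascii').lower().rstrip('=')
+print("fcwork_" + ident)  # eg, fcwork_c7sw2p4zqkhbd (13 characters)</code></pre>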
+<h2 id="entities-and-internal-schema">Entities and Internal Schema</h2>
+<p>Internally, identifiers would be lightweight pointers to actual metadata objects, which can be thought of as &quot;versions&quot;. The metadata objects themselves would be immutable once committed; the edit process is one of creating new objects and, if the edit is approved, pointing the identifier to the new version. Entities would reference between themselves by identifier.</p>
+<p>Edit objects represent a change to a single entity; edits get batched together into edit groups (like &quot;commits&quot; and &quot;pull requests&quot; in git parlance).</p>
+<p>SQL tables would probably look something like the following, though be specific to each entity type (eg, there would be an actual <code>work_revision</code> table, but not an actual <code>entity_revision</code> table):</p>
+<pre><code>entity_id
+ uuid
+ current_revision
+
+entity_revision
+ entity_id (bi-directional?)
+ previous: entity_revision or none
+ state: normal, redirect, deletion
+ redirect_entity_id: optional
+ extra: json blob
+ edit_id
+
+edit
+ mutable: boolean
+ edit_group
+ editor
+
+edit_group</code></pre>
+<p>Additional type-specific columns would hold actual metadata. Additional tables (which would reference both <code>entity_revision</code> and <code>entity_id</code> foreign keys as appropriate) would represent things like external identifiers, ordered author/work relationships, citations between works, etc. Every revision of an entity would require duplicating all of these associated rows, which could end up being a large source of inefficiency, but is necessary to represent the full history of an object.</p>
+<h2 id="scope">Scope</h2>
+<p>Want the &quot;scholarly web&quot;: the graph of works that cite other works. Certainly every work that is cited more than once and every work that both cites and is cited; &quot;leaf nodes&quot; and small islands might not be in scope.</p>
+<p>Focusing on written works, with some exceptions. Expect core media (for which we would pursue &quot;completeness&quot;) to be:</p>
+<pre><code>journal articles
+books
+conference proceedings
+technical memos
+dissertations</code></pre>
+<p>Probably in scope:</p>
+<pre><code>reports
+magazine articles
+published poetry
+essays
+government documents
+conference presentations (slides, video)
+datasets</code></pre>
+<p>Probably not:</p>
+<pre><code>patents
+court cases and legal documents
+manuals
+datasheets
+courses</code></pre>
+<p>Definitely not:</p>
+<pre><code>audio recordings
+tv show episodes
+musical scores
+advertisements</code></pre>
+<p>Author, citation, and work disambiguation would be core tasks. Linking pre-prints to final publication is in scope.</p>
+<p>I'm much less interested in altmetrics, funding, and grant relationships than most existing databases in this space.</p>
+<p>fatcat would not include any fulltext content itself, even for cleanly licensed (open access) works, but would have &quot;strong&quot; (verified) links to fulltext content, and would include file-level metadata (like hashes and fingerprints) to help discovery and identify content from any source. Typed file-level links should make fatcat more useful for both humans and machines to quickly access fulltext content of a given mimetype than existing redirect or landing page systems.</p>
+<h2 id="ontology">Ontology</h2>
+<p>Loosely following FRBR, but removing the &quot;manifestation&quot; abstraction, and favoring files (digital artifacts) over physical items, the primary entities are:</p>
+<pre><code>work
+ type
+ &lt;has&gt; contributors
+ &lt;about&gt; subject/category
+ &lt;has-primary&gt; release
+
+release (aka &quot;edition&quot;, &quot;variant&quot;)
+ title
+ volume/pages/issue/chapter
+ open-access status
+ &lt;published&gt; date
+ &lt;of a&gt; work
+ &lt;published-by&gt; publisher
+ &lt;published in&gt; container
+ &lt;has&gt; contributors
+ &lt;citation&gt; citetext &lt;to&gt; release
+ &lt;has&gt; identifier
+
+file (aka &quot;digital artifact&quot;)
+ &lt;of a&gt; release
+ &lt;has&gt; hashes
+ &lt;found at&gt; URLs
+ &lt;held-at&gt; institution &lt;with&gt; accession
+
+contributor
+ name
+ &lt;has&gt; aliases
+ &lt;has&gt; affiliation &lt;for&gt; date span
+ &lt;has&gt; identifier
+
+container
+ name
+ open-access policy
+ peer-review policy
+ &lt;has&gt; aliases, acronyms
+ &lt;about&gt; subject/category
+ &lt;has&gt; identifier
+ &lt;published in&gt; container
+ &lt;published-by&gt; publisher
+
+publisher
+ name
+ &lt;has&gt; aliases, acronyms
+ &lt;has&gt; identifier</code></pre>
+<h2 id="controlled-vocabularies">Controlled Vocabularies</h2>
+<p>Some special namespace tables and enums would probably be helpful; these should live in the database (not requiring a database migration to update), but should have more controlled editing workflow... perhaps versioned in the codebase:</p>
+<ul>
+<li>identifier namespaces (DOI, ISBN, ISSN, ORCID, etc)</li>
+<li>subject categorization</li>
+<li>license and open access status</li>
+<li>work &quot;types&quot; (article vs. book chapter vs. proceeding, etc)</li>
+<li>contributor types (author, translator, illustrator, etc)</li>
+<li>human languages</li>
+<li>file mimetypes</li>
+</ul>
+<h2 id="unresolved-questions">Unresolved Questions</h2>
+<p>How to handle translations of, eg, titles and author names? To be clear, not translations of works (which are just separate releases).</p>
+<p>Are bi-directional links a schema anti-pattern? Eg, should &quot;work&quot; point to a primary &quot;release&quot; (which itself points back to the work), or should &quot;release&quot; have a &quot;is-primary&quot; flag?</p>
+<p>Should <code>identifier</code> and <code>citation</code> be their own entities, referencing other entities by UUID instead of by revision? This could save a ton of database space and chunder.</p>
+<p>Should contributor/author contact information be retained? It could be very useful for disambiguation, but we don't want to build a huge database for spammers or &quot;innovative&quot; start-up marketing.</p>
+<p>Would general purpose SQL databases like Postgres or MySQL scale well enough to hold several tables with billions of entries? Right from the start there are hundreds of millions of works and releases, many of which have dozens of citations, many authors, and many identifiers, and then we'll have potentially dozens of edits for each of these, which multiply out to <code>1e8 * 2e1 * 2e1 = 4e10</code>, or 40 billion rows in the citation table. If each row was 32 bytes on average (uncompressed, not including index size), that would be 1.3 TByte on its own, larger than a common SSD disk. I think a transactional SQL datastore is the right answer. In my experience locking and index rebuild times are usually the biggest scaling challenges; the largely-immutable architecture here should mitigate locking. Hopefully few indexes would be needed in the primary database, as user interfaces could rely on secondary read-only search engines for more complex queries and views.</p>
+<p>I see a tension between focus and scope creep. If a central database like fatcat doesn't support enough fields and metadata, then it will not be possible to completely import other corpuses, and this becomes &quot;yet another&quot; partial bibliographic database. On the other hand, accepting arbitrary data leads to other problems: sparseness increases (we have more &quot;partial&quot; data), potential for redundancy is high, humans will start editing content that might be bulk-replaced, etc.</p>
+<p>There might be a need to support &quot;stub&quot; references between entities. Eg, when adding citations from PDF extraction, the cited works are likely to be ambiguous. Could create &quot;stub&quot; works to be merged/resolved later, or could leave the citation hanging. Same with authors, containers (journals), etc.</p>
+<h2 id="references-and-previous-work">References and Previous Work</h2>
+<p>The closest overall analog of fatcat is <a href="https://musicbrainz.org">MusicBrainz</a>, a collaboratively edited music database. <a href="https://openlibrary.org">Open Library</a> is a very similar existing service, which exclusively contains book metadata.</p>
+<p><a href="https://wikidata.org">Wikidata</a> seems to be the most successful and actively edited/developed open bibliographic database at this time (early 2018), including the <a href="https://meta.wikimedia.org/wiki/WikiCite_2017">wikicite</a> conference and related Wikimedia/Wikipedia projects. Wikidata is a general purpose semantic database of entities, facts, and relationships; bibliographic metadata has become a large fraction of all content in recent years. The focus there seems to be linking knowledge (statements) to specific sources unambigiously. Potential advantages fatcat would have would be a focus on a specific scope (not a general purpose database of entities) and a goal of completeness (capturing as many works and relationships as rapidly as possible). However, it might be better to just pitch in to the wikidata efforts.</p>
+<p>The technical design of fatcat is loosely inspired by the git branch/tag/commit/tree architecture, and specifically inspired by Oliver Charles' &quot;New Edit System&quot; <a href="https://ocharles.org.uk/blog/posts/2012-07-10-nes-does-it-better-1.html">blog posts</a> from 2012.</p>
+<p>There are a whole bunch of proprietary, for-profit bibliographic databases, including Web of Science, Google Scholar, Microsoft Academic Graph, aminer, Scopus, and Dimensions. There are excellent field-limited databases like dblp, MEDLINE, and Semantic Scholar. There are some large general-purpose databases that are not directly user-editable, including the OpenCitation corpus, CORE, BASE, and CrossRef. I don't know of any large (more than 60 million works), open (bulk-downloadable with permissive or no license), field agnostic, user-editable corpus of scholarly publication bibliographic metadata.</p>
+
+{% endblock %}
diff --git a/python/fatcat/templates/base.html b/python/fatcat/templates/base.html
new file mode 100644
index 00000000..697705c3
--- /dev/null
+++ b/python/fatcat/templates/base.html
@@ -0,0 +1,70 @@
+<!DOCTYPE html>
+<html lang="en" style="position: relative; min-height: 100%; height: auto;">
+<head>
+ <meta charset="utf-8" />
+ <meta name="viewport" content="width=device-width">
+
+ <title>{% block title %}fatcat!{% endblock %}</title>
+ <meta name="viewport" content="width=device-width, initial-scale=1">
+ <link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/semantic-ui@2.2.13/dist/semantic.min.css">
+ <script
+ src="https://code.jquery.com/jquery-3.1.1.min.js"
+ integrity="sha256-hVVnYaiADRTO2PzUGmuLJr8BLUSjGIZsDYGmIJLv2b8="
+ crossorigin="anonymous"></script>
+ <script src="https://cdn.jsdelivr.net/npm/semantic-ui@2.2.13/dist/semantic.min.js"></script>
+
+</head>
+<body style="margin-bottom: 100px; height: auto;">
+
+<header class="ui fixed inverted menu">
+ <div class="ui container">
+ <a href="/" class="header item">
+ <!-- <img class="logo" src="assets/images/logo.png"> -->
+ fatcat!
+ </a>
+ <a href="/about" class="item">About</a>
+ <a href="#" class="item">Guide</a>
+ <div class="right menu">
+ <div class="item">
+ <div class="ui transparent inverted icon input">
+ <i class="search icon"></i>
+ <input type="text" placeholder="Search...">
+ </div>
+ </div>
+ <div class="ui simple dropdown item">
+ acidburn <i class="dropdown icon"></i>
+ <div class="menu">
+ <a class="item" href="/editgroup/current">Open Submissions</a>
+ <a class="item" href="/editor/admin/changelog">Edit History</a>
+ <div class="divider"></div>
+ <a class="item" href="/editor/admin">Account</a>
+ <a class="item" href="/logout">Logout</a>
+ </div>
+ </div>
+
+ </div>
+ </div>
+</header>
+
+<main class="ui main text container" style="margin-top: 4em; margin-bottom: 2em;">
+{% block body %}Nothing to see here.{% endblock %}
+</main>
+
+
+<footer class="ui inverted vertical footer segment" style="margin-top: 2em; padding-top: 2em; padding-bottom:2em; position: absolute; bottom: 0px; width: 100%;">
+ <div class="ui center aligned container">
+ <div class="ui horizontal inverted small divided link list">
+ <span class="item">fatcat!</span>
+ <a class="item" href="/about">About</a>
+ <a class="item" href="#">Sources</a>
+ <a class="item" href="#">Status</a>
+ <a class="item" href="#">Datasets</a>
+ <a class="item" href="https://git.bnewbold.net/fatcat/">Source Code</a>
+ </div>
+ </div>
+</footer>
+
+{% block postscript %}{% endblock %}
+
+</body>
+</html>
diff --git a/python/fatcat/templates/container_add.html b/python/fatcat/templates/container_add.html
new file mode 100644
index 00000000..15288142
--- /dev/null
+++ b/python/fatcat/templates/container_add.html
@@ -0,0 +1,168 @@
+{% extends "base.html" %}
+{% block body %}
+<div class="ui segment">
+<h1 class="ui header">Adding a New Container</h1>
+
+<p>A "container" is a anything that groups publications together. For example,
+a journal (eg, "New England Journal of Medicine"), conference proceedings, a
+book series, or a blog.
+
+<p>Not all publications are in a container.
+
+<form class="ui form" id="add_container_form" method="post" action="/container/create">
+
+ <h3 class="ui dividing header">The Basics</h3>
+
+ <div class="ui huge field required">
+ <label>Name or Title</label>
+ <input name="container_name" type="text" placeholder="Title of Container (in English)">
+ </div>
+
+ <div class="ui field required">
+ <label>Type of Container</label>
+ <select class="ui dropdown" id="container_type">
+ <option value="">Primary Type</option>
+ <option value="journal">Journal</option>
+ <option value="book-series">Book Series</option>
+ <option value="conference">Conference Proceedings</option>
+ <option value="blog">Blog</option>
+ <option value="other">Other</option>
+ </select>
+ </div>
+
+ <!-- Publisher -->
+ <div class="ui huge field required">
+ <label>Name of Publisher</label>
+ <input name="container_publisher" type="text" placeholder="Name of Publisher">
+ </div>
+
+ <!-- Identifier -->
+ <div class="ui huge field required">
+ <label>ISSN Number</label>
+ <input name="container_issn" type="text" placeholder="eg, 1234-567X">
+ </div>
+
+ <!-- Primary/Original Language -->
+ <div class="field">
+ <label>Primary Language</label>
+ <select class="ui search select dropdown" id="language-select">
+ <option value="">Select if Appropriate</option>
+ <option value="en">English</option>
+ <option value="es">Spanish</option>
+ <option value="">Russian</option>
+ <option value="">Thai</option>
+ <option value="">Indonesian</option>
+ <option value="">Chinese</option>
+ </select>
+ </div>
+
+ <!-- Subject / Categorization / Tags -->
+ <div class="field">
+ <label>Subject</label>
+ <select multiple="" class="ui dropdown" id="subjects">
+ <option value="">Select Subject/Tags</option>
+ <option value="AF">Natural Sciences</option>
+ <option value="AX">Humanities</option>
+ <option value="AL">Arts</option>
+ <option value="AL">Engineering</option>
+ <option value="AL">Other</option>
+ </select>
+ </div>
+
+ <!-- Date -->
+ <!-- Container / Part-Of -->
+ <!-- Region -->
+
+ <!-- Anything Else? -->
+ <h3 class="ui dividing header">Anything Else?</h3>
+
+<div class="ui submit button">Create container</div>
+
+<p><i>Entity will be created as part of the current edit group, which needs to be
+submitted and approved before the entity will formally be included in the
+catalog.</i>
+
+</form>
+
+</div>
+{% endblock %}
+
+{% block postscript %}
+<script>
+// Form validation code
+$(document).ready(function() {
+
+  $('#add_container_form')
+    .form({
+      fields: {
+        container_name: {
+          identifier: 'container_name',
+          rules: [
+            {
+              type   : 'empty',
+              prompt : 'Please enter a name or title for the container'
+            }
+          ]
+        },
+        container_publisher: {
+          identifier: 'container_publisher',
+          rules: [
+            {
+              type   : 'empty',
+              prompt : 'Please enter the publisher name'
+            }
+          ]
+        },
+        container_issn: {
+          identifier: 'container_issn',
+          rules: [
+            {
+              type   : 'regExp',
+              value  : /^[0-9]{4}-[0-9]{3}[0-9Xx]$/,
+              prompt : 'Please enter a valid ISSN (eg, 1234-567X)'
+            }
+          ]
+        }
+      }
+    })
+  ;
+
+ $('#container_type').dropdown();
+ $('#subjects').dropdown();
+ $('#language-select').dropdown();
+
+ console.log("Page loaded");
+
+});
+</script>
+{% endblock %}
diff --git a/python/fatcat/templates/container_view.html b/python/fatcat/templates/container_view.html
new file mode 100644
index 00000000..483886b5
--- /dev/null
+++ b/python/fatcat/templates/container_view.html
@@ -0,0 +1,14 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Container: {{ container.name }}</h1>
+
+<p>ID: {{ container.id }}
+<p>ISSN: {{ container.issn }}
+<p>Publisher: {{ container.publisher }}
+
+<p>TODO:
+
+<pre>{{ container }}</pre>
+
+{% endblock %}
diff --git a/python/fatcat/templates/creator_view.html b/python/fatcat/templates/creator_view.html
new file mode 100644
index 00000000..f7be9f2c
--- /dev/null
+++ b/python/fatcat/templates/creator_view.html
@@ -0,0 +1,10 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Creator: {{ creator.id }}</h1>
+
+TODO:
+
+<pre>{{ creator }}</pre>
+
+{% endblock %}
diff --git a/python/fatcat/templates/editgroup_view.html b/python/fatcat/templates/editgroup_view.html
new file mode 100644
index 00000000..4ed08501
--- /dev/null
+++ b/python/fatcat/templates/editgroup_view.html
@@ -0,0 +1,49 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Edit Group: #{{ editgroup.id }}</h1>
+
+<p>Editor: <a href="/editor/{{ editgroup.editor.username }}">{{ editgroup.editor.username }}</a>
+<p>Description: {{ editgroup.description }}
+
+<h3>Work Edits ({{ editgroup.work_edits|count }})</h3>
+<ul>
+{% for edit in editgroup.work_edits %}
+ <li><a href="/work/edit/{{ edit.id }}">Edit #{{ edit.id }}</a>:
+ <a href="/work/{{ edit.ident }}">{{ edit.ident }}</a> to rev {{ edit.rev }}
+{% endfor %}
+</ul>
+
+<h3>Release Edits ({{ editgroup.release_edits|count }})</h3>
+<ul>
+{% for edit in editgroup.release_edits %}
+ <li><a href="/release/edit/{{ edit.id }}">Edit #{{ edit.id }}</a>
+ <a href="/release/{{ edit.ident }}">{{ edit.ident }}</a> to rev {{ edit.rev }}
+{% endfor %}
+</ul>
+
+<h3>Container Edits ({{ editgroup.container_edits|count }})</h3>
+<ul>
+{% for edit in editgroup.container_edits %}
+ <li><a href="/container/edit/{{ edit.id }}">Edit #{{ edit.id }}</a>
+ <a href="/container/{{ edit.ident }}">{{ edit.ident }}</a> to rev {{ edit.rev }}
+{% endfor %}
+</ul>
+
+<h3>Creator Edits ({{ editgroup.creator_edits|count }})</h3>
+<ul>
+{% for edit in editgroup.creator_edits %}
+ <li><a href="/creator/edit/{{ edit.id }}">Edit #{{ edit.id }}</a>
+ <a href="/creator/{{ edit.ident }}">{{ edit.ident }}</a> to rev {{ edit.rev }}
+{% endfor %}
+</ul>
+
+<h3>File Edits ({{ editgroup.file_edits|count }})</h3>
+<ul>
+{% for edit in editgroup.file_edits %}
+ <li><a href="/file/edit/{{ edit.id }}">Edit #{{ edit.id }}</a>
+ <a href="/file/{{ edit.ident }}">{{ edit.ident }}</a> to rev {{ edit.rev }}
+{% endfor %}
+</ul>
+
+{% endblock %}
diff --git a/python/fatcat/templates/editor_changelog.html b/python/fatcat/templates/editor_changelog.html
new file mode 100644
index 00000000..e1410874
--- /dev/null
+++ b/python/fatcat/templates/editor_changelog.html
@@ -0,0 +1,17 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Editor Changelog: {{ editor.username }}</h1>
+
+<p>Editor: <a href="/editor/{{ editor.username }}">{{ editor.username }}</a>
+
+<p>Changes accepted (aka, merged editgroups):
+<ul>
+{% for entry in changelog_entries %}
+ <li><a href="/editgroup/{{ entry.editgroup }}">Edit Group #{{ entry.editgroup }}</a> (on {{ entry.timestamp }})
+{% else %}
+NONE
+{% endfor %}
+</ul>
+
+{% endblock %}
diff --git a/python/fatcat/templates/editor_view.html b/python/fatcat/templates/editor_view.html
new file mode 100644
index 00000000..e0625c42
--- /dev/null
+++ b/python/fatcat/templates/editor_view.html
@@ -0,0 +1,9 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Editor: {{ editor.username }}</h1>
+
+<p>Is Admin? {{ editor.is_admin }}
+<p><a href="/editor/{{ editor.username }}/changelog">Changelog</a>
+
+{% endblock %}
diff --git a/python/fatcat/templates/file_view.html b/python/fatcat/templates/file_view.html
new file mode 100644
index 00000000..ff55e21c
--- /dev/null
+++ b/python/fatcat/templates/file_view.html
@@ -0,0 +1,10 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>File: {{ file.id }}</h1>
+
+TODO:
+
+<pre>{{ file }}</pre>
+
+{% endblock %}
diff --git a/python/fatcat/templates/home.html b/python/fatcat/templates/home.html
new file mode 100644
index 00000000..cea4f687
--- /dev/null
+++ b/python/fatcat/templates/home.html
@@ -0,0 +1,29 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Salutations!</h1>
+
+Just mockups for now...
+
+<ul>
+ <li><b>Work:</b>
+ <a href="/work/create">Create</a>,
+ <a href="/work/random">Random</a>
+ <li><b>Release:</b>
+ <a href="/release/create">Create</a>,
+ <a href="/release/random">Random</a>
+ <li><b><strike>File:</strike></b>
+ <a href="/file/create">Create</a>,
+ <a href="/file/random">Random</a>
+ <li><b><strike>Contributor:</strike></b>
+ <a href="/contrib/create">Create</a>,
+ <a href="/contrib/random">Random</a>
+ <li><b><strike>Container:</strike></b>
+ <a href="/container/create">Create</a>,
+ <a href="/container/random">Random</a>
+ <li>Edit groups...
+ <li>Changelog...
+ <li>Login/Signup...
+</ul>
+
+{% endblock %}
diff --git a/python/fatcat/templates/release_changelog.html b/python/fatcat/templates/release_changelog.html
new file mode 100644
index 00000000..706a5642
--- /dev/null
+++ b/python/fatcat/templates/release_changelog.html
@@ -0,0 +1,17 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>Release Changelog: {{ release.id }}</h1>
+
+<p>release: <a href="/release/{{ release.id }}">{{ release.id }}</a>
+
+<p>Changelog:
+<ul>
+{% for entry in changelog_entries %}
+ <li><a href="/editgroup/{{ entry.editgroup }}">Edit Group #{{ entry.editgroup }}</a> (on {{ entry.timestamp }})
+{% else %}
+NONE
+{% endfor %}
+</ul>
+
+{% endblock %}
diff --git a/python/fatcat/templates/release_view.html b/python/fatcat/templates/release_view.html
new file mode 100644
index 00000000..ee68161c
--- /dev/null
+++ b/python/fatcat/templates/release_view.html
@@ -0,0 +1,31 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>{{ release.title }}</h1>
+
+<p>Release type: {{ release.release_type }}
+<p><a href="/release/{{ release.id }}/history">History</a>
+<p>Contributors:
+{% for c in release.contributors %} {{ c.name }}; {% endfor %}
+
+<p>Title: {{ release.title }}
+<p>Date: {{ release.date }}
+
+{% if release.container %}
+<p>Container: <a href="/container/{{ release.container.id }}">{{ release.container.title }}</a>
+{% endif %}
+
+{% if release.doi %}
+<p>DOI: <a href="https://dx.doi.org/{{ release.doi }}">{{ release.doi }}</a>
+{% endif %}
+
+{% if releases %}
+<ul>
+{% for r in releases %}
+  <li><a href="/release/{{ r.id }}">{{ r.title }}</a> ({{ r.date }} - {{ r.release_type }})
+{% endfor %}
+</ul>
+{% endif %}
+
+{% endblock %}
diff --git a/python/fatcat/templates/work_add.html b/python/fatcat/templates/work_add.html
new file mode 100644
index 00000000..ac8a8169
--- /dev/null
+++ b/python/fatcat/templates/work_add.html
@@ -0,0 +1,215 @@
+{% extends "base.html" %}
+{% block body %}
+<div class="ui segment">
+<h1 class="ui header">Adding a New Thing</h1>
+
+<form class="ui form" id="add_work_form">
+
+ <h3 class="ui dividing header">The Basics</h3>
+
+ <div class="ui huge field required">
+ <label>Title</label>
+ <input name="work_title" type="text" placeholder="Title of Work (in English)">
+ </div>
+
+ <div class="ui field required">
+ <label>Type of Work</label>
+ <select class="ui dropdown" id="work_type">
+ <option value="">Primary Type</option>
+ <option value="journal-article">Journal Article</option>
+ <option value="book">Book</option>
+ <option value="book-chapter">Book Chapter</option>
+ <option value="dataset">Dataset</option>
+ <option value="dissertation">Thesis or Dissertation</option>
+ <option value="monograph">Monograph</option>
+ <option value="proceedings-article">Conference Proceeding</option>
+ <option value="report">Report</option>
+ <option value="other">Other</option>
+ </select>
+ </div>
+
+ <!-- Primary Creators/Authors -->
+ <div class="ui field search" id="work_creators">
+ <label>Primary Creator(s)</label>
+ <div class="ui icon input">
+ <input class="prompt" type="text" placeholder="Search...">
+ <i class="search icon"></i>
+ </div>
+ <div class="results"></div>
+ </div>
+
+ <!-- Description (not an abstract) -->
+ <div class="ui field">
+ <label>Description</label>
+ <div class="field">
+ <label>Not an abstract...</label>
+ <textarea rows="2"></textarea>
+ </div>
+ </div>
+
+ <!-- Primary/Original Language -->
+ <div class="field">
+ <label>Primary Language</label>
+ <select class="ui search select dropdown" id="language-select">
+ <option value="">Select if Appropriate</option>
+ <option value="en">English</option>
+ <option value="es">Spanish</option>
+ </select>
+ </div>
+
+ <!-- Subject / Categorization / Tags -->
+ <div class="field">
+ <label>Subject</label>
+ <select multiple="" class="ui dropdown" id="subjects">
+ <option value="">Select Subject/Tags</option>
+ <option value="AF">Afghanistan</option>
+ <option value="AX">Åland Islands</option>
+ <option value="AL">Albania</option>
+ <option value="DZ">Algeria</option>
+ <option value="AS">American Samoa</option>
+ <option value="AD">Andorra</option>
+ <option value="AO">Angola</option>
+ </select>
+ </div>
+
+
+ <h3 class="ui dividing header">Primary Release / Edition</h3>
+
+ <!-- Contributors (and how) -->
+ <div class="ui field search" id="release_creators">
+ <label>Primary Creator(s)</label>
+ <div class="ui icon input">
+ <input class="prompt" type="text" placeholder="Search...">
+ <i class="search icon"></i>
+ </div>
+ <div class="results"></div>
+ </div>
+
+ <!-- Date -->
+ <!-- Container / Part-Of -->
+ <!-- Publisher -->
+ <!-- Identifier -->
+ <!-- Language -->
+ <!-- Type / Media -->
+ <!-- Issue / Volume / Pages / Chapter -->
+
+ <!-- Anything Else? -->
+ <h3 class="ui dividing header">Anything Else?</h3>
+
+ <!-- File / Copy / URL -->
+ <!-- Citations -->
+
+<div class="ui submit button">Create Work</div>
+</form>
+
+</div>
+{% endblock %}
+
+{% block postscript %}
+<script>
+// Form validation code
+$(document).ready(function() {
+
+  $('#add_work_form')
+    .form({
+      fields: {
+        work_title: {
+          identifier: 'work_title',
+          rules: [
+            {
+              type   : 'empty',
+              prompt : 'Please enter a title for the work'
+            }
+          ]
+        },
+        work_type: {
+          identifier: 'work_type',
+          rules: [
+            {
+              type   : 'empty',
+              prompt : 'Please select a primary work type'
+            }
+          ]
+        }
+      }
+    })
+  ;
+
+ var example_authors = [
+ { title: 'Andorra' },
+ { title: 'United Arab Emirates' },
+ { title: 'Afghanistan' },
+ { title: 'Antigua' },
+ { title: 'Anguilla' },
+ { title: 'Albania' },
+ { title: 'Armenia' },
+ { title: 'Netherlands Antilles' },
+ { title: 'Angola' },
+ { title: 'Argentina' },
+ { title: 'American Samoa' },
+ { title: 'Austria' },
+ { title: 'Australia' },
+ { title: 'Aruba' },
+ ];
+
+ $('#work_creators')
+ .search({
+ source: example_authors
+ })
+ ;
+
+ $('#release_creators')
+ .search({
+ source: example_authors
+ })
+ ;
+
+ $('#work_type').dropdown();
+ $('#subjects').dropdown();
+ $('#language-select').dropdown();
+
+ console.log("Page loaded");
+
+});
+</script>
+{% endblock %}
diff --git a/python/fatcat/templates/work_view.html b/python/fatcat/templates/work_view.html
new file mode 100644
index 00000000..8c5e955d
--- /dev/null
+++ b/python/fatcat/templates/work_view.html
@@ -0,0 +1,37 @@
+{% extends "base.html" %}
+{% block body %}
+
+<h1>{{ work.title }}</h1>
+
+<p>Work type: {{ work.type }}
+<p><a href="/work/{{ work.id }}/history">History</a>
+<p>Contributors:
+{% for c in work.contributors %} {{ c.name }}; {% endfor %}
+
+{% if primary %}
+<h2>Primary Release/Edition</h2>
+<p>Title: {{ primary.title }}
+<p>Date: {{ primary.date }}
+
+{% if primary.container %}
+<p>Container: <a href="/container/{{ primary.container.id }}">{{ primary.container.title }}</a>
+{% endif %}
+
+{% if primary.doi %}
+<p>DOI: <a href="https://dx.doi.org/{{ primary.doi }}">{{ primary.doi }}</a>
+{% endif %}
+
+{% else %}
+<p>No primary release
+{% endif %}
+
+{% if releases %}
+<ul>
+{% for r in releases %}
+  <li><a href="/release/{{ r.id }}">{{ r.title }}</a> ({{ r.date }} - {{ r.release_type }})
+{% endfor %}
+</ul>
+{% endif %}
+
+{% endblock %}
diff --git a/python/fatcat_client.py b/python/fatcat_client.py
new file mode 100755
index 00000000..d1580be5
--- /dev/null
+++ b/python/fatcat_client.py
@@ -0,0 +1,41 @@
+#!/usr/bin/env python3
+
+import argparse
+from fatcat.api_client import FatCatApiClient
+
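+# Example invocations (assuming the API server is running locally on :8040):
+#
+#   ./fatcat_client.py health
+#   ./fatcat_client.py import-crossref tests/files/crossref-works.2018-01-21.badsample.json
+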
+def import_crossref(args):
+ fcc = FatCatApiClient(args.host_url)
+ fcc.import_crossref_file(args.json_file,
+ create_containers=args.create_containers)
+
+def health(args):
+ fcc = FatCatApiClient(args.host_url)
+ print(fcc.health())
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--debug',
+ action='store_true',
+ help="enable debugging interface")
+ parser.add_argument('--host-url',
+ default="http://localhost:8040",
+ help="connect to this host/port")
+ subparsers = parser.add_subparsers()
+
+ sub_import_crossref = subparsers.add_parser('import-crossref',
+ aliases=['ic'])
+ sub_import_crossref.set_defaults(func=import_crossref)
+ sub_import_crossref.add_argument('json_file',
+ help="crossref JSON file to import from")
+ sub_import_crossref.add_argument('--create-containers',
+ action='store_true',
+ help="if true, create containers based on ISSN")
+
+ sub_health = subparsers.add_parser('health')
+ sub_health.set_defaults(func=health)
+
+    args = parser.parse_args()
+    if not hasattr(args, 'func'):
+        # no subcommand given; subparsers are optional in python 3.5 argparse
+        parser.print_help()
+        return
+    args.func(args)
+
+if __name__ == '__main__':
+ main()
diff --git a/python/pytest.ini b/python/pytest.ini
new file mode 100644
index 00000000..0a5e9216
--- /dev/null
+++ b/python/pytest.ini
@@ -0,0 +1,8 @@
+
+[pytest]
+
+# allow imports from files in current directory
+python_paths = .
+
+# search for 'test_*' functions in all python files, not just under tests
+python_files = *.py
diff --git a/python/run.py b/python/run.py
new file mode 100755
index 00000000..0fbd6194
--- /dev/null
+++ b/python/run.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python3
+
+import argparse
+import fatcat.sql
+from fatcat import app, db
+
+def main():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('--debug',
+ action='store_true',
+ help="enable debugging interface (note: not for everything)")
+ parser.add_argument('--host',
+ default="127.0.0.1",
+ help="listen on this host/IP")
+ parser.add_argument('--port',
+ type=int,
+ default=8040,
+ help="listen on this port")
+ parser.add_argument('--database-uri',
+ default=app.config['SQLALCHEMY_DATABASE_URI'],
+ help="sqlalchemy database string")
+ parser.add_argument('--init-db',
+ action='store_true',
+ help="create database tables and insert dummy data")
+ args = parser.parse_args()
+
+ app.config['SQLALCHEMY_DATABASE_URI'] = args.database_uri
+
+ if args.init_db:
+ db.create_all()
+ fatcat.sql.populate_db()
+ print("Dummy database configured: " + app.config['SQLALCHEMY_DATABASE_URI'])
+ return
+
+ app.run(debug=args.debug, host=args.host, port=args.port)
+
+if __name__ == '__main__':
+ main()
diff --git a/python/schema.sql b/python/schema.sql
new file mode 100644
index 00000000..ec38bcb3
--- /dev/null
+++ b/python/schema.sql
@@ -0,0 +1,1078 @@
+
+
+SET statement_timeout = 0;
+SET lock_timeout = 0;
+SET idle_in_transaction_session_timeout = 0;
+SET client_encoding = 'UTF8';
+SET standard_conforming_strings = on;
+SELECT pg_catalog.set_config('search_path', '', false);
+SET check_function_bodies = false;
+SET client_min_messages = warning;
+SET row_security = off;
+
+
+COMMENT ON DATABASE postgres IS 'default administrative connection database';
+
+
+
+CREATE EXTENSION IF NOT EXISTS plpgsql WITH SCHEMA pg_catalog;
+
+
+
+COMMENT ON EXTENSION plpgsql IS 'PL/pgSQL procedural language';
+
+
+SET default_tablespace = '';
+
+SET default_with_oids = false;
+
+
+CREATE TABLE public.changelog (
+ id integer NOT NULL,
+ editgroup_id integer,
+ "timestamp" integer
+);
+
+
+ALTER TABLE public.changelog OWNER TO postgres;
+
+
+CREATE SEQUENCE public.changelog_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.changelog_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.changelog_id_seq OWNED BY public.changelog.id;
+
+
+
+CREATE TABLE public.container_edit (
+ id integer NOT NULL,
+ ident_id integer,
+ rev_id integer,
+ redirect_id integer,
+ editgroup_id integer,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.container_edit OWNER TO postgres;
+
+
+CREATE SEQUENCE public.container_edit_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.container_edit_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.container_edit_id_seq OWNED BY public.container_edit.id;
+
+
+
+CREATE TABLE public.container_ident (
+ id integer NOT NULL,
+ is_live boolean NOT NULL,
+ rev_id integer,
+ redirect_id integer
+);
+
+
+ALTER TABLE public.container_ident OWNER TO postgres;
+
+
+CREATE SEQUENCE public.container_ident_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.container_ident_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.container_ident_id_seq OWNED BY public.container_ident.id;
+
+
+
+CREATE TABLE public.container_rev (
+ id integer NOT NULL,
+ extra_json_id character varying,
+ name character varying,
+ parent_id integer,
+ publisher character varying,
+ sortname character varying,
+ issn character varying
+);
+
+
+ALTER TABLE public.container_rev OWNER TO postgres;
+
+
+CREATE SEQUENCE public.container_rev_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.container_rev_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.container_rev_id_seq OWNED BY public.container_rev.id;
+
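+-- Example query (illustrative sketch, not part of the generated dump): every
+-- entity type below follows the same ident/rev/redirect pattern, so the
+-- current revision of a live container (hypothetical ident id 1) resolves as:
+--   SELECT rev.*
+--   FROM public.container_ident AS ident
+--   JOIN public.container_rev AS rev ON ident.rev_id = rev.id
+--   WHERE ident.id = 1 AND ident.is_live = true;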
+
+
+CREATE TABLE public.creator_edit (
+ id integer NOT NULL,
+ ident_id integer,
+ rev_id integer,
+ redirect_id integer,
+ editgroup_id integer,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.creator_edit OWNER TO postgres;
+
+
+CREATE SEQUENCE public.creator_edit_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.creator_edit_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.creator_edit_id_seq OWNED BY public.creator_edit.id;
+
+
+
+CREATE TABLE public.creator_ident (
+ id integer NOT NULL,
+ is_live boolean NOT NULL,
+ rev_id integer,
+ redirect_id integer
+);
+
+
+ALTER TABLE public.creator_ident OWNER TO postgres;
+
+
+CREATE SEQUENCE public.creator_ident_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.creator_ident_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.creator_ident_id_seq OWNED BY public.creator_ident.id;
+
+
+
+CREATE TABLE public.creator_rev (
+ id integer NOT NULL,
+ extra_json_id character varying,
+ name character varying,
+ sortname character varying,
+ orcid character varying
+);
+
+
+ALTER TABLE public.creator_rev OWNER TO postgres;
+
+
+CREATE SEQUENCE public.creator_rev_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.creator_rev_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.creator_rev_id_seq OWNED BY public.creator_rev.id;
+
+
+
+CREATE TABLE public.editgroup (
+ id integer NOT NULL,
+ editor_id integer NOT NULL,
+ description character varying,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.editgroup OWNER TO postgres;
+
+
+CREATE SEQUENCE public.editgroup_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.editgroup_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.editgroup_id_seq OWNED BY public.editgroup.id;
+
+
+
+CREATE TABLE public.editor (
+ id integer NOT NULL,
+ username character varying NOT NULL,
+ is_admin boolean NOT NULL,
+ active_editgroup_id integer
+);
+
+
+ALTER TABLE public.editor OWNER TO postgres;
+
+
+CREATE SEQUENCE public.editor_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.editor_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.editor_id_seq OWNED BY public.editor.id;
+
+
+
+CREATE TABLE public.extra_json (
+ sha1 character varying NOT NULL,
+ json character varying NOT NULL
+);
+
+
+ALTER TABLE public.extra_json OWNER TO postgres;
+
+
+CREATE TABLE public.file_edit (
+ id integer NOT NULL,
+ ident_id integer,
+ rev_id integer,
+ redirect_id integer,
+ editgroup_id integer,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.file_edit OWNER TO postgres;
+
+
+CREATE SEQUENCE public.file_edit_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.file_edit_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.file_edit_id_seq OWNED BY public.file_edit.id;
+
+
+
+CREATE TABLE public.file_ident (
+ id integer NOT NULL,
+ is_live boolean NOT NULL,
+ rev_id integer,
+ redirect_id integer
+);
+
+
+ALTER TABLE public.file_ident OWNER TO postgres;
+
+
+CREATE SEQUENCE public.file_ident_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.file_ident_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.file_ident_id_seq OWNED BY public.file_ident.id;
+
+
+
+CREATE TABLE public.file_release (
+ id integer NOT NULL,
+ file_rev integer NOT NULL,
+ release_ident_id integer NOT NULL
+);
+
+
+ALTER TABLE public.file_release OWNER TO postgres;
+
+
+CREATE SEQUENCE public.file_release_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.file_release_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.file_release_id_seq OWNED BY public.file_release.id;
+
+
+
+CREATE TABLE public.file_rev (
+ id integer NOT NULL,
+ extra_json_id character varying,
+ size integer,
+ sha1 character varying,
+ url integer
+);
+
+
+ALTER TABLE public.file_rev OWNER TO postgres;
+
+
+CREATE SEQUENCE public.file_rev_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.file_rev_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.file_rev_id_seq OWNED BY public.file_rev.id;
+
+
+
+CREATE TABLE public.release_contrib (
+ release_rev integer NOT NULL,
+ creator_ident_id integer NOT NULL,
+ stub character varying,
+ type character varying
+);
+
+
+ALTER TABLE public.release_contrib OWNER TO postgres;
+
+
+CREATE TABLE public.release_edit (
+ id integer NOT NULL,
+ ident_id integer,
+ rev_id integer,
+ redirect_id integer,
+ editgroup_id integer,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.release_edit OWNER TO postgres;
+
+
+CREATE SEQUENCE public.release_edit_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.release_edit_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.release_edit_id_seq OWNED BY public.release_edit.id;
+
+
+
+CREATE TABLE public.release_ident (
+ id integer NOT NULL,
+ is_live boolean NOT NULL,
+ rev_id integer,
+ redirect_id integer
+);
+
+
+ALTER TABLE public.release_ident OWNER TO postgres;
+
+
+CREATE SEQUENCE public.release_ident_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.release_ident_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.release_ident_id_seq OWNED BY public.release_ident.id;
+
+
+
+CREATE TABLE public.release_ref (
+ id integer NOT NULL,
+ release_rev integer NOT NULL,
+ target_release_ident_id integer,
+ index integer,
+ stub character varying,
+ doi character varying
+);
+
+
+ALTER TABLE public.release_ref OWNER TO postgres;
+
+
+CREATE SEQUENCE public.release_ref_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.release_ref_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.release_ref_id_seq OWNED BY public.release_ref.id;
+
+
+
+CREATE TABLE public.release_rev (
+ id integer NOT NULL,
+ extra_json_id character varying,
+ work_ident_id integer,
+ container_ident_id integer,
+ title character varying NOT NULL,
+ license character varying,
+ release_type character varying,
+ date character varying,
+ doi character varying,
+ volume character varying,
+ pages character varying,
+ issue character varying
+);
+
+
+ALTER TABLE public.release_rev OWNER TO postgres;
+
+
+CREATE SEQUENCE public.release_rev_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.release_rev_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.release_rev_id_seq OWNED BY public.release_rev.id;
+
+
+
+CREATE TABLE public.work_edit (
+ id integer NOT NULL,
+ ident_id integer,
+ rev_id integer,
+ redirect_id integer,
+ editgroup_id integer,
+ extra_json_id character varying
+);
+
+
+ALTER TABLE public.work_edit OWNER TO postgres;
+
+
+CREATE SEQUENCE public.work_edit_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.work_edit_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.work_edit_id_seq OWNED BY public.work_edit.id;
+
+
+
+CREATE TABLE public.work_ident (
+ id integer NOT NULL,
+ is_live boolean NOT NULL,
+ rev_id integer,
+ redirect_id integer
+);
+
+
+ALTER TABLE public.work_ident OWNER TO postgres;
+
+
+CREATE SEQUENCE public.work_ident_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.work_ident_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.work_ident_id_seq OWNED BY public.work_ident.id;
+
+
+
+CREATE TABLE public.work_rev (
+ id integer NOT NULL,
+ extra_json_id character varying,
+ title character varying,
+ work_type character varying,
+ primary_release_id integer
+);
+
+
+ALTER TABLE public.work_rev OWNER TO postgres;
+
+
+CREATE SEQUENCE public.work_rev_id_seq
+ AS integer
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+ALTER TABLE public.work_rev_id_seq OWNER TO postgres;
+
+
+ALTER SEQUENCE public.work_rev_id_seq OWNED BY public.work_rev.id;
+
+
+
+ALTER TABLE ONLY public.changelog ALTER COLUMN id SET DEFAULT nextval('public.changelog_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.container_edit ALTER COLUMN id SET DEFAULT nextval('public.container_edit_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.container_ident ALTER COLUMN id SET DEFAULT nextval('public.container_ident_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.container_rev ALTER COLUMN id SET DEFAULT nextval('public.container_rev_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.creator_edit ALTER COLUMN id SET DEFAULT nextval('public.creator_edit_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.creator_ident ALTER COLUMN id SET DEFAULT nextval('public.creator_ident_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.creator_rev ALTER COLUMN id SET DEFAULT nextval('public.creator_rev_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.editgroup ALTER COLUMN id SET DEFAULT nextval('public.editgroup_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.editor ALTER COLUMN id SET DEFAULT nextval('public.editor_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.file_edit ALTER COLUMN id SET DEFAULT nextval('public.file_edit_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.file_ident ALTER COLUMN id SET DEFAULT nextval('public.file_ident_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.file_release ALTER COLUMN id SET DEFAULT nextval('public.file_release_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.file_rev ALTER COLUMN id SET DEFAULT nextval('public.file_rev_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.release_edit ALTER COLUMN id SET DEFAULT nextval('public.release_edit_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.release_ident ALTER COLUMN id SET DEFAULT nextval('public.release_ident_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.release_ref ALTER COLUMN id SET DEFAULT nextval('public.release_ref_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.release_rev ALTER COLUMN id SET DEFAULT nextval('public.release_rev_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.work_edit ALTER COLUMN id SET DEFAULT nextval('public.work_edit_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.work_ident ALTER COLUMN id SET DEFAULT nextval('public.work_ident_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.work_rev ALTER COLUMN id SET DEFAULT nextval('public.work_rev_id_seq'::regclass);
+
+
+
+ALTER TABLE ONLY public.changelog
+ ADD CONSTRAINT changelog_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.container_ident
+ ADD CONSTRAINT container_ident_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.container_rev
+ ADD CONSTRAINT container_rev_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.creator_ident
+ ADD CONSTRAINT creator_ident_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.creator_rev
+ ADD CONSTRAINT creator_rev_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.editgroup
+ ADD CONSTRAINT editgroup_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.editor
+ ADD CONSTRAINT editor_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.editor
+ ADD CONSTRAINT editor_username_key UNIQUE (username);
+
+
+
+ALTER TABLE ONLY public.extra_json
+ ADD CONSTRAINT extra_json_pkey PRIMARY KEY (sha1);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.file_ident
+ ADD CONSTRAINT file_ident_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.file_release
+ ADD CONSTRAINT file_release_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.file_rev
+ ADD CONSTRAINT file_rev_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.release_contrib
+ ADD CONSTRAINT release_contrib_pkey PRIMARY KEY (release_rev, creator_ident_id);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.release_ident
+ ADD CONSTRAINT release_ident_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.release_ref
+ ADD CONSTRAINT release_ref_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.release_rev
+ ADD CONSTRAINT release_rev_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.work_ident
+ ADD CONSTRAINT work_ident_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.work_rev
+ ADD CONSTRAINT work_rev_pkey PRIMARY KEY (id);
+
+
+
+ALTER TABLE ONLY public.changelog
+ ADD CONSTRAINT changelog_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_ident_id_fkey FOREIGN KEY (ident_id) REFERENCES public.container_ident(id);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.container_ident(id);
+
+
+
+ALTER TABLE ONLY public.container_edit
+ ADD CONSTRAINT container_edit_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.container_rev(id);
+
+
+
+ALTER TABLE ONLY public.container_ident
+ ADD CONSTRAINT container_ident_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.container_ident(id);
+
+
+
+ALTER TABLE ONLY public.container_ident
+ ADD CONSTRAINT container_ident_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.container_rev(id);
+
+
+
+ALTER TABLE ONLY public.container_rev
+ ADD CONSTRAINT container_rev_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.container_rev
+ ADD CONSTRAINT container_rev_parent_id_fkey FOREIGN KEY (parent_id) REFERENCES public.container_ident(id);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_ident_id_fkey FOREIGN KEY (ident_id) REFERENCES public.creator_ident(id);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.creator_ident(id);
+
+
+
+ALTER TABLE ONLY public.creator_edit
+ ADD CONSTRAINT creator_edit_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.creator_rev(id);
+
+
+
+ALTER TABLE ONLY public.creator_ident
+ ADD CONSTRAINT creator_ident_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.creator_ident(id);
+
+
+
+ALTER TABLE ONLY public.creator_ident
+ ADD CONSTRAINT creator_ident_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.creator_rev(id);
+
+
+
+ALTER TABLE ONLY public.creator_rev
+ ADD CONSTRAINT creator_rev_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.editgroup
+ ADD CONSTRAINT editgroup_editor_id_fkey FOREIGN KEY (editor_id) REFERENCES public.editor(id);
+
+
+
+ALTER TABLE ONLY public.editgroup
+ ADD CONSTRAINT editgroup_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.editor
+ ADD CONSTRAINT editor_active_editgroup_id_fkey FOREIGN KEY (active_editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_ident_id_fkey FOREIGN KEY (ident_id) REFERENCES public.file_ident(id);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.file_ident(id);
+
+
+
+ALTER TABLE ONLY public.file_edit
+ ADD CONSTRAINT file_edit_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.file_rev(id);
+
+
+
+ALTER TABLE ONLY public.file_ident
+ ADD CONSTRAINT file_ident_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.file_ident(id);
+
+
+
+ALTER TABLE ONLY public.file_ident
+ ADD CONSTRAINT file_ident_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.file_rev(id);
+
+
+
+ALTER TABLE ONLY public.file_release
+ ADD CONSTRAINT file_release_file_rev_fkey FOREIGN KEY (file_rev) REFERENCES public.file_rev(id);
+
+
+
+ALTER TABLE ONLY public.file_release
+ ADD CONSTRAINT file_release_release_ident_id_fkey FOREIGN KEY (release_ident_id) REFERENCES public.release_ident(id);
+
+
+
+ALTER TABLE ONLY public.file_rev
+ ADD CONSTRAINT file_rev_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.release_contrib
+ ADD CONSTRAINT release_contrib_creator_ident_id_fkey FOREIGN KEY (creator_ident_id) REFERENCES public.creator_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_contrib
+ ADD CONSTRAINT release_contrib_release_rev_fkey FOREIGN KEY (release_rev) REFERENCES public.release_rev(id);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_ident_id_fkey FOREIGN KEY (ident_id) REFERENCES public.release_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.release_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_edit
+ ADD CONSTRAINT release_edit_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.release_rev(id);
+
+
+
+ALTER TABLE ONLY public.release_ident
+ ADD CONSTRAINT release_ident_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.release_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_ident
+ ADD CONSTRAINT release_ident_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.release_rev(id);
+
+
+
+ALTER TABLE ONLY public.release_ref
+ ADD CONSTRAINT release_ref_release_rev_fkey FOREIGN KEY (release_rev) REFERENCES public.release_rev(id);
+
+
+
+ALTER TABLE ONLY public.release_ref
+ ADD CONSTRAINT release_ref_target_release_ident_id_fkey FOREIGN KEY (target_release_ident_id) REFERENCES public.release_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_rev
+ ADD CONSTRAINT release_rev_container_ident_id_fkey FOREIGN KEY (container_ident_id) REFERENCES public.container_ident(id);
+
+
+
+ALTER TABLE ONLY public.release_rev
+ ADD CONSTRAINT release_rev_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.release_rev
+ ADD CONSTRAINT release_rev_work_ident_id_fkey FOREIGN KEY (work_ident_id) REFERENCES public.work_ident(id);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_editgroup_id_fkey FOREIGN KEY (editgroup_id) REFERENCES public.editgroup(id);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_ident_id_fkey FOREIGN KEY (ident_id) REFERENCES public.work_ident(id);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.work_ident(id);
+
+
+
+ALTER TABLE ONLY public.work_edit
+ ADD CONSTRAINT work_edit_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.work_rev(id);
+
+
+
+ALTER TABLE ONLY public.work_ident
+ ADD CONSTRAINT work_ident_redirect_id_fkey FOREIGN KEY (redirect_id) REFERENCES public.work_ident(id);
+
+
+
+ALTER TABLE ONLY public.work_ident
+ ADD CONSTRAINT work_ident_rev_id_fkey FOREIGN KEY (rev_id) REFERENCES public.work_rev(id);
+
+
+
+ALTER TABLE ONLY public.work_rev
+ ADD CONSTRAINT work_rev_extra_json_id_fkey FOREIGN KEY (extra_json_id) REFERENCES public.extra_json(sha1);
+
+
+
+ALTER TABLE ONLY public.work_rev
+ ADD CONSTRAINT work_rev_primary_release_id_fkey FOREIGN KEY (primary_release_id) REFERENCES public.release_ident(id);
+
+
+
diff --git a/python/tests/api.py b/python/tests/api.py
new file mode 100644
index 00000000..02875f64
--- /dev/null
+++ b/python/tests/api.py
@@ -0,0 +1,308 @@
+
+import json
+import unittest
+import tempfile
+import pytest
+import fatcat
+import fatcat.dummy
+import fatcat.sql
+from fatcat.models import *
+from fixtures import *
+
+
+def test_health(app):
+ rv = app.get('/health')
+ obj = json.loads(rv.data.decode('utf-8'))
+ assert obj['ok']
+
+def test_api_work(app):
+ fatcat.dummy.insert_example_works()
+
+ # Invalid Id
+ rv = app.get('/v0/work/_')
+ assert rv.status_code == 404
+
+ # Random
+ rv = app.get('/v0/work/random')
+ rv = app.get(rv.location)
+ work = json.loads(rv.data.decode('utf-8'))
+ check_entity_fields(work)
+ print(work)
+ assert work['title']
+ assert work['work_type']
+
+ # Valid Id (from random above)
+ rv = app.get('/v0/work/{}'.format(work['id']))
+ assert rv.status_code == 200
+
+ # Missing Id
+ rv = app.get('/v0/work/r3zga5b9cd7ef8gh084714iljk')
+ assert rv.status_code == 404
+
+def test_api_work_create(app):
+ assert WorkIdent.query.count() == 0
+ assert WorkRev.query.count() == 0
+ assert WorkEdit.query.count() == 0
+ rv = app.post('/v0/work',
+ data=json.dumps(dict(title="dummy", work_type="thing", extra=dict(a=1, b="zing"))),
+ headers={"content-type": "application/json"})
+ print(rv)
+ assert rv.status_code == 200
+ assert WorkIdent.query.count() == 1
+ assert WorkRev.query.count() == 1
+ assert WorkEdit.query.count() == 1
+ # not alive yet
+ assert WorkIdent.query.filter(WorkIdent.is_live==True).count() == 0
+
+def test_api_rich_create(app):
+
+ # TODO: create user?
+
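+ # open a fresh editgroup; the entity posts below attach to it explicitly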
+ rv = app.post('/v0/editgroup',
+ data=json.dumps(dict(
+ extra=dict(q=1, u="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ editgroup_id = obj['id']
+
+ for cls in (WorkIdent, WorkRev, WorkEdit,
+ ContainerIdent, ContainerRev, ContainerEdit,
+ CreatorIdent, CreatorRev, CreatorEdit,
+ ReleaseIdent, ReleaseRev, ReleaseEdit,
+ FileIdent, FileRev, FileEdit,
+ ChangelogEntry):
+ assert cls.query.count() == 0
+
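+ # create one of each entity type within the editgroup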
+ rv = app.post('/v0/container',
+ data=json.dumps(dict(
+ name="schmournal",
+ publisher="society of authors",
+ issn="2222-3333",
+ editgroup=editgroup_id,
+ extra=dict(a=2, i="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ container_id = obj['id']
+
+ rv = app.post('/v0/creator',
+ data=json.dumps(dict(
+ name="anon y. mouse",
+ orcid="0000-0002-1825-0097",
+ editgroup=editgroup_id,
+ extra=dict(w=1, q="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ creator_id = obj['id']
+
+ rv = app.post('/v0/work',
+ data=json.dumps(dict(
+ title="dummy work",
+ work_type="book",
+ editgroup=editgroup_id,
+ extra=dict(a=3, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ work_id = obj['id']
+
+ # this stub release will be referenced by the following release
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="derivative work",
+ work_type="journal-article",
+ work=work_id,
+ creators=[creator_id],
+ doi="10.1234/58",
+ editgroup=editgroup_id,
+ refs=[
+ dict(stub="some other journal article"),
+ ],
+ extra=dict(f=7, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ stub_release_id = obj['id']
+
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="dummy work",
+ work_type="book",
+ work=work_id,
+ container=container_id,
+ creators=[creator_id],
+ doi="10.1234/5678",
+ editgroup=editgroup_id,
+ refs=[
+ dict(stub="some book", target=stub_release_id),
+ ],
+ extra=dict(f=7, b="loopy"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ release_id = obj['id']
+
+ rv = app.post('/v0/file',
+ data=json.dumps(dict(
+ sha1="deadbeefdeadbeef",
+ size=1234,
+ releases=[release_id],
+ editgroup=editgroup_id,
+ extra=dict(f=4, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ file_id = obj['id']
+
+ for cls in (WorkIdent, WorkRev, WorkEdit,
+ ContainerIdent, ContainerRev, ContainerEdit,
+ CreatorIdent, CreatorRev, CreatorEdit,
+ FileIdent, FileRev, FileEdit):
+ assert cls.query.count() == 1
+ for cls in (ReleaseIdent, ReleaseRev, ReleaseEdit):
+ assert cls.query.count() == 2
+
+ for cls in (WorkIdent,
+ ContainerIdent,
+ CreatorIdent,
+ ReleaseIdent,
+ FileIdent):
+ assert cls.query.filter(cls.is_live==True).count() == 0
+
+ assert ChangelogEntry.query.count() == 0
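+ # accepting the editgroup makes the edits live and records a changelog entry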
+ rv = app.post('/v0/editgroup/{}/accept'.format(editgroup_id),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ assert ChangelogEntry.query.count() == 1
+
+ for cls in (WorkIdent, WorkRev, WorkEdit,
+ ContainerIdent, ContainerRev, ContainerEdit,
+ CreatorIdent, CreatorRev, CreatorEdit,
+ FileIdent, FileRev, FileEdit):
+ assert cls.query.count() == 1
+ for cls in (ReleaseIdent, ReleaseRev, ReleaseEdit):
+ assert cls.query.count() == 2
+
+ for cls in (WorkIdent,
+ ContainerIdent,
+ CreatorIdent,
+ FileIdent):
+ assert cls.query.filter(cls.is_live==True).count() == 1
+ assert ReleaseIdent.query.filter(ReleaseIdent.is_live==True).count() == 2
+
+ # Test that foreign key relations worked
+ release_rv = json.loads(app.get('/v0/release/{}'.format(release_id)).data.decode('utf-8'))
+ print(release_rv)
+ assert release_rv['creators'][0]['creator'] == creator_id
+ assert release_rv['container']['id'] == container_id
+ assert release_rv['work']['id'] == work_id
+ assert release_rv['refs'][0]['target'] == stub_release_id
+
+ file_rv = json.loads(app.get('/v0/file/{}'.format(file_id)).data.decode('utf-8'))
+ print(file_rv)
+ assert file_rv['releases'][0]['release'] == release_id
+
+ # test that the editor's active editgroup has been cleared
+ editor = Editor.query.first()
+ assert editor.active_editgroup is None
+
+def test_api_release_lookup(rich_app):
+ app = rich_app
+
+ rv = app.get('/v0/release/1',
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+
+ rv = app.get('/v0/release/lookup',
+ data=json.dumps(dict(doi="10.1234/5678")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ assert obj['doi'] == "10.1234/5678"
+ assert obj.get('id') is not None
+
+ rv = app.get('/v0/release/lookup',
+ data=json.dumps(dict(doi="10.1234/5678_noexit")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 404
+
+ rv = app.get('/v0/release/lookup',
+ data=json.dumps(dict(doi="not_even_valid_doi")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 400
+
+def test_api_creator_lookup(rich_app):
+ app = rich_app
+
+ rv = app.get('/v0/creator/1',
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+
+ rv = app.get('/v0/creator/lookup',
+ data=json.dumps(dict(orcid="0000-0002-1825-0097")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ assert obj['orcid'] == "0000-0002-1825-0097"
+ assert obj.get('id') is not None
+
+ rv = app.get('/v0/creator/lookup',
+ data=json.dumps(dict(orcid="0000-0002-1825-0098")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 404
+
+ rv = app.get('/v0/creator/lookup',
+ data=json.dumps(dict(orcid="not_even_valid_orcid")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 400
+
+
+def test_api_container_lookup(rich_app):
+ app = rich_app
+
+ rv = app.get('/v0/container/1',
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+
+ rv = app.get('/v0/container/lookup',
+ data=json.dumps(dict(issn="2222-3333")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ assert obj['issn'] == "2222-3333"
+ assert obj.get('id') is not None
+
+ rv = app.get('/v0/container/lookup',
+ data=json.dumps(dict(issn="2222-3334")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 404
+
+ rv = app.get('/v0/container/lookup',
+ data=json.dumps(dict(issn="not_even_valid_issn")),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 400
+
+def test_api_editor_get(rich_app):
+ app = rich_app
+
+ rv = app.get('/v0/editor/admin',
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ print(obj)
+ assert obj['username'] == "admin"
+ assert obj['id'] == 1
+
+def test_api_editor_changelog(rich_app):
+ app = rich_app
+
+ rv = app.get('/v0/editor/admin/changelog',
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ print(obj)
+ assert len(obj) == 1
diff --git a/python/tests/api_client.py b/python/tests/api_client.py
new file mode 100644
index 00000000..37e3da56
--- /dev/null
+++ b/python/tests/api_client.py
@@ -0,0 +1,14 @@
+
+import pytest
+import fatcat.api_client
+from fixtures import *
+
+
+def test_client_health(api_client):
+ assert api_client.health() is not None
+
+
+def test_import_crossref(api_client):
+ api_client.import_crossref_file('tests/files/crossref-works.2018-01-21.badsample.json')
+
+ # TODO: use API to check that entities actually created...
diff --git a/python/tests/entity_lifecycle.py b/python/tests/entity_lifecycle.py
new file mode 100644
index 00000000..4ac7ee68
--- /dev/null
+++ b/python/tests/entity_lifecycle.py
@@ -0,0 +1,80 @@
+
+import json
+import unittest
+import tempfile
+import pytest
+import fatcat
+import fatcat.sql
+from fatcat.models import *
+from fixtures import *
+
+
+def test_merge_works(app):
+
+ # two works, each with releases
+ rv = app.post('/v0/work',
+ data=json.dumps(dict()),
+ headers={"content-type": "application/json"})
+ workA_id = json.loads(rv.data.decode('utf-8'))['id']
+
+ rv = app.post('/v0/work',
+ data=json.dumps(dict()),
+ headers={"content-type": "application/json"})
+ workB_id = json.loads(rv.data.decode('utf-8'))['id']
+
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="some release",
+ work_type="journal-article",
+ work=workA_id,
+ doi="10.1234/A1")),
+ headers={"content-type": "application/json"})
+ releaseA1 = json.loads(rv.data.decode('utf-8'))['id']
+
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="some release",
+ work_type="journal-article",
+ work=workB_id,
+ doi="10.1234/B1")),
+ headers={"content-type": "application/json"})
+ releaseB1 = json.loads(rv.data.decode('utf-8'))['id']
+
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="some release",
+ work_type="journal-article",
+ work=workB_id,
+ doi="10.1234/A1")),
+ headers={"content-type": "application/json"})
+ releaseB2 = json.loads(rv.data.decode('utf-8'))['id']
+
+ # XXX: what if workB primary was set?
+
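+ # accept editgroup 1 (implicitly used by the work/release posts above)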
+ editgroup_id = 1
+ rv = app.post('/v0/editgroup/{}/accept'.format(editgroup_id),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ assert ChangelogEntry.query.count() == 1
+ assert WorkIdent.query.filter(WorkIdent.is_live==True).count() == 2
+ assert ReleaseIdent.query.filter(ReleaseIdent.is_live==True).count() == 3
+
+ # merge works
+ fatcat.sql.merge_works(workA_id, workB_id)
+ editgroup_id = 2
+ rv = app.post('/v0/editgroup/{}/accept'.format(editgroup_id),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+
+ # check results
+ assert ChangelogEntry.query.count() == 2
+ assert WorkIdent.query.filter(WorkIdent.is_live==True).count() == 2
+ assert ReleaseIdent.query.filter(ReleaseIdent.is_live==True).count() == 3
+
+ workA_json = json.loads(app.get('/v0/work/{}'.format(workA_id)).data.decode('utf-8'))
+ workB_json = json.loads(app.get('/v0/work/{}'.format(workB_id)).data.decode('utf-8'))
+ assert workA_json['rev'] == workB_json['rev']
+ print(workA_json)
+ print(workB_json)
+ assert workA_json['redirect_id'] is None
+ assert workB_json['redirect_id'] == workA_json['id']
diff --git a/python/tests/files/crossref-works.2018-01-21.badsample.json b/python/tests/files/crossref-works.2018-01-21.badsample.json
new file mode 100644
index 00000000..143adf3e
--- /dev/null
+++ b/python/tests/files/crossref-works.2018-01-21.badsample.json
@@ -0,0 +1,10 @@
+{ "_id" : { "$oid" : "5a55183088a035a45bd906de" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:35:29Z", "timestamp" : { "$numberLong" : "1508769329541" } }, "reference-count" : 7, "publisher" : "Elsevier BV", "issue" : "3", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 2002, 3, 1 ] ], "date-time" : "2002-03-01T00:00:00Z", "timestamp" : { "$numberLong" : "1014940800000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Journal of the American Academy of Dermatology" ], "published-print" : { "date-parts" : [ [ 2002, 3 ] ] }, "DOI" : "10.1067/mjd.2002.110660", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 24 ] ], "date-time" : "2002-08-24T22:14:26Z", "timestamp" : { "$numberLong" : "1030227266000" } }, "page" : "434-436", "source" : "Crossref", "is-referenced-by-count" : 7, "title" : [ "Progressive generalized alopecia due to systemic amyloidosis" ], "prefix" : "10.1067", "volume" : "46", "author" : [ { "given" : "Michael E.", "family" : "Lutz", "affiliation" : [] }, { "given" : "Mark R.", "family" : "Pittelkow", "affiliation" : [] } ], "member" : "78", "container-title" : [ "Journal of the American Academy of Dermatology" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0190962202365551?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0190962202365551?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T16:37:31Z", "timestamp" : { "$numberLong" : "1497458251000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002, 3 ] ] }, "references-count" : 7, "alternative-id" : [ "S0190962202365551" ], "URL" : "http://dx.doi.org/10.1067/mjd.2002.110660", "ISSN" : [ "0190-9622" ], "issn-type" : [ { "value" : "0190-9622", "type" : "print" } ], "subject" : [ "Dermatology" ] }
+{ "_id" : { "$oid" : "5a5513fb88a035a45bd63d2e" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:15:17Z", "timestamp" : { "$numberLong" : "1508768117199" } }, "reference-count" : 25, "publisher" : "Wiley-Blackwell", "issue" : "4", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 6452, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Int. J. Quant. Chem." ], "published-print" : { "date-parts" : [ [ 1998 ] ] }, "DOI" : "10.1002/(sici)1097-461x(1998)66:4<261::aid-qua1>3.0.co;2-t", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T21:09:51Z", "timestamp" : { "$numberLong" : "1030309791000" } }, "page" : "261-272", "source" : "Crossref", "is-referenced-by-count" : 5, "title" : [ "Renormalized perturbation theory by the moment method for degenerate states: Anharmonic oscillators" ], "prefix" : "10.1002", "volume" : "66", "author" : [ { "given" : "Marcelo D.", "family" : "Radicioni", "affiliation" : [] }, { "given" : "Carlos G.", "family" : "Diaz", "affiliation" : [] }, { "given" : "Francisco M.", "family" : "Fern�ndez", "affiliation" : [] } ], "member" : "311", "reference" : [ { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB1", "author" : "Swenson", "volume" : "57", "first-page" : "1734", "year" : "1972", "journal-title" : "J. Chem. Phys.", "DOI" : "10.1063/1.1678462", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB2", "author" : "Killingbeck", "volume" : "65", "first-page" : "87", "year" : "1978", "journal-title" : "Phys. Lett. A", "DOI" : "10.1016/0375-9601(78)90580-7", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB3", "author" : "Fernández", "volume" : "43", "year" : "1987", "unstructured" : "and Hypervirial Theorems, Lecture Notes in Chemistry, Vol. 43, (Springer, Berlin, 1987).", "volume-title" : "Hypervirial Theorems, Lecture Notes in Chemistry", "DOI" : "10.1007/978-3-642-93349-3", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB4", "author" : "Arteca", "volume" : "53", "year" : "1990", "unstructured" : "and Large Order Perturbation Theory and Summation Methods in Quantum Mechanics, Lecture Notes in Chemistry, Vol. 53, (Springer, Berlin, 1990).", "volume-title" : "Large Order Perturbation Theory and Summation Methods in Quantum Mechanics, Lecture Notes in Chemistry", "DOI" : "10.1007/978-3-642-93469-8", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB5", "author" : "Ader", "volume" : "97", "first-page" : "178", "year" : "1983", "journal-title" : "Phys. Lett. A", "DOI" : "10.1016/0375-9601(83)90352-3", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB6", "author" : "Blankenbecler", "volume" : "21", "first-page" : "1055", "year" : "1980", "journal-title" : "Phys. Rev. D", "DOI" : "10.1103/PhysRevD.21.1055", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB7", "author" : "Fernández", "volume" : "26", "first-page" : "497", "year" : "1984", "journal-title" : "Int. J. 
Quantum Chem.", "DOI" : "10.1002/qua.560260408", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB8", "author" : "Arteca", "volume" : "128", "first-page" : "253", "year" : "1984", "journal-title" : "Physica A", "DOI" : "10.1016/0378-4371(84)90090-6", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB9", "author" : "Fernández", "volume" : "28", "first-page" : "603", "year" : "1985", "journal-title" : "Int. J. Quantum Chem.", "DOI" : "10.1002/qua.560280507", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB10", "author" : "Fernández", "volume" : "20", "first-page" : "3777", "year" : "1987", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/20/12/027", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB11", "author" : "Austin", "volume" : "18", "first-page" : "449", "year" : "1984", "journal-title" : "Int. J. Quantum Chem. S", "DOI" : "10.1002/qua.560260841", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB12", "author" : "Killingbeck", "volume" : "18", "first-page" : "793", "year" : "1985", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/18/5/012", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB13", "author" : "Killingbeck", "volume" : "19", "first-page" : "705", "year" : "1986", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/19/5/023", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB14", "author" : "Witwit", "volume" : "24", "first-page" : "3053", "year" : "1991", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/24/13/020", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB15", "author" : "Witwit", "volume" : "24", "first-page" : "4535", "year" : "1991", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/24/19/017", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB16", "author" : "Witwit", "volume" : "33", "first-page" : "4196", "year" : "1992", "journal-title" : "J. Math. Phys.", "DOI" : "10.1063/1.529818", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB17", "author" : "Witwit", "volume" : "36", "first-page" : "187", "year" : "1995", "journal-title" : "J. Math. Phys.", "DOI" : "10.1063/1.531299", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB18", "author" : "Killingbeck", "volume" : "14", "first-page" : "1005", "year" : "1981", "journal-title" : "J. Phys. A", "DOI" : "10.1088/0305-4470/14/5/020", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB19", "author" : "Killingbeck", "volume" : "14", "first-page" : "l461", "year" : "1981", "journal-title" : "J. Phys. B", "DOI" : "10.1088/0022-3700/14/14/001", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB20", "author" : "Fernández", "volume" : "25", "first-page" : "492", "year" : "1992", "journal-title" : "J. Phys. 
A" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB21", "author" : "Fernández", "volume" : "165", "first-page" : "314", "year" : "1992", "journal-title" : "Phys. Lett. A", "DOI" : "10.1016/0375-9601(92)90500-L", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB22", "author" : "Fernández", "volume" : "178", "first-page" : "11", "year" : "1993", "journal-title" : "Phys. Lett. A", "DOI" : "10.1016/0375-9601(93)90719-G", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB23", "author" : "Simon", "volume" : "58", "first-page" : "76", "year" : "1970", "journal-title" : "Ann. Phys. (N.Y.)", "DOI" : "10.1016/0003-4916(70)90240-X", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB24", "author" : "Press", "year" : "1986", "unstructured" : "and Numerical Recipes, (Cambridge University Press, Cambridge, 1986).", "volume-title" : "Numerical Recipes" }, { "key" : "10.1002/(SICI)1097-461X(1998)66:4<261::AID-QUA1>3.0.CO;2-T-BIB25", "author" : "Spirko", "volume" : "102", "first-page" : "8916", "year" : "1995", "journal-title" : "J. Chem. Phys.", "DOI" : "10.1063/1.468945", "doi-asserted-by" : "crossref" } ], "container-title" : [ "International Journal of Quantum Chemistry" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F(SICI)1097-461X(1998)66:4%3C261::AID-QUA1%3E3.0.CO;2-T", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 7, 23 ] ], "date-time" : "2017-07-23T04:32:16Z", "timestamp" : { "$numberLong" : "1500784336000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 1998 ] ] }, "references-count" : 25, "URL" : "http://dx.doi.org/10.1002/(sici)1097-461x(1998)66:4<261::aid-qua1>3.0.co;2-t", "relation" : { "cites" : [] }, "ISSN" : [ "0020-7608", "1097-461X" ], "issn-type" : [ { "value" : "0020-7608", "type" : "print" }, { "value" : "1097-461X", "type" : "electronic" } ], "subject" : [ "Physical and Theoretical Chemistry", "Atomic and Molecular Physics, and Optics", "Condensed Matter Physics" ] }
+{ "_id" : { "$oid" : "5a5515c788a035a45bd77a1b" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:24:07Z", "timestamp" : { "$numberLong" : "1508768647545" } }, "reference-count" : 10, "publisher" : "Wiley-Blackwell", "issue" : "3", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 4991, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Cell Biochem. Funct." ], "published-print" : { "date-parts" : [ [ 2002, 9 ] ] }, "DOI" : "10.1002/cbf.935", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 11 ] ], "date-time" : "2002-09-11T02:05:28Z", "timestamp" : { "$numberLong" : "1031709928000" } }, "page" : "191-194", "source" : "Crossref", "is-referenced-by-count" : 6, "title" : [ "Cytokine detection in HIV-1/HHV-8 co-infected subjects" ], "prefix" : "10.1002", "volume" : "20", "author" : [ { "given" : "Agostino", "family" : "Pugliese", "affiliation" : [] }, { "given" : "Donato", "family" : "Torre", "affiliation" : [] }, { "given" : "Andrea", "family" : "Saini", "affiliation" : [] }, { "given" : "Gloria", "family" : "Pagliano", "affiliation" : [] }, { "given" : "Gloria", "family" : "Gallo", "affiliation" : [] }, { "given" : "Pietro Giorgio", "family" : "Pistono", "affiliation" : [] }, { "given" : "Gian Carlo", "family" : "Paggi", "affiliation" : [] } ], "member" : "311", "published-online" : { "date-parts" : [ [ 2002 ] ] }, "reference" : [ { "key" : "10.1002/cbf.935-BIB1", "author" : "Chang", "volume" : "266", "first-page" : "1865", "year" : "1994", "journal-title" : "Science", "DOI" : "10.1126/science.7997879", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/cbf.935-BIB2", "author" : "Drago", "volume" : "135", "first-page" : "71", "year" : "1999", "journal-title" : "Arch. 
Dermatol" }, { "key" : "10.1002/cbf.935-BIB3", "author" : "Pugliese", "volume" : "24", "first-page" : "258", "year" : "2000", "journal-title" : "Cancer Detect Prevent" }, { "key" : "10.1002/cbf.935-BIB4", "author" : "Clerici", "volume" : "15", "first-page" : "575", "year" : "1994", "journal-title" : "Immunol Today", "DOI" : "10.1016/0167-5699(94)90220-8", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/cbf.935-BIB5", "author" : "Zhang", "volume" : "185", "first-page" : "837", "year" : "1997", "journal-title" : "J Exp Med" }, { "key" : "10.1002/cbf.935-BIB6", "author" : "Vincenzi", "volume" : "62", "first-page" : "34", "year" : "1997", "journal-title" : "J Leuk Biol" }, { "key" : "10.1002/cbf.935-BIB7", "author" : "Stoll", "volume" : "28", "first-page" : "3231", "year" : "1998", "journal-title" : "Eur J Immunol", "DOI" : "10.1002/(SICI)1521-4141(199810)28:10<3231::AID-IMMU3231>3.0.CO;2-Q", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/cbf.935-BIB8", "author" : "Fiorelli", "volume" : "91", "first-page" : "956", "year" : "1998", "journal-title" : "Blood" }, { "key" : "10.1002/cbf.935-BIB9", "author" : "Boshoff", "volume" : "75", "first-page" : "57", "year" : "1998", "journal-title" : "Adv Cancer Res.", "DOI" : "10.1016/S0065-230X(08)60739-3", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/cbf.935-BIB10", "author" : "Fakoya", "volume" : "11", "first-page" : "1445", "year" : "1997", "journal-title" : "AIDS", "DOI" : "10.1097/00002030-199712000-00008", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Cell Biochemistry and Function" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2Fcbf.935", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 7, 27 ] ], "date-time" : "2017-07-27T01:56:41Z", "timestamp" : { "$numberLong" : "1501120601000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002 ] ] }, "references-count" : 10, "URL" : "http://dx.doi.org/10.1002/cbf.935", "relation" : { "cites" : [] }, "ISSN" : [ "0263-6484", "1099-0844" ], "issn-type" : [ { "value" : "0263-6484", "type" : "print" }, { "value" : "1099-0844", "type" : "electronic" } ], "subject" : [ "Clinical Biochemistry", "Cell Biology", "Biochemistry", "General Medicine" ] }
+{ "_id" : { "$oid" : "5a5515c788a035a45bd77a35" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:24:07Z", "timestamp" : { "$numberLong" : "1508768647674" } }, "reference-count" : 31, "publisher" : "Wiley-Blackwell", "issue" : "4", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 4946, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Angew. Chem. Int. Ed." ], "published-print" : { "date-parts" : [ [ 2002, 2, 15 ] ] }, "DOI" : "10.1002/1521-3773(20020215)41:4<583::aid-anie583>3.0.co;2-i", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T23:46:56Z", "timestamp" : { "$numberLong" : "1030319216000" } }, "page" : "583-585", "source" : "Crossref", "is-referenced-by-count" : 126, "title" : [ "Noninterpenetrating Square-Grid Coordination Polymers With Dimensions of 25×25 Å2 Prepared by UsingN,N′-Type Ligands: The First Chiral Square-Grid Coordination Polymer" ], "prefix" : "10.1002", "volume" : "41", "author" : [ { "given" : "Neil G.", "family" : "Pschirer", "affiliation" : [] }, { "given" : "Delia M.", "family" : "Ciurtin", "affiliation" : [] }, { "given" : "Mark D.", "family" : "Smith", "affiliation" : [] }, { "given" : "Uwe H. F.", "family" : "Bunz", "affiliation" : [] }, { "given" : "Hans-Conrad", "family" : "zur Loye", "affiliation" : [] } ], "member" : "311", "reference" : [ { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB1", "journal-title" : "Design of Solids From Molecular Building Blocks: Golden Opportunities for Solid State Chemistry" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB2.1", "author" : "Dong", "volume" : "112", "first-page" : "4441", "year" : "2000", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/1521-3757(20001201)112:23<4441::AID-ANGE4441>3.0.CO;2-B", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB2.2", "volume" : "39", "first-page" : "4271", "year" : "2000", "journal-title" : "Angew. Chem. Int. Ed.", "DOI" : "10.1002/1521-3773(20001201)39:23<4271::AID-ANIE4271>3.0.CO;2-1", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB3", "author" : "Kang", "volume" : "120", "first-page" : "3650", "year" : "1998", "journal-title" : "J. Am. Chem. Soc.", "DOI" : "10.1021/ja973898+", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB4", "author" : "Kang", "volume" : "120", "first-page" : "7389", "year" : "1998", "journal-title" : "J. Am. Chem. Soc.", "DOI" : "10.1021/ja980927n", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB5.1", "author" : "Yünlü", "volume" : "97", "first-page" : "863", "year" : "1985", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/ange.19850971016", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB5.2", "volume" : "24", "first-page" : "879", "year" : "1985", "journal-title" : "Angew. Chem. Int. Ed. 
Engl.", "DOI" : "10.1002/anie.198508791", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB6", "author" : "Chen", "volume" : "291", "first-page" : "1021", "year" : "2001", "journal-title" : "Science", "DOI" : "10.1126/science.1056598", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB7", "author" : "Dong", "volume" : "11", "first-page" : "1413", "year" : "1999", "journal-title" : "Chem. Mater.", "DOI" : "10.1021/cm990082u", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB8", "author" : "Dong", "volume" : "2", "first-page" : "861", "year" : "2000", "journal-title" : "Solid State Sci.", "DOI" : "10.1016/S1293-2558(00)01094-3", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB9A", "author" : "Biradha", "first-page" : "15", "year" : "2001", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/b007014i", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB9B", "unstructured" : "P. J. Stang, B. Olenyuk, Acc. Chem. Res., 1977, 30, 502;", "DOI" : "10.1021/ar9602011", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB9C", "unstructured" : "P. J. Stang, D. H. Cao, S. Saito, A. M. Arif, J. Am. Chem. Soc. 1995, 117, 6273.", "DOI" : "10.1021/ja00128a015", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB10", "author" : "Dong", "volume" : "12", "first-page" : "1156", "year" : "2000", "journal-title" : "Chem. Mater.", "DOI" : "10.1021/cm9907965", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB11", "author" : "Zaworotko", "first-page" : "1", "year" : "2001", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/b007127g", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB12.1", "author" : "Biradha", "volume" : "112", "first-page" : "4001", "year" : "2000", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/1521-3757(20001103)112:21<4001::AID-ANGE4001>3.0.CO;2-5", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB12.2", "volume" : "39", "first-page" : "3843", "year" : "2000", "journal-title" : "Angew. Chem. Int. Ed.", "DOI" : "10.1002/1521-3773(20001103)39:21<3843::AID-ANIE3843>3.0.CO;2-#", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB13.1", "author" : "Batten", "volume" : "110", "first-page" : "1558", "year" : "1998", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/(SICI)1521-3757(19980605)110:11<1558::AID-ANGE1558>3.0.CO;2-7", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB13.2", "volume" : "37", "first-page" : "1460", "year" : "1998", "journal-title" : "Angew. Chem. Int. Ed.", "DOI" : "10.1002/(SICI)1521-3773(19980619)37:11<1460::AID-ANIE1460>3.0.CO;2-Z", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB14", "unstructured" : "U. Bunz, H.-C. zur Loye, unpublished results, 2001." }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB17", "unstructured" : "A. M. C. T. 
PLATON, Utrecht University, Utrecht, The Netherlands, Spek, A.L. 1998." }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB18", "author" : "Ezuhara", "volume" : "121", "first-page" : "3279", "year" : "1999", "journal-title" : "J. Am. Chem. Soc.", "DOI" : "10.1021/ja9819918", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB19.1", "author" : "Biradha", "volume" : "111", "first-page" : "584", "year" : "1999", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/(SICI)1521-3757(19990215)111:4<584::AID-ANGE584>3.0.CO;2-Z", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB19.2", "volume" : "38", "first-page" : "492", "year" : "1999", "journal-title" : "Angew. Chem. Int. Ed.", "DOI" : "10.1002/(SICI)1521-3773(19990215)38:4<492::AID-ANIE492>3.0.CO;2-#", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB20", "author" : "Chen", "first-page" : "4010", "year" : "2000", "journal-title" : "J. Chem. Soc. Dalton Trans.", "DOI" : "10.1039/b007004l", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB21", "author" : "Fragoso", "first-page" : "1547", "year" : "2000", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/b002360o", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB22", "author" : "Carlucci", "first-page" : "1319", "year" : "2000", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/b002021o", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB23", "author" : "Blake", "first-page" : "665", "year" : "2000", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/a909868b", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB24", "author" : "Carlucci", "first-page" : "1837", "year" : "1998", "journal-title" : "Chem. Commun.", "DOI" : "10.1039/a803662d", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB25.1", "author" : "Mamula", "volume" : "111", "first-page" : "3129", "year" : "1999", "journal-title" : "Angew. Chem.", "DOI" : "10.1002/(SICI)1521-3757(19991004)111:19<3129::AID-ANGE3129>3.0.CO;2-X", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1521-3773(20020215)41:4<583::AID-ANIE583>3.0.CO;2-I-BIB25.2", "volume" : "38", "first-page" : "2945", "year" : "1999", "journal-title" : "Angew. Chem. Int. 
Ed.", "DOI" : "10.1002/(SICI)1521-3773(19991004)38:19<2945::AID-ANIE2945>3.0.CO;2-D", "doi-asserted-by" : "crossref" } ], "container-title" : [ "Angewandte Chemie International Edition" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F1521-3773(20020215)41:4%3C583::AID-ANIE583%3E3.0.CO;2-I", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 7, 27 ] ], "date-time" : "2017-07-27T01:55:53Z", "timestamp" : { "$numberLong" : "1501120553000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2002, 2, 15 ] ] }, "references-count" : 31, "URL" : "http://dx.doi.org/10.1002/1521-3773(20020215)41:4<583::aid-anie583>3.0.co;2-i", "relation" : { "cites" : [] }, "ISSN" : [ "1433-7851", "1521-3773" ], "issn-type" : [ { "value" : "1433-7851", "type" : "print" }, { "value" : "1521-3773", "type" : "electronic" } ], "subject" : [ "General Chemistry", "Catalysis" ] }
+{ "_id" : { "$oid" : "5a5513f388a035a45bd63593" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:15:01Z", "timestamp" : { "$numberLong" : "1508768101957" } }, "reference-count" : 8, "publisher" : "Wiley-Blackwell", "issue" : "5", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 6332, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Med. Pediatr. Oncol." ], "published-print" : { "date-parts" : [ [ 1998, 5 ] ] }, "DOI" : "10.1002/(sici)1096-911x(199805)30:5<297::aid-mpo7>3.0.co;2-a", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T20:05:22Z", "timestamp" : { "$numberLong" : "1030305922000" } }, "page" : "297-300", "source" : "Crossref", "is-referenced-by-count" : 5, "title" : [ "Thallium-201 uptake in rebound thymic hyperplasia" ], "prefix" : "10.1002", "volume" : "30", "author" : [ { "given" : "Derek J.", "family" : "Roebuck", "affiliation" : [] }, { "given" : "Wayne D.", "family" : "Nicholls", "affiliation" : [] }, { "given" : "Elizabeth J.", "family" : "Bernard", "affiliation" : [] }, { "given" : "Stewart J.", "family" : "Kellie", "affiliation" : [] }, { "given" : "Robert", "family" : "Howman-Giles", "affiliation" : [] } ], "member" : "311", "reference" : [ { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB1", "author" : "Choyke", "volume" : "149", "first-page" : "269", "year" : "1987", "journal-title" : "AJR", "DOI" : "10.2214/ajr.149.2.269", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB2", "author" : "Cohen", "volume" : "135", "first-page" : "151", "year" : "1980", "journal-title" : "AJR", "DOI" : "10.2214/ajr.135.1.151", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB3", "author" : "Nadel", "volume" : "23", "first-page" : "243", "year" : "1993", "journal-title" : "Semin Nucl Med", "DOI" : "10.1016/S0001-2998(05)80105-9", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB4", "author" : "Howman-Giles", "volume" : "36", "first-page" : "1372", "year" : "1995", "journal-title" : "J Nucl Med" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB5", "author" : "Harris", "volume" : "34", "first-page" : "1326", "year" : "1993", "journal-title" : "J Nucl Med" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB6", "author" : "Fletcher", "volume" : "196", "first-page" : "851", "year" : "1995", "journal-title" : "Radiology", "DOI" : "10.1148/radiology.196.3.7644655", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB7", "author" : "Peylan-Ramu", "volume" : "7", "first-page" : "1800", "year" : "1989", "journal-title" : "J Clin Oncol", "DOI" : "10.1200/JCO.1989.7.12.1800", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/(SICI)1096-911X(199805)30:5<297::AID-MPO7>3.0.CO;2-A-BIB8", "author" : "Israel", "volume" : "34", "first-page" : "1330", "year" : "1993", "journal-title" : "J Nucl Med" } ], "container-title" : [ "Medical and Pediatric Oncology" ], "link" : [ { "URL" : 
"https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F(SICI)1096-911X(199805)30:5%3C297::AID-MPO7%3E3.0.CO;2-A", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 7, 23 ] ], "date-time" : "2017-07-23T03:41:41Z", "timestamp" : { "$numberLong" : "1500781301000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 1998, 5 ] ] }, "references-count" : 8, "URL" : "http://dx.doi.org/10.1002/(sici)1096-911x(199805)30:5<297::aid-mpo7>3.0.co;2-a", "relation" : { "cites" : [] }, "ISSN" : [ "0098-1532", "1096-911X" ], "issn-type" : [ { "value" : "0098-1532", "type" : "print" }, { "value" : "1096-911X", "type" : "electronic" } ], "subject" : [ "Pediatrics, Perinatology, and Child Health", "Cancer Research", "Oncology" ] }
+{ "_id" : { "$oid" : "5a55181488a035a45bd8f11c" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:34:55Z", "timestamp" : { "$numberLong" : "1508769295121" } }, "reference-count" : 6, "publisher" : "Wiley-Blackwell", "issue" : "1", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 5722, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Int. J. Network Mgmt." ], "published-print" : { "date-parts" : [ [ 2000, 1 ] ] }, "DOI" : "10.1002/(sici)1099-1190(200001/02)10:1<51::aid-nem357>3.0.co;2-g", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T19:50:10Z", "timestamp" : { "$numberLong" : "1030305010000" } }, "page" : "51-55", "source" : "Crossref", "is-referenced-by-count" : 0, "title" : [ "Computer speech: streaming technology" ], "prefix" : "10.1002", "volume" : "10", "author" : [ { "given" : "Judith M.", "family" : "Myerson", "affiliation" : [] } ], "member" : "311", "reference" : [ { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB1", "author" : "Myerson", "year" : "1999", "unstructured" : "Text-to-Speech Server Overview. Clientbreak Server Technology Handbook. NY: CRC Press (Auerbach Publishers) 1999.", "volume-title" : "Clientbreak Server Technology Handbook" }, { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB2", "volume" : "5", "first-page" : "64", "journal-title" : "Building WebSites" }, { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB3", "author" : "Muller", "year" : "1998", "unstructured" : "Switched Token-Ring Networks. DataPro, 1998.", "volume-title" : "DataPro" }, { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB4", "author" : "Swoyer", "year" : "1998", "unstructured" : "Navy Warns of New Hacking Technique, ent, vol. 3, 1998.", "series-title" : "ent" }, { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB5", "author" : "Witherspoon", "year" : "1995", "unstructured" : "Optimizing Client/Server Networks. Chicago, Ill: IDG Books (Compaq Press), 1995.", "volume-title" : "Optimizing Client/Server Networks" }, { "key" : "10.1002/(SICI)1099-1190(200001/02)10:1<51::AID-NEM357>3.0.CO;2-G-BIB6", "author" : "Giles", "first-page" : "63", "unstructured" : "How to Add Audio and Video Files. Building WEBSITES, vol. 
5, 63.", "series-title" : "Building WEBS" } ], "container-title" : [ "International Journal of Network Management" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F(SICI)1099-1190(200001%2F02)10:1%3C51::AID-NEM357%3E3.0.CO;2-G", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 4, 4 ] ], "date-time" : "2017-04-04T12:43:11Z", "timestamp" : { "$numberLong" : "1491309791000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2000, 1 ] ] }, "references-count" : 6, "URL" : "http://dx.doi.org/10.1002/(sici)1099-1190(200001/02)10:1<51::aid-nem357>3.0.co;2-g", "relation" : { "cites" : [] }, "ISSN" : [ "1055-7148", "1099-1190" ], "issn-type" : [ { "value" : "1055-7148", "type" : "print" }, { "value" : "1099-1190", "type" : "electronic" } ], "subject" : [ "Computer Networks and Communications", "Computer Science Applications" ] }
+{ "_id" : { "$oid" : "5a55196d88a035a45bda113b" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:41:55Z", "timestamp" : { "$numberLong" : "1508769715884" } }, "reference-count" : 35, "publisher" : "Elsevier BV", "issue" : "4", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 2001, 10, 1 ] ], "date-time" : "2001-10-01T00:00:00Z", "timestamp" : { "$numberLong" : "1001894400000" } }, "delay-in-days" : 0, "content-version" : "tdm" }, { "URL" : "http://creativecommons.org/licenses/by-nc-nd/4.0/", "start" : { "date-parts" : [ [ 2016, 10, 7 ] ], "date-time" : "2016-10-07T00:00:00Z", "timestamp" : { "$numberLong" : "1475798400000" } }, "delay-in-days" : 5485, "content-version" : "vor" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Molecular Therapy" ], "published-print" : { "date-parts" : [ [ 2001, 10 ] ] }, "DOI" : "10.1006/mthe.2001.0464", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 18 ] ], "date-time" : "2002-09-18T19:46:30Z", "timestamp" : { "$numberLong" : "1032378390000" } }, "page" : "356-364", "source" : "Crossref", "is-referenced-by-count" : 31, "title" : [ "Human and Mouse IFN-β Gene Therapy Exhibits Different Anti-tumor Mechanisms in Mouse Models" ], "prefix" : "10.1016", "volume" : "4", "author" : [ { "given" : "Xiao-Qiang", "family" : "Qin", "affiliation" : [] }, { "given" : "Carla", "family" : "Beckham", "affiliation" : [] }, { "given" : "Jennifer L.", "family" : "Brown", "affiliation" : [] }, { "given" : "Matvey", "family" : "Lukashev", "affiliation" : [] }, { "given" : "James", "family" : "Barsoum", "affiliation" : [] } ], "member" : "78", "container-title" : [ "Molecular Therapy" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S1525001601904642?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S1525001601904642?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T16:51:52Z", "timestamp" : { "$numberLong" : "1497459112000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001, 10 ] ] }, "references-count" : 35, "alternative-id" : [ "S1525001601904642" ], "URL" : "http://dx.doi.org/10.1006/mthe.2001.0464", "ISSN" : [ "1525-0016" ], "issn-type" : [ { "value" : "1525-0016", "type" : "print" } ], "subject" : [ "Molecular Medicine", "Genetics", "Molecular Biology", "Pharmacology", "Drug Discovery" ] }
+{ "_id" : { "$oid" : "5a55165c88a035a45bd7e535" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:27:04Z", "timestamp" : { "$numberLong" : "1508768824539" } }, "reference-count" : 6, "publisher" : "Wiley-Blackwell", "issue" : "24", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 5722, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Rapid Commun. Mass Spectrom." ], "published-print" : { "date-parts" : [ [ 2000, 12, 30 ] ] }, "DOI" : "10.1002/1097-0231(20001230)14:24<2357::aid-rcm168>3.0.co;2-2", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 11 ] ], "date-time" : "2002-09-11T00:24:57Z", "timestamp" : { "$numberLong" : "1031703897000" } }, "page" : "2357-2361", "source" : "Crossref", "is-referenced-by-count" : 2, "title" : [ "Electron impact fragmentation mechanisms of some cyclic esters with helical structures" ], "prefix" : "10.1002", "volume" : "14", "author" : [ { "given" : "Jiangtao", "family" : "He", "affiliation" : [] }, { "given" : "Aihua", "family" : "Nie", "affiliation" : [] }, { "given" : "Meiyu", "family" : "He", "affiliation" : [] }, { "given" : "Xiaoran", "family" : "He", "affiliation" : [] }, { "given" : "Zhenpei", "family" : "Yu", "affiliation" : [] }, { "given" : "Xiulin", "family" : "Ye", "affiliation" : [] }, { "given" : "Qiyi", "family" : "Xing", "affiliation" : [] } ], "member" : "311", "published-online" : { "date-parts" : [ [ 2000 ] ] }, "reference" : [ { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB1", "author" : "Nie", "volume" : "8", "first-page" : "141", "year" : "1997", "journal-title" : "Chinese Chem. Lett." }, { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB2", "author" : "Chenevert", "first-page" : "782", "year" : "1982", "journal-title" : "Synthesis", "DOI" : "10.1055/s-1982-29945", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB3", "author" : "Porter", "first-page" : "296", "year" : "1985", "unstructured" : "Mass Spectrometry of Heterocyclic Compounds (2nd edn). John Wiley & Sons: New York, 1985; 296.", "volume-title" : "Mass Spectrometry of Heterocyclic Compounds" }, { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB4", "author" : "Ková??ik", "volume" : "34", "first-page" : "1322", "year" : "1999", "journal-title" : "J. Mass Spectrom.", "DOI" : "10.1002/(SICI)1096-9888(199912)34:12<1322::AID-JMS888>3.0.CO;2-#", "doi-asserted-by" : "crossref" }, { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB5", "author" : "Mclafferty", "year" : "1993", "unstructured" : "Interpretation of Mass Spectra (4th edn). University Science Books: Sansalito, CA, 1993.", "volume-title" : "Interpretation of Mass Spectra" }, { "key" : "10.1002/1097-0231(20001230)14:24<2357::AID-RCM168>3.0.CO;2-2-BIB6", "author" : "Williams", "year" : "1996", "unstructured" : "Spectroscopic Methods in Organic Chemistry (5th edn). 
McGraw-Hill: London, 1996.", "volume-title" : "Spectroscopic Methods in Organic Chemistry" } ], "container-title" : [ "Rapid Communications in Mass Spectrometry" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F1097-0231(20001230)14:24%3C2357::AID-RCM168%3E3.0.CO;2-2", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 7, 31 ] ], "date-time" : "2017-07-31T21:16:20Z", "timestamp" : { "$numberLong" : "1501535780000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2000 ] ] }, "references-count" : 6, "URL" : "http://dx.doi.org/10.1002/1097-0231(20001230)14:24<2357::aid-rcm168>3.0.co;2-2", "relation" : { "cites" : [] }, "ISSN" : [ "0951-4198", "1097-0231" ], "issn-type" : [ { "value" : "0951-4198", "type" : "print" }, { "value" : "1097-0231", "type" : "electronic" } ], "subject" : [ "Organic Chemistry", "Analytical Chemistry", "Spectroscopy" ] }
+{ "_id" : { "$oid" : "5a55196988a035a45bda0cb1" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:41:48Z", "timestamp" : { "$numberLong" : "1508769708308" } }, "reference-count" : 44, "publisher" : "Elsevier BV", "issue" : "1", "license" : [ { "URL" : "http://www.elsevier.com/tdm/userlicense/1.0/", "start" : { "date-parts" : [ [ 1998, 11, 1 ] ], "date-time" : "1998-11-01T00:00:00Z", "timestamp" : { "$numberLong" : "909878400000" } }, "delay-in-days" : 0, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Toxicology and Applied Pharmacology" ], "published-print" : { "date-parts" : [ [ 1998, 11 ] ] }, "DOI" : "10.1006/taap.1998.8543", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 9, 18 ] ], "date-time" : "2002-09-18T22:01:25Z", "timestamp" : { "$numberLong" : "1032386485000" } }, "page" : "102-108", "source" : "Crossref", "is-referenced-by-count" : 44, "title" : [ "Role of CYP1A2 in the Hepatotoxicity of Acetaminophen: Investigations UsingCyp1a2Null Mice" ], "prefix" : "10.1006", "volume" : "153", "author" : [ { "given" : "Robert P.", "family" : "Tonge", "affiliation" : [] }, { "given" : "Edward J.", "family" : "Kelly", "affiliation" : [] }, { "given" : "Sam A.", "family" : "Bruschi", "affiliation" : [] }, { "given" : "Tom", "family" : "Kalhorn", "affiliation" : [] }, { "given" : "David L.", "family" : "Eaton", "affiliation" : [] }, { "given" : "Daniel W.", "family" : "Nebert", "affiliation" : [] }, { "given" : "Sidney D.", "family" : "Nelson", "affiliation" : [] } ], "member" : "78", "container-title" : [ "Toxicology and Applied Pharmacology" ], "link" : [ { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/xml", "content-type" : "text/xml", "content-version" : "vor", "intended-application" : "text-mining" }, { "URL" : "http://api.elsevier.com/content/article/PII:S0041008X9898543X?httpAccept=text/plain", "content-type" : "text/plain", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 6, 14 ] ], "date-time" : "2017-06-14T16:51:33Z", "timestamp" : { "$numberLong" : "1497459093000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 1998, 11 ] ] }, "references-count" : 44, "alternative-id" : [ "S0041008X9898543X" ], "URL" : "http://dx.doi.org/10.1006/taap.1998.8543", "ISSN" : [ "0041-008X" ], "issn-type" : [ { "value" : "0041-008X", "type" : "print" } ], "subject" : [ "Toxicology", "Pharmacology" ] }
+{ "_id" : { "$oid" : "5a55170088a035a45bd8490d" }, "indexed" : { "date-parts" : [ [ 2017, 10, 23 ] ], "date-time" : "2017-10-23T14:30:12Z", "timestamp" : { "$numberLong" : "1508769012416" } }, "reference-count" : 37, "publisher" : "Wiley-Blackwell", "issue" : "2", "license" : [ { "URL" : "http://doi.wiley.com/10.1002/tdm_license_1.1", "start" : { "date-parts" : [ [ 2015, 9, 1 ] ], "date-time" : "2015-09-01T00:00:00Z", "timestamp" : { "$numberLong" : "1441065600000" } }, "delay-in-days" : 5356, "content-version" : "tdm" } ], "content-domain" : { "domain" : [], "crossmark-restriction" : false }, "short-container-title" : [ "Am. J. Ind. Med." ], "published-print" : { "date-parts" : [ [ 2001, 2 ] ] }, "DOI" : "10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "type" : "journal-article", "created" : { "date-parts" : [ [ 2002, 8, 25 ] ], "date-time" : "2002-08-25T20:41:50Z", "timestamp" : { "$numberLong" : "1030308110000" } }, "page" : "218-226", "source" : "Crossref", "is-referenced-by-count" : 10, "title" : [ "The work environment impact assessment: A methodologic framework for evaluating health-based interventions" ], "prefix" : "10.1002", "volume" : "39", "author" : [ { "given" : "Beth J.", "family" : "Rosenberg", "affiliation" : [] }, { "given" : "Elizabeth M.", "family" : "Barbeau", "affiliation" : [] }, { "given" : "Rafael", "family" : "Moure-Eraso", "affiliation" : [] }, { "given" : "Charles", "family" : "Levenstein", "affiliation" : [] } ], "member" : "311", "published-online" : { "date-parts" : [ [ 2001 ] ] }, "reference" : [ { "key" : "BIB1", "author" : "Barbeau", "year" : "1998", "unstructured" : "1998. Displaced tobacco workers, public health, and tobacco policy: moving beyond jobs versus health. Doctoral thesis, Department of Work Environment, University of Massachusetts, Lowell." }, { "key" : "BIB2", "author" : "Berberian", "volume" : "37", "first-page" : "126", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB3", "author" : "Bignami", "volume" : "80", "first-page" : "265", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0027-5107(81)90099-3", "doi-asserted-by" : "crossref" }, { "key" : "BIB4", "author" : "Britton", "year" : "1989", "unstructured" : "1989. The post-Alar era dawns chilly for apple growers. Boston Globe. Oct. 25, p. 34." }, { "key" : "BIB5", "author" : "Brusick", "year" : "1976", "unstructured" : "1976. Mutagen and oncogen Study on 1,1-dimethylhydrazine. Prepared for the Aerospace Med. Res. Lab., Aeropsace Med. Div. Airforce Systems Command, Wright- Patterson A.F.B., Dayton OH Litton Bionetics, Inc., Kensington, MD. NTIS AD-A035475." }, { "key" : "BIB6", "author" : "Chemical Marketing Reporter", "year" : "1984", "unstructured" : "Chemical Marketing Reporter. 1984. Uniroyal pesticide to be reviewed by EPA: regulatory action prompted by its toxicity. July 23." }, { "key" : "BIB7", "author" : "Chemical Marketing Reporter", "year" : "1989", "unstructured" : "Chemical Marketing Reporter. 1989. Uniroyal pulls apple pesticide from market, citing controversy. June 5." }, { "key" : "BIB8", "year" : "1990", "unstructured" : "Du Pont Chemical Company. 1990. MSDS No. M0000057, p. 2." }, { "key" : "BIB9", "year" : "1993", "unstructured" : "Farm Chemicals Handbook '93. 1993. Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '93" }, { "key" : "BIB10", "year" : "1985", "unstructured" : "Farm Chemicals Handbook '85. 1985. 
Willoughby, OH: Meister.", "volume-title" : "Farm Chemicals Handbook '85" }, { "key" : "BIB11", "author" : "Federal Register", "year" : "1989", "unstructured" : "Federal Register. 1989. Daminozide: termination of special review of food uses. Vol. 54, No. 216, p. 47482, November 14." }, { "key" : "BIB12", "author" : "Fenske", "first-page" : "729", "year" : "2000", "unstructured" : "2000. Agricultural workers. In: editors. Occupational health: recognizing and preventing work-related disease and injury. 4th ed. Philadelphia: Lippincott Williams and Wilkins, p. 729-748.", "volume-title" : "Occupational health: recognizing and preventing work-related disease and injury" }, { "key" : "BIB13", "author" : "Gibson", "volume" : "5", "first-page" : "24", "year" : "1994", "journal-title" : "New Solutions", "DOI" : "10.2190/NS5.1.g", "doi-asserted-by" : "crossref" }, { "key" : "BIB14", "author" : "Goldenhar", "volume" : "29", "first-page" : "289", "year" : "1996", "journal-title" : "Am J Ind Med", "DOI" : "10.1002/(SICI)1097-0274(199604)29:4<289::AID-AJIM2>3.0.CO;2-K", "doi-asserted-by" : "crossref" }, { "key" : "BIB15", "author" : "Haun", "year" : "1984", "unstructured" : "1984. Inhalation studies of UDMH. Air Force Aerospace Medical Res Lab, TR-85-020." }, { "key" : "BIB16", "author" : "International Agency for Research on Cancer (IARC)", "year" : "1997", "unstructured" : "International Agency for Research on Cancer (IARC). 1997. Evaluation of carcinogen risks to humans: man-made mineral fibres and radon. Lyons, France." }, { "key" : "BIB17", "author" : "Lord", "year" : "1969", "unstructured" : "1969 (May-June). Thoughts on the apple harvest problem. Fruit Notes. U. S. Department of Agriculture, Massachusetts Extension Service." }, { "key" : "BIB18", "author" : "Manning", "first-page" : "34", "year" : "1989", "unstructured" : "Sales Agent for J. P. Sullivan and Co., of Ayer, MA, an apple commission house. In 1989. The post-Alar era dawns chilly for apple growers. Boston Globe Oct. 25 p. 34.", "volume-title" : "The post-Alar era dawns chilly for apple growers" }, { "key" : "BIB19", "author" : "National Cancer Institute", "year" : "1978", "unstructured" : "National Cancer Institute. 1978. Bioassay of daminozide for possible carcinogenicity. Washington, D.C., United State Department of Health, Education and Welfare, Public Health Service (NIC Carcinogenesis Technical Report Series No. 83; DHEW Publication No (NIH 78-1333)." }, { "key" : "BIB20", "author" : "Rogers", "volume" : "89", "first-page" : "321", "year" : "1981", "journal-title" : "Mutat Res", "DOI" : "10.1016/0165-1218(81)90113-0", "doi-asserted-by" : "crossref" }, { "key" : "BIB21", "author" : "Rosenberg", "year" : "1995", "unstructured" : "1995. The best laid bans: the impact of pesticide bans on workers. Doctoral thesis, Department of Work Environment, University of Massachusetts Lowell." 
}, { "key" : "BIB22", "author" : "Rosenberg", "volume" : "6", "first-page" : "34", "year" : "1996", "journal-title" : "New Solutions: A Journal of Environmental and Occupational Health Policy", "DOI" : "10.2190/NS6.2.d", "doi-asserted-by" : "crossref" }, { "key" : "BIB23", "author" : "Rosenberg", "volume" : "8", "first-page" : "365", "year" : "1998", "journal-title" : "New Solutions Environmental Health Policy", "DOI" : "10.2190/A2A1-CT1X-RY6D-RR3M", "doi-asserted-by" : "crossref" }, { "key" : "BIB24", "author" : "Saunders", "volume" : "29", "first-page" : "409", "year" : "1987", "journal-title" : "J Occup Environ Med" }, { "key" : "BIB25", "author" : "Toth", "volume" : "50", "first-page" : "181", "year" : "1973", "journal-title" : "J Natl Cancer Inst", "DOI" : "10.1093/jnci/50.1.181", "doi-asserted-by" : "crossref" }, { "key" : "BIB26", "author" : "Toth", "volume" : "40", "first-page" : "2427", "year" : "1977a", "journal-title" : "Cancer", "DOI" : "10.1002/1097-0142(197711)40:5+<2427::AID-CNCR2820400906>3.0.CO;2-Y", "doi-asserted-by" : "crossref" }, { "key" : "BIB27", "author" : "Toth", "volume" : "37", "first-page" : "3497", "year" : "1977b", "journal-title" : "Cancer Res" }, { "key" : "BIB28", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Integrated Risk Information System (IRIS). Oxamyl. December 9." }, { "key" : "BIB29", "author" : "U.S. Environmental Protection Agency", "year" : "1986", "unstructured" : "U.S. Environmental Protection Agency. 1986. Chemical Fact Sheet Number 26: Daminozide. Office of Pesticides and Toxic Substances, Washington, DC. 10-169." }, { "key" : "BIB30", "author" : "U.S. Environmental Protection Agency", "year" : "1989", "unstructured" : "U.S. Environmental Protection Agency, Office of Pesticide Programs, Office of Pesticides and Toxic Substances. 1989. Daminozide special review technical support document: Preliminary determination to cancel the food uses of Daminozide. Washington, DC: May." }, { "key" : "BIB31", "author" : "U.S. Environmental Protection Agency", "volume" : "54", "first-page" : "10", "year" : "1989", "journal-title" : "Fed Regist." }, { "key" : "BIB32", "author" : "U.S. Environmental Protection Agency", "year" : "1990", "unstructured" : "U.S. Environmental Protection Agency. 1990. Integrated Risk Information System (IRIS). Propargite. May 1." }, { "key" : "BIB33", "author" : "U.S. Environmental Protection Agency", "volume" : "57", "first-page" : "10", "year" : "1992", "journal-title" : "Fed. Regist." }, { "key" : "BIB34", "author" : "U.S. Environmental Protection Agency", "year" : "1993", "unstructured" : "U.S. Environmental Protection Agency, Office of Prevention, Pesticides and Toxic Substances. 1993. R.E.D. Facts, Document number EPA-738-F-93-007. September." }, { "key" : "BIB35", "author" : "U.S. Department of Agriculture", "year" : "1993", "journal-title" : "New England Agricultural Statistics" }, { "key" : "BIB36", "author" : "Warren", "year" : "1992", "unstructured" : "1992. Unanticipated consequences of banning a chemical: the case of Alar. Unpublished manuscript, Department of Work Environment, University of Massachusetts Lowell." }, { "key" : "BIB37", "author" : "Wood", "year" : "1990", "unstructured" : "1990. Memo to Poverty Lane, West Lebanon, New Hampshire, to members of the Risk Assessment/Risk Management Work Group, Keystone National Policy Dialogue on Food Safety, Oct. 26, 1990, cited in Rosenberg, B. 1996." 
} ], "container-title" : [ "American Journal of Industrial Medicine" ], "link" : [ { "URL" : "https://api.wiley.com/onlinelibrary/tdm/v1/articles/10.1002%2F1097-0274(200102)39:2%3C218::AID-AJIM1009%3E3.0.CO;2-4", "content-type" : "unspecified", "content-version" : "vor", "intended-application" : "text-mining" } ], "deposited" : { "date-parts" : [ [ 2017, 8, 4 ] ], "date-time" : "2017-08-04T20:22:16Z", "timestamp" : { "$numberLong" : "1501878136000" } }, "score" : 1, "issued" : { "date-parts" : [ [ 2001 ] ] }, "references-count" : 37, "URL" : "http://dx.doi.org/10.1002/1097-0274(200102)39:2<218::aid-ajim1009>3.0.co;2-4", "relation" : { "cites" : [] }, "ISSN" : [ "0271-3586", "1097-0274" ], "issn-type" : [ { "value" : "0271-3586", "type" : "print" }, { "value" : "1097-0274", "type" : "electronic" } ], "subject" : [ "Public Health, Environmental and Occupational Health" ] }
diff --git a/python/tests/fixtures.py b/python/tests/fixtures.py
new file mode 100644
index 00000000..d3d8c24b
--- /dev/null
+++ b/python/tests/fixtures.py
@@ -0,0 +1,169 @@
+
+import os
+import time
+import json
+import signal
+import pytest
+import fatcat
+import fatcat.sql
+import fatcat.api_client
+from fatcat.models import *
+
+
+@pytest.fixture
+def full_app():
+ fatcat.app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
+ fatcat.app.testing = True
+ fatcat.app.debug = False
+ fatcat.db.session.remove()
+ fatcat.db.drop_all()
+ fatcat.db.create_all()
+ fatcat.sql.populate_db()
+ return fatcat.app
+
+@pytest.fixture
+def app(full_app):
+ return full_app.test_client()
+
+@pytest.fixture
+def rich_app(app):
+ enrichen_test_app(app)
+ return app
+
+
+@pytest.fixture(scope="function")
+def api_client(full_app):
+
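+ # fork a child process to serve the full app over real HTTP on a local
+ # port; the short sleep below gives it time to bind before the client
+ # connects, and teardown kills the child outright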
+ pid = os.fork()
+ if pid == 0:
+ full_app.testing = False
+ full_app.run(host="localhost", port=8444, debug=False)
+ os._exit(0)
+
+ time.sleep(0.2)
+ yield fatcat.api_client.FatCatApiClient("http://localhost:8444")
+ os.kill(pid, signal.SIGKILL)
+
+
+## Helpers ##################################################################
+
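+# Populate the test app with one of each entity type (plus a second, stub
+# release) through the HTTP API, then accept the whole editgroup.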
+def enrichen_test_app(app):
+
+ rv = app.post('/v0/editgroup',
+ data=json.dumps(dict(
+ extra=dict(q=1, u="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ editgroup_id = obj['id']
+
+ rv = app.post('/v0/container',
+ data=json.dumps(dict(
+ name="schmournal",
+ publisher="society of authors",
+ issn="2222-3333",
+ editgroup=editgroup_id,
+ extra=dict(a=2, i="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ container_id = obj['id']
+
+ rv = app.post('/v0/creator',
+ data=json.dumps(dict(
+ name="anon y. mouse",
+ orcid="0000-0002-1825-0097",
+ editgroup=editgroup_id,
+ extra=dict(w=1, q="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ creator_id = obj['id']
+
+ rv = app.post('/v0/work',
+ data=json.dumps(dict(
+ title="dummy work",
+ work_type="book",
+ editgroup=editgroup_id,
+ extra=dict(a=3, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ work_id = obj['id']
+
+ # this stub release will be referenced by the primary release below
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="derivative work",
+ work_type="journal-article",
+ work=work_id,
+ creators=[creator_id],
+ doi="10.1234/58",
+ editgroup=editgroup_id,
+ refs=[
+ dict(stub="some other journal article"),
+ ],
+ extra=dict(f=7, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ stub_release_id = obj['id']
+
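+ # the primary release, with a reference resolving to the stub above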
+ rv = app.post('/v0/release',
+ data=json.dumps(dict(
+ title="dummy work",
+ work_type="book",
+ work=work_id,
+ container=container_id,
+ creators=[creator_id],
+ doi="10.1234/5678",
+ editgroup=editgroup_id,
+ refs=[
+ dict(stub="some book", target=stub_release_id),
+ ],
+ extra=dict(f=7, b="loopy"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ release_id = obj['id']
+
+ rv = app.post('/v0/file',
+ data=json.dumps(dict(
+ sha1="deadbeefdeadbeef",
+ size=1234,
+ releases=[release_id],
+ editgroup=editgroup_id,
+ extra=dict(f=4, b="zing"))),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+ obj = json.loads(rv.data.decode('utf-8'))
+ file_id = obj['id']
+
+ rv = app.post('/v0/editgroup/{}/accept'.format(editgroup_id),
+ headers={"content-type": "application/json"})
+ assert rv.status_code == 200
+
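+# The check_* helpers below assert that serialized entities carry their
+# expected fields; other test modules pull them in via `from fixtures import *`.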
+def check_entity_fields(e):
+ for key in ('rev', 'is_live', 'redirect_id'):
+ assert key in e
+ for key in ('id',):
+ assert e[key] is not None
+
+def check_release(e):
+ for key in ('work', 'release_type'):
+ assert key in e
+ for key in ('title', ):
+ assert e[key] is not None
+ for key in ('refs', 'creators'):
+ assert isinstance(e[key], list)
+
+def check_creator(e):
+ for key in ('name',):
+ assert e[key] is not None
+
+def check_container(e):
+ for key in ('name',):
+ assert e[key] is not None
+
+def check_file(e):
+ for key in ('size', 'sha1'):
+ assert e[key] is not None
diff --git a/python/tests/models.py b/python/tests/models.py
new file mode 100644
index 00000000..98bb6bc7
--- /dev/null
+++ b/python/tests/models.py
@@ -0,0 +1,87 @@
+
+import json
+import unittest
+import tempfile
+import pytest
+import fatcat
+import fatcat.sql
+import fatcat.dummy
+from fatcat.models import *
+from fixtures import *
+
+
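+# Smoke tests: the dummy-data generators and the crossref importer should
+# run against a fresh database without raising.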
+def test_example_works(app):
+ fatcat.dummy.insert_example_works()
+
+def test_random_works(app):
+ fatcat.dummy.insert_random_works()
+
+def test_load_crossref(app):
+ with open('./tests/files/crossref-works.2018-01-21.badsample.json', 'r') as f:
+ raw = [json.loads(l) for l in f.readlines() if len(l) > 3]
+ for obj in raw:
+ fatcat.sql.add_crossref_via_model(obj)
+
+def test_schema_release_rev(app):
+ assert ReleaseRev.query.count() == 0
+ e = {
+ "title": "Bogus title",
+ "release_type": "book",
+ "creators": [],
+ "refs": [],
+ }
+ model = release_rev_schema.load(e)
+ fatcat.db.session.add(model.data)
+ fatcat.db.session.commit()
+ assert ReleaseRev.query.count() == 1
+ model_after = ReleaseRev.query.first()
+ serial = release_rev_schema.dump(model_after).data
+ #check_release(serial)
+ for k in e:
+ assert e[k] == serial[k]
+
+def test_schema_creator_rev(app):
+ assert CreatorRev.query.count() == 0
+ e = {
+ "name": "Robin (Batman)",
+ }
+ model = creator_rev_schema.load(e)
+ fatcat.db.session.add(model.data)
+ fatcat.db.session.commit()
+ assert CreatorRev.query.count() == 1
+ model_after = CreatorRev.query.first()
+ serial = creator_rev_schema.dump(model_after).data
+ check_creator(serial)
+ for k in e.keys():
+ assert e[k] == serial[k]
+
+def test_schema_container_rev(app):
+ assert ContainerRev.query.count() == 0
+ e = {
+ "name": "Papers Monthly",
+ }
+ model = container_rev_schema.load(e)
+ fatcat.db.session.add(model.data)
+ fatcat.db.session.commit()
+ assert ContainerRev.query.count() == 1
+ model_after = ContainerRev.query.first()
+ serial = container_rev_schema.dump(model_after).data
+ check_container(serial)
+ for k in e.keys():
+ assert e[k] == serial[k]
+
+def test_schema_file_rev(app):
+ assert FileRev.query.count() == 0
+ e = {
+ "sha1": "asdf",
+ "size": 6,
+ }
+ model = file_rev_schema.load(e)
+ print(model)
+ fatcat.db.session.add(model.data)
+ fatcat.db.session.commit()
+ assert FileRev.query.count() == 1
+ model_after = FileRev.query.first()
+ serial = file_rev_schema.dump(model_after).data
+ check_file(serial)
+ for k in e.keys():
+ assert e[k] == serial[k]
diff --git a/python/tests/routes.py b/python/tests/routes.py
new file mode 100644
index 00000000..79d97fe4
--- /dev/null
+++ b/python/tests/routes.py
@@ -0,0 +1,67 @@
+
+import json
+import tempfile
+import pytest
+import fatcat
+import fatcat.sql
+from fatcat.models import *
+from fixtures import *
+
+
+def test_static_routes(rich_app):
+ app = rich_app
+
+ for route in ('/health', '/robots.txt', '/', '/about'):
+ rv = app.get(route)
+ assert rv.status_code == 200
+
+ assert app.get("/static/bogus/route").status_code == 404
+
+
+def test_all_views(rich_app):
+ app = rich_app
+
+ for route in ('work', 'release', 'creator', 'container', 'file'):
+ print(route)
+ rv = app.get('/{}/1'.format(route))
+ assert rv.status_code == 200
+
+ rv = app.get('/{}/999999999999'.format(route))
+ assert rv.status_code == 404
+
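+ # /work/random issues a redirect to a concrete entity page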
+ rv = app.get('/work/random')
+ rv = app.get(rv.location)
+ assert rv.status_code == 200
+
+ rv = app.get('/work/random')
+ assert rv.status_code == 302
+
+ rv = app.get('/work/create')
+ assert rv.status_code == 200
+
+ rv = app.get('/release/random')
+ assert rv.status_code == 302
+
+ rv = app.get('/release/1/changelog')
+ assert rv.status_code == 200
+
+ rv = app.get('/editgroup/1')
+ assert rv.status_code == 200
+
+ rv = app.get('/editgroup/99999999')
+ assert rv.status_code == 404
+
+ rv = app.get('/editgroup/current')
+ assert rv.status_code == 302
+
+ rv = app.get('/editor/admin')
+ assert rv.status_code == 200
+
+ rv = app.get('/editor/bizzaro')
+ assert rv.status_code == 404
+
+ rv = app.get('/editor/admin/changelog')
+ assert rv.status_code == 200
+
+ rv = app.get('/editor/bizarro/changelog')
+ assert rv.status_code == 404
diff --git a/python/tests/test_fixtures.py b/python/tests/test_fixtures.py
new file mode 100644
index 00000000..0a0d3176
--- /dev/null
+++ b/python/tests/test_fixtures.py
@@ -0,0 +1,29 @@
+
+import pytest
+import fatcat.api_client
+from fixtures import *
+
+
+def test_rich_app_fixture(rich_app):
+ app = rich_app
+
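+ # the rich fixture accepted exactly one editgroup, containing one of
+ # each entity type and two releases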
+ assert ChangelogEntry.query.count() == 1
+
+ for cls in (WorkIdent, WorkRev, WorkEdit,
+ ContainerIdent, ContainerRev, ContainerEdit,
+ CreatorIdent, CreatorRev, CreatorEdit,
+ FileIdent, FileRev, FileEdit):
+ assert cls.query.count() == 1
+ for cls in (ReleaseIdent, ReleaseRev, ReleaseEdit):
+ assert cls.query.count() == 2
+
+ for cls in (WorkIdent,
+ ContainerIdent,
+ CreatorIdent,
+ FileIdent):
+ assert cls.query.filter(cls.is_live==True).count() == 1
+ assert ReleaseIdent.query.filter(ReleaseIdent.is_live==True).count() == 2
+
+ # the editor's active editgroup should have been closed out by the accept
+ editor = Editor.query.first()
+ assert editor.active_editgroup is None