-rw-r--r--  python/tests/api_containers.py  |  48
-rw-r--r--  python/tests/api_creators.py    |  44
-rw-r--r--  python/tests/api_files.py       |  52
-rw-r--r--  python/tests/api_filesets.py    |  56
-rw-r--r--  python/tests/api_misc.py        |   8
-rw-r--r--  python/tests/api_releases.py    | 103
-rw-r--r--  python/tests/api_webcaptures.py |  59
7 files changed, 312 insertions, 58 deletions
diff --git a/python/tests/api_containers.py b/python/tests/api_containers.py
new file mode 100644
index 00000000..674ae3b8
--- /dev/null
+++ b/python/tests/api_containers.py
@@ -0,0 +1,48 @@
+
+import json
+import pytest
+from copy import copy
+
+from fatcat_client import *
+from fatcat_client.rest import ApiException
+from fixtures import *
+
+
+def test_container(api):
+    eg = quick_eg(api)
+
+    # all the fields!
+    c1 = ContainerEntity(
+        name="some container name",
+        container_type="journal",
+        publisher="some container publisher",
+        issnl="1234-567X",
+        wikidata_qid="Q954248",
+        extra=dict(a=1, b=2),
+    )
+
+    c1edit = api.create_container(c1, editgroup_id=eg.editgroup_id)
+    api.accept_editgroup(eg.editgroup_id)
+    c2 = api.get_container(c1edit.ident)
+
+    # check that fields match
+    assert c1.name == c2.name
+    assert c1.container_type == c2.container_type
+    assert c1.publisher == c2.publisher
+    assert c1.issnl == c2.issnl
+    assert c1.wikidata_qid == c2.wikidata_qid
+    assert c1.extra == c2.extra
+
+    # expansion
+    # TODO: via release
+    # lookup
+    # TODO: via issnl; but need to generate random identifiers
+
+def test_container_examples(api):
+
+    api.lookup_container(issnl='1549-1277')
+
+    c1 = api.get_container('aaaaaaaaaaaaaeiraaaaaaaaam')
+    assert c1.name == "PLOS Medicine"
+    assert c1.issnl == "1549-1277"
+
diff --git a/python/tests/api_creators.py b/python/tests/api_creators.py
new file mode 100644
index 00000000..7443675b
--- /dev/null
+++ b/python/tests/api_creators.py
@@ -0,0 +1,44 @@
+
+import json
+import pytest
+from copy import copy
+
+from fatcat_client import *
+from fatcat_client.rest import ApiException
+from fixtures import *
+
+
+def test_creators(api):
+    eg = quick_eg(api)
+
+    # all the fields!
+    c1 = CreatorEntity(
+        display_name="Emma Smith",
+        given_name="emma",
+        surname="smith",
+        orcid="0000-0002-1825-0097",
+        wikidata_qid="Q9542248",
+        extra=dict(a=1, b=5),
+    )
+
+    c1edit = api.create_creator(c1, editgroup_id=eg.editgroup_id)
+    api.accept_editgroup(eg.editgroup_id)
+    c2 = api.get_creator(c1edit.ident)
+
+    # check that fields match
+    assert c1.display_name == c2.display_name
+    assert c1.given_name == c2.given_name
+    assert c1.surname == c2.surname
+    assert c1.orcid == c2.orcid
+    assert c1.wikidata_qid == c2.wikidata_qid
+    assert c1.extra == c2.extra
+
+    # expansion
+    # TODO: via release
+    # lookup
+    # TODO: via issnl; but need to generate random identifiers
+
+def test_creators_examples(api):
+    # TODO: aaaaaaaaaaaaaircaaaaaaaaam
+
+    api.lookup_creator(orcid='0000-0003-3118-6859')
diff --git a/python/tests/api_files.py b/python/tests/api_files.py
new file mode 100644
index 00000000..033538ef
--- /dev/null
+++ b/python/tests/api_files.py
@@ -0,0 +1,52 @@
+
+import json
+import pytest
+from copy import copy
+
+from fatcat_client import *
+from fatcat_client.rest import ApiException
+from fixtures import *
+
+
+def test_file(api):
+
+    eg = quick_eg(api)
+
+    # all the fields!
+    f1 = FileEntity(
+        size=89238,
+        md5="7ce6615b2a5904939576d9567bd5f68e",
+        sha1="027e7ed3ea1a40e92dd2657a1e3c992b5dc45dd2",
+        sha256="f1f4f18a904e76818863ccbc6141fce92b0dcb47b0d6041aec98bc6806e393c3",
+        mimetype="application/pdf",
+        extra=dict(a=2, b=5),
+        urls=[
+            FileEntityUrls(url="https://web.archive.org/web/12345542/something.com/blah.pdf", rel="webarchive"),
+        ],
+        release_ids=[],
+    )
+
+    f1edit = api.create_file(f1, editgroup_id=eg.editgroup_id)
+    api.accept_editgroup(eg.editgroup_id)
+    f2 = api.get_file(f1edit.ident)
+
+    # check that fields match
+    assert f1.size == f2.size
+    assert f1.md5 == f2.md5
+    assert f1.sha1 == f2.sha1
+    assert f1.sha256 == f2.sha256
+    assert f1.mimetype == f2.mimetype
+    assert f1.extra == f2.extra
+    assert f1.urls == f2.urls
+    assert f1.release_ids == f2.release_ids
+
+    # expansion
+    # TODO: via release
+    # lookup
+    # TODO: via hashes; but need to generate random?
+
+def test_file_examples(api):
+
+    api.lookup_file(sha256='ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362')
+
+    f1 = api.get_file('aaaaaaaaaaaaamztaaaaaaaaam')
diff --git a/python/tests/api_filesets.py b/python/tests/api_filesets.py
index 58ee7b5c..966b85ca 100644
--- a/python/tests/api_filesets.py
+++ b/python/tests/api_filesets.py
@@ -14,30 +14,31 @@ def test_fileset(api):
     r1 = ReleaseEntity(title="test fileset release")
     r1edit = api.create_release(r1, editgroup_id=eg.editgroup_id)
 
-    fs1 = FilesetEntity()
-    fs1.manifest = [
-        FilesetEntityManifest(
-            path="data/thing.tar.gz",
-            size=54321,
-            md5="540da3ea6e448d8dfb057c05225f853a",
-            sha1="1dab6a0e110f9b5d70b18db0abf051f7f93faf06",
-            sha256="c7b49f3e84cd1b7cb0b0e3e9f632b7be7e21b4dc229df23331f880a8a7dfa75a",
-            extra={"a": 1, "b": 3},
-        ),
-        FilesetEntityManifest(
-            path="README.md",
-            size=54210,
-            md5="5f83592b5249671719bbed6ce91ecfa8",
-            sha1="455face3598611458efe1f072e58624790a67266",
-            sha256="429bcafa4d3d0072d5b2511e12c85c1aac1d304011d1c406da14707f7b9cd905",
-            extra={"x": 1, "y": "q"},
-        ),
-    ]
-    fs1.urls = [
-        FileEntityUrls(url="https://archive.org/download/fileset-123/", rel="repository"),
-        FileEntityUrls(url="https://humble-host.com/~user123/dataset/", rel="web"),
-    ]
-    fs1.release_ids = [r1edit.ident]
+    fs1 = FilesetEntity(
+        manifest = [
+            FilesetEntityManifest(
+                path="data/thing.tar.gz",
+                size=54321,
+                md5="540da3ea6e448d8dfb057c05225f853a",
+                sha1="1dab6a0e110f9b5d70b18db0abf051f7f93faf06",
+                sha256="c7b49f3e84cd1b7cb0b0e3e9f632b7be7e21b4dc229df23331f880a8a7dfa75a",
+                extra={"a": 1, "b": 3},
+            ),
+            FilesetEntityManifest(
+                path="README.md",
+                size=54210,
+                md5="5f83592b5249671719bbed6ce91ecfa8",
+                sha1="455face3598611458efe1f072e58624790a67266",
+                sha256="429bcafa4d3d0072d5b2511e12c85c1aac1d304011d1c406da14707f7b9cd905",
+                extra={"x": 1, "y": "q"},
+            ),
+        ],
+        urls = [
+            FileEntityUrls(url="https://archive.org/download/fileset-123/", rel="repository"),
+            FileEntityUrls(url="https://humble-host.com/~user123/dataset/", rel="web"),
+        ],
+        release_ids = [r1edit.ident],
+    )
 
     fs1edit = api.create_fileset(fs1, editgroup_id=eg.editgroup_id)
     api.accept_editgroup(eg.editgroup_id)
@@ -52,6 +53,13 @@ def test_fileset(api):
     r1 = api.get_release(r1edit.ident, expand="filesets")
     assert r1.filesets[0].manifest == fs1.manifest
 
+def test_fileset_examples(api):
+    fs3 = api.get_fileset('aaaaaaaaaaaaaztgaaaaaaaaam')
+
+    assert fs3.urls[0].url == 'http://other-personal-blog.name/dataset/'
+    assert fs3.urls[1].rel == 'archive'
+    assert fs3.manifest[1].md5 == 'f4de91152c7ab9fdc2a128f962faebff'
+    assert fs3.manifest[1].extra['mimetype'] == 'application/gzip'
 
 def test_bad_fileset(api):
 
diff --git a/python/tests/api_misc.py b/python/tests/api_misc.py
index 3510ea82..0a0f16da 100644
--- a/python/tests/api_misc.py
+++ b/python/tests/api_misc.py
@@ -8,14 +8,6 @@ from fatcat_client.rest import ApiException
 from fixtures import *
 
 
-def test_lookups(api):
-
-    api.lookup_creator(orcid='0000-0003-3118-6859')
-    api.lookup_container(issnl='1549-1277')
-    api.lookup_file(sha256='ffc1005680cb620eec4c913437dfabbf311b535cfe16cbaeb2faec1f92afc362')
-    api.lookup_release(pmid='54321')
-    api.lookup_release(isbn13='978-3-16-148410-0')
-
 def test_lookup_hide_extend(api):
 
     r = api.lookup_release(doi='10.1371/journal.pmed.0020124')
diff --git a/python/tests/api_releases.py b/python/tests/api_releases.py
new file mode 100644
index 00000000..d5b31ad3
--- /dev/null
+++ b/python/tests/api_releases.py
@@ -0,0 +1,103 @@
+
+import json
+import pytest
+import datetime
+from copy import copy
+
+from fatcat_client import *
+from fatcat_client.rest import ApiException
+from fixtures import *
+
+
+def test_release(api):
+
+    eg = quick_eg(api)
+
+    # all the fields!
+    r1 = ReleaseEntity(
+        title="some title",
+        original_title="оригинальное название",
+        release_type="post-weblog",
+        release_status="pre-print",
+        #release_date=datetime.datetime.utcnow(),
+        release_year=2015,
+        doi="10.5555/12345678",
+        pmid="12345",
+        pmcid="PMC4321",
+        wikidata_qid="Q1234",
+        isbn13="978-3-16-148410-0",
+        core_id="187348",
+        arxiv_id="aslkdjfh",
+        jstor_id="8328424",
+        volume="84",
+        issue="XII",
+        pages="4-99",
+        publisher="some publisher",
+        language="en",
+        license_slug="CC-0",
+        extra=dict(a=1, b=2),
+        contribs=[],
+        refs=[],
+        abstracts=[
+            ReleaseEntityAbstracts(
+                content="this is some abstract",
+                mimetype="text/plain",
+                lang="en"),
+            ReleaseEntityAbstracts(
+                content="this is some other abstract",
+                mimetype="text/plain",
+                lang="de"),
+        ],
+    )
+
+    r1edit = api.create_release(r1, editgroup_id=eg.editgroup_id)
+    api.accept_editgroup(eg.editgroup_id)
+    r2 = api.get_release(r1edit.ident)
+
+    # check that fields match
+    assert r1.title == r2.title
+    assert r1.original_title == r2.original_title
+    assert r1.release_type == r2.release_type
+    assert r1.release_date == r2.release_date
+    assert r1.release_year == r2.release_year
+    assert r1.doi == r2.doi
+    assert r1.pmid == r2.pmid
+    assert r1.pmcid == r2.pmcid
+    assert r1.wikidata_qid == r2.wikidata_qid
+    assert r1.isbn13 == r2.isbn13
+    assert r1.core_id == r2.core_id
+    assert r1.arxiv_id == r2.arxiv_id
+    assert r1.jstor_id == r2.jstor_id
+    assert r1.volume == r2.volume
+    assert r1.issue == r2.issue
+    assert r1.pages == r2.pages
+    assert r1.publisher == r2.publisher
+    assert r1.language == r2.language
+    assert r1.license_slug == r2.license_slug
+    assert r1.extra == r2.extra
+
+    for i in range(len(r1.abstracts)):
+        assert r1.abstracts[i].content == r2.abstracts[i].content
+        assert r1.abstracts[i].mimetype == r2.abstracts[i].mimetype
+        assert r1.abstracts[i].lang == r2.abstracts[i].lang
+    for i in range(len(r1.contribs)):
+        assert r1.contribs[i] == r2.contribs[i]
+    for i in range(len(r1.refs)):
+        assert r1.refs[i] == r2.refs[i]
+
+    # expansion
+    # TODO: via work
+    # lookup
+    # TODO: via all; but need to generate random identifiers
+
+def test_release_examples(api):
+
+    api.lookup_release(pmid='54321')
+    api.lookup_release(isbn13='978-3-16-148410-0')
+
+    r1 = api.get_release('aaaaaaaaaaaaarceaaaaaaaaai')
+    assert r1.title == "bigger example"
+    assert len(r1.refs) == 5
+    assert r1.contribs[0].role == "editor"
+    assert r1.abstracts[0].mimetype == "application/xml+jats"
+
diff --git a/python/tests/api_webcaptures.py b/python/tests/api_webcaptures.py
index 6af32bc8..dc1754b3 100644
--- a/python/tests/api_webcaptures.py
+++ b/python/tests/api_webcaptures.py
@@ -19,33 +19,33 @@ def test_webcapture(api):
         original_url = "http://example.site",
         #timestamp = "2012-01-02T03:04:05Z",
         timestamp = datetime.datetime.now(datetime.timezone.utc),
+        cdx = [
+            WebcaptureEntityCdx(
+                surt="site,example,)/data/thing.tar.gz",
+                #timestamp="2012-01-02T03:04:05Z",
+                timestamp=datetime.datetime.now(datetime.timezone.utc),
+                url="http://example.site/data/thing.tar.gz",
+                mimetype="application/gzip",
+                status_code=200,
+                sha1="455face3598611458efe1f072e58624790a67266",
+                sha256="c7b49f3e84cd1b7cb0b0e3e9f632b7be7e21b4dc229df23331f880a8a7dfa75a",
+            ),
+            WebcaptureEntityCdx(
+                surt="site,example,)/README.md",
+                #timestamp="2012-01-02T03:04:05Z",
+                timestamp=datetime.datetime.now(datetime.timezone.utc),
+                url="http://example.site/README.md",
+                mimetype="text/markdown",
+                status_code=200,
+                sha1="455face3598611458efe1f072e58624790a67266",
+                sha256="429bcafa4d3d0072d5b2511e12c85c1aac1d304011d1c406da14707f7b9cd905",
+            ),
+        ],
+        archive_urls = [
+            FileEntityUrls(rel="wayback", url="https://web.archive.org/web/"),
+        ],
+        release_ids = [r1edit.ident],
     )
-    wc1.cdx = [
-        WebcaptureEntityCdx(
-            surt="site,example,)/data/thing.tar.gz",
-            #timestamp="2012-01-02T03:04:05Z",
-            timestamp=datetime.datetime.now(datetime.timezone.utc),
-            url="http://example.site/data/thing.tar.gz",
-            mimetype="application/gzip",
-            status_code=200,
-            sha1="455face3598611458efe1f072e58624790a67266",
-            sha256="c7b49f3e84cd1b7cb0b0e3e9f632b7be7e21b4dc229df23331f880a8a7dfa75a",
-        ),
-        WebcaptureEntityCdx(
-            surt="site,example,)/README.md",
-            #timestamp="2012-01-02T03:04:05Z",
-            timestamp=datetime.datetime.now(datetime.timezone.utc),
-            url="http://example.site/README.md",
-            mimetype="text/markdown",
-            status_code=200,
-            sha1="455face3598611458efe1f072e58624790a67266",
-            sha256="429bcafa4d3d0072d5b2511e12c85c1aac1d304011d1c406da14707f7b9cd905",
-        ),
-    ]
-    wc1.archive_urls = [
-        FileEntityUrls(rel="wayback", url="https://web.archive.org/web/"),
-    ]
-    wc1.release_ids = [r1edit.ident]
 
     wc1edit = api.create_webcapture(wc1, editgroup_id=eg.editgroup_id)
     api.accept_editgroup(eg.editgroup_id)
@@ -68,6 +68,13 @@ def test_webcapture(api):
     print(r1)
     assert r1.webcaptures[0].cdx == wc1.cdx
 
+def test_webcapture_examples(api):
+    wc3 = api.get_webcapture('aaaaaaaaaaaaa53xaaaaaaaaam')
+
+    assert wc3.cdx[0].surt == 'org,asheesh)/'
+    assert wc3.cdx[1].sha1 == 'a637f1d27d9bcb237310ed29f19c07e1c8cf0aa5'
+    assert wc3.archive_urls[1].rel == 'warc'
+
 
 def test_bad_webcapture(api):