author     Bryan Newbold <bnewbold@robocracy.org>    2018-03-21 18:46:23 -0700
committer  Bryan Newbold <bnewbold@robocracy.org>    2018-03-21 18:46:23 -0700
commit     d66fed95534d7a443e2cf84e9290d0c9bdf72311 (patch)
tree       8e4144cd5ff6b4668661468871c8049697d6a05b
parent     e425bdfc377a64613a1cb9995b40be7008440ca2 (diff)
download   fatcat-d66fed95534d7a443e2cf84e9290d0c9bdf72311.tar.gz
           fatcat-d66fed95534d7a443e2cf84e9290d0c9bdf72311.zip
WIP
-rw-r--r--  README.md           |   4
-rwxr-xr-x  backend/backend.py  | 130
-rw-r--r--  plan.txt            |  14
3 files changed, 144 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index ea03c0a5..184b6f26 100644
--- a/README.md
+++ b/README.md
@@ -17,3 +17,7 @@ Use `pipenv` (which you can install with `pip`).
pipenv shell
python3 fatcat/api.py
+
+Run tests:
+
+ pipenv run nosetests3 backend/ webface/
diff --git a/backend/backend.py b/backend/backend.py
index 8f9322ab..a39ae790 100755
--- a/backend/backend.py
+++ b/backend/backend.py
@@ -5,7 +5,16 @@ from flask import Flask, render_template, send_from_directory, request, \
from sqlalchemy import create_engine, MetaData, Table
app = Flask(__name__)
-app.config.from_pyfile('config.py')
+app.config.from_object(__name__)
+
+# Load default config and override config from an environment variable
+app.config.update(dict(
+ DATABASE_URI='sqlite:///:memory:',
+ SECRET_KEY='development-key',
+ USERNAME='admin',
+ PASSWORD='admin'
+))
+app.config.from_envvar('FATCAT_BACKEND_CONFIG', silent=True)
metadata = MetaData()
@@ -54,7 +63,7 @@ release_revision = Table('release_revision', metadata,
Column('edit_id', ForeignKey('edit.id')),
Column('extra_json', ForeignKey('extra_json.sha1'), nullable=True),
- Column('work', ForeignKey('work_id.id')),
+ #Column('work', ForeignKey('work_id.id')),
Column('container', ForeignKey('container_id.id')),
Column('title', String),
Column('license', String), # TODO: oa status foreign key
@@ -114,6 +123,30 @@ container_revision = Table('container_revision', metadata,
Column('issn', String), # TODO: identifier table
)
+file_id = Table('file_id', metadata,
+ Column('id', Integer, primary_key=True, autoincrement=False),
+ Column('revision', ForeignKey('file_revision.id')),
+ )
+
+file_revision = Table('file_revision', metadata,
+ Column('id', Integer, primary_key=True, autoincrement=True),
+ Column('previous', ForeignKey('file_revision.id'), nullable=True),
+ Column('state', Enum(IdState)),
+ Column('redirect_id', ForeignKey('file_id.id'), nullable=True),
+ Column('edit_id', ForeignKey('edit.id')),
+ Column('extra_json', ForeignKey('extra_json.sha1'), nullable=True),
+
+ Column('size', Integer),
+ Column('sha1', String), # TODO: hash table... only or in addition?
+ Column('url', String), # TODO: URL table
+ )
+
+release_file = Table('release_file', metadata,
+ Column('id', Integer, primary_key=True, autoincrement=True),
+ Column('release_rev', ForeignKey('release_revision.id'), nullable=False),
+ Column('file_id', ForeignKey('file_id.id'), nullable=False),
+ )
+
edit = Table('edit', metadata,
Column('id', Integer, primary_key=True, autoincrement=True),
Column('edit_group', ForeignKey('edit_group.id')),
@@ -143,6 +176,82 @@ extra_json = Table('extra_json', metadata,
Column('json', String),
)
+## Helpers ##################################################################
+
+def is_fcid(s):
+ return len(s) == 26 and s.isalnum()
+
+# XXX: why isn't this running?
+def test_is_fcid():
+
+ for s in ("rzga5b9cd7efgh04iljk", "RZGA5B9CD7Efgh04iljk"):
+ assert is_fcid(s) is True
+
+ for s in ("rzga59cd7efgh04iljk", "rzga.b9cd7efgh04iljk", "",
+ "rzga5b9cd7efgh04iljkz"):
+ assert is_fcid(s) is False
+
+def release_list(id_list):
+ # XXX: MOCK
+ l = []
+ for i in id_list:
+ l.append({
+ "id": i,
+ "rev": "8fkj28fjhqkjdhkjkj9s",
+ "previous": "0021jdfjhqkjdhkjkj9s",
+ "state": "normal",
+ "redirect_id": None,
+ "edit_id": "932582iuhckjvssk",
+ "extra_json": None,
+
+ "container_id": "0021jdfjhqkjdhkjkj9s",
+ "title": "Mocks are great",
+ "license": "CC-0",
+ "release_type": "publication",
+ "date": "2017-11-22",
+ "doi": "10.1000/953kj.sdfkj",
+ })
+ return l
+
+def release_hydrate(release_id):
+ e = release_list([release_id])[0]
+ e['container'] = container_hydrate(e['container_id'])
+ e.pop('container_id')
+ e['creators'] = [creator_hydrate(c['id']) for c in e['creator_ids']]
+ return e
+
+def work_list(id_list):
+ """This is the fast/light version: populates entity-specific lists (eg,
+ identifiers), and any primaries, but doesn't transclude all other
+ entities"""
+ if len(id_list) == 0:
+ return []
+
+ l = []
+ for i in id_list:
+ l.append({
+ "id": "rzga5b9cd7efgh04iljk",
+ "rev": "8fkj28fjhqkjdhkjkj9s",
+ "previous": "0021jdfjhqkjdhkjkj9s",
+ "state": "normal",
+ "redirect_id": None,
+ "edit_id": "932582iuhckjvssk",
+ "extra_json": None,
+
+ "title": "Mocks are great",
+ "contributors": [],
+ "work_type": "journal-article",
+ "date": None,
+
+ "primary_release": release_list(["8fkj28fjhqkjdhkjkj9s"])[0],
+ })
+ return l
+
+def work_hydrate(work_id):
+ """This is the heavy/slowversion: everything from get_works(), but also
+ recursively transcludes single-linked entities"""
+ # XXX:
+ return work_list([work_id])[0]
## API Methods ##############################################################
@@ -150,6 +259,15 @@ extra_json = Table('extra_json', metadata,
def health():
return jsonify({'ok': True})
+
+@app.route('/v0/work/<work_id>', methods=['GET'])
+def work_get(work_id):
+ if not is_fcid(work_id):
+ print("not fcid: {}".format(work_id))
+ return abort(404)
+ work = work_hydrate(work_id)
+ return jsonify(work)
+
## Entry Point ##############################################################
def main():
@@ -165,13 +283,17 @@ def main():
default=8040,
help="listen on this port")
parser.add_argument('--database-uri',
- default="sqlite:///test.sqlite",
+ default=app.config['DATABASE_URI'],
help="sqlalchemy database string")
args = parser.parse_args()
app.config['DATABASE_URI'] = args.database_uri
- engine = create_engine(app.config['DATABASE_URI'], convert_unicode=True)
+ app.conn = engine = create_engine(app.config['DATABASE_URI'], convert_unicode=True)
metadata.create_all(bind=engine)
+
+ # XXX:
+ db_test_data()
+
app.run(debug=args.debug, host=args.host, port=args.port)
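
The config block added above loads hard-coded defaults and then lets the
FATCAT_BACKEND_CONFIG environment variable point at an override file, via
Flask's from_envvar(). A minimal override file is just a Python module with
uppercase keys; the filename and values below are illustrative only, not part
of this commit:

    # prod_settings.py -- hypothetical override file
    DATABASE_URI = 'sqlite:///fatcat_prod.sqlite'
    SECRET_KEY = 'replace-with-a-long-random-string'

    # start the backend with the override applied:
    #   FATCAT_BACKEND_CONFIG=prod_settings.py python3 backend/backend.py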
diff --git a/plan.txt b/plan.txt
index b7f05277..9e8d957b 100644
--- a/plan.txt
+++ b/plan.txt
@@ -1,6 +1,9 @@
+- backend test setup: generate temporary database, insert rows (?)
+
backend/api:
- first-rev schema
+- json_blob table (by sha1)
- create work, release, etc
- get by ID
@@ -22,3 +25,14 @@ webface:
files
people
containers
+
+#### Open Questions
+
+How to create multiple cross-referenced entities at the same time? Eg, a work
+and a release, with the release referencing the work: the work_id isn't
+allocated/indicated until merge-time. As a work-around, could have a temporary
+work_rev_id column which gets overridden during merge (a sketch follows below).
+
+Mechanism for skipping the edit group stage: propose always generating edit
+rows with appropriate metadata, but allowing certain bots to skip creating an
+edit group.
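
The work_rev_id work-around mentioned in the open questions above could look
roughly like the sketch below: the release revision carries a temporary pointer
to the work revision, since the permanent work_id does not exist until merge
time. This is only an illustration in the same SQLAlchemy style as backend.py;
the table and column names are hypothetical and not part of this commit.

    from sqlalchemy import MetaData, Table, Column, Integer

    metadata = MetaData()
    release_revision_sketch = Table('release_revision_sketch', metadata,
        Column('id', Integer, primary_key=True, autoincrement=True),
        # permanent link, only known once the work_id has been allocated:
        Column('work', Integer, nullable=True),
        # temporary link to the in-progress work revision; resolved to the
        # newly allocated work_id (and cleared) when the edit group merges:
        Column('work_rev', Integer, nullable=True),
        )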