 proposals/20190911_v04_schema_tweaks.md          | 6
 python/fatcat_import.py                          | 4
 python/fatcat_tools/normal.py                    | 7
 python/tests/files/datacite/datacite_doc_23.json | 2
 rust/src/entity_crud.rs                          | 5
 5 files changed, 18 insertions(+), 6 deletions(-)
diff --git a/proposals/20190911_v04_schema_tweaks.md b/proposals/20190911_v04_schema_tweaks.md
index eaf39474..0e789ad1 100644
--- a/proposals/20190911_v04_schema_tweaks.md
+++ b/proposals/20190911_v04_schema_tweaks.md
@@ -41,5 +41,7 @@ Elasticsearch schema:
 - releases *may* need an "_all" field (or `biblio`?) containing most fields to
   make some search experiences work
 - releases should include volume, issue, pages
-- releases *could* include reference and creator lists, as a faster/cheaper
-  mechanism for doing reverse lookups
+- releases *could* include reference and creator fatcat identifier lists, as a
+  faster/cheaper mechanism for doing reverse lookups
+- doi_prefix
+- doi_registrar (?)
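
To make the proposed additions concrete, here is a rough Python sketch of what the extra fields on a release search document could look like. The field names `doi_prefix` and `doi_registrar` come from the bullets above; the prefix helper, the names of the identifier-list fields, and all example values are illustrative assumptions, not fatcat's actual Elasticsearch transform or mapping.

```python
# Illustrative sketch only: the ident-list field names and the helper below are
# assumptions, not the real fatcat schema code.

def doi_prefix(doi):
    # "10.7916/d86x0cg1" -> "10.7916"
    return doi.split("/", 1)[0] if doi else None

release_search_doc = {
    "doi": "10.7916/d86x0cg1",
    "doi_prefix": doi_prefix("10.7916/d86x0cg1"),  # "10.7916"
    "doi_registrar": "datacite",  # e.g. "crossref" or "datacite"; marked "(?)" above
    # fatcat identifiers only, as a faster/cheaper stand-in for reverse lookups
    "ref_release_ids": ["placeholder-release-ident"],
    "creator_ids": ["placeholder-creator-ident"],
}
```
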
diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index 5b97d3a8..656b9a05 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -42,6 +42,7 @@ def run_pubmed(args):
     pi = PubmedImporter(args.api,
         args.issn_map_file,
         edit_batch_size=args.batch_size,
+        do_updates=args.do_updates,
         lookup_refs=(not args.no_lookup_refs))
     if args.kafka_mode:
         raise NotImplementedError
@@ -276,6 +277,9 @@ def main():
     sub_pubmed.add_argument('--no-lookup-refs',
         action='store_true',
         help="skip lookup of references (PMID or DOI)")
+    sub_pubmed.add_argument('--do-updates',
+        action='store_true',
+        help="update pre-existing release entities")
     sub_pubmed.add_argument('--kafka-mode',
         action='store_true',
         help="consume from kafka topic (not stdin)")
diff --git a/python/fatcat_tools/normal.py b/python/fatcat_tools/normal.py
index a77c5eb0..7b4bd19c 100644
--- a/python/fatcat_tools/normal.py
+++ b/python/fatcat_tools/normal.py
@@ -22,7 +22,10 @@ def clean_doi(raw):
     if not raw:
         return None
     raw = raw.strip()
-    raw = raw.replace('\u2013', '-') # emdash
+    if '\u2013' in raw:
+        # Do not attempt to normalize "en dash"; since FC does not allow
+        # unicode in DOIs, treat this as invalid.
+        return None
     if len(raw.split()) != 1:
         return None
     if raw.startswith("doi:"):
@@ -48,7 +51,7 @@ def test_clean_doi():
assert clean_doi("10.1037//0002-9432.72.1.50") == "10.1037/0002-9432.72.1.50"
assert clean_doi("10.1037/0002-9432.72.1.50") == "10.1037/0002-9432.72.1.50"
assert clean_doi("10.23750/abm.v88i2 -s.6506") == None
- assert clean_doi("10.17167/mksz.2017.2.129–155") == "10.17167/mksz.2017.2.129-155"
+ assert clean_doi("10.17167/mksz.2017.2.129–155") == None
assert clean_doi("http://doi.org/10.1234/asdf ") == "10.1234/asdf"
assert clean_doi("https://dx.doi.org/10.1234/asdf ") == "10.1234/asdf"
assert clean_doi("doi:10.1234/asdf ") == "10.1234/asdf"
diff --git a/python/tests/files/datacite/datacite_doc_23.json b/python/tests/files/datacite/datacite_doc_23.json
index b755f1a5..8eaa8c21 100644
--- a/python/tests/files/datacite/datacite_doc_23.json
+++ b/python/tests/files/datacite/datacite_doc_23.json
@@ -23,7 +23,7 @@
"dateType": "Issued"
}
],
- "doi": "10.7916/d86x0cg1–xxx",
+ "doi": "10.7916/d86x0cg1-xxx",
"isActive": true,
"language": "GERMAN",
"publicationYear": 2017,
diff --git a/rust/src/entity_crud.rs b/rust/src/entity_crud.rs
index c0c9e30b..83dd26c9 100644
--- a/rust/src/entity_crud.rs
+++ b/rust/src/entity_crud.rs
@@ -2395,7 +2395,8 @@ impl EntityCrud for ReleaseEntity {
                 .execute(conn)?;
         }
 
-        // limit is much smaller for abstracts, so don't need to batch
+        // abstracts-per-release limit is much smaller (won't ever hit the 65k row
+        // limit), so don't need to chunk these inserts
         if !abstract_rows.is_empty() {
             // Sort of an "upsert"; only inserts new abstract rows if they don't already exist
             insert_into(abstracts::table)
@@ -2403,6 +2404,8 @@ impl EntityCrud for ReleaseEntity {
                 .on_conflict(abstracts::sha1)
                 .do_nothing()
                 .execute(conn)?;
+        }
+        if !release_abstract_rows.is_empty() {
             insert_into(release_rev_abstract::table)
                 .values(release_abstract_rows)
                 .execute(conn)?;