1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
|
import sys
import json
import itertools
import fatcat_client
from .common import FatcatImporter
def or_none(s):
    """Normalize an empty CSV field to None.

    Args:
        s: a raw CSV cell value (str or None).

    Returns:
        The value unchanged, or None if it was None or empty.
    """
    # Single truthiness check covers both None and "" (idiomatic, and
    # avoids len() raising on non-sized inputs).
    if not s:
        return None
    return s
def truthy(s):
    """Interpret a CSV boolean-ish string.

    Args:
        s: a raw CSV cell value (str or None), case-insensitive.

    Returns:
        True / False for recognized spellings, otherwise None
        (including for None input or unrecognized strings).
    """
    if s is None:
        return None
    lowered = s.lower()
    if lowered in ('false', 'f', 'no', 'n', '0'):
        return False
    return True if lowered in ('true', 't', 'yes', 'y', '1') else None
class IssnImporter(FatcatImporter):
    """
    Imports journal metadata ("containers") by ISSN, currently from a custom
    (data munged) .csv file format

    CSV format (generated from git.archive.org/webgroup/oa-journal-analysis):

    ISSN-L,in_doaj,in_road,in_norwegian,in_crossref,title,publisher,url,lang,ISSN-print,ISSN-electronic,doi_count,has_doi,is_oa,is_kept,publisher_size,url_live,url_live_status,url_live_final_status,url_live_final_url,url_live_status_simple,url_live_final_status_simple,url_domain,gwb_pdf_count
    """

    def parse_issn_row(self, row):
        """Convert one parsed CSV row (a dict) into a ContainerEntity.

        Returns None when the row lacks a usable title or ISSN-L.
        """
        name = or_none(row['title'])
        issnl = or_none(row['ISSN-L'])
        # Both fields are mandatory; bail out early on incomplete rows.
        if name is None or issnl is None:
            return None
        # Boolean flags and secondary identifiers ride along in the
        # free-form 'extra' blob rather than as first-class fields.
        extra = {
            'in_doaj': truthy(row['in_doaj']),
            'in_road': truthy(row['in_road']),
            'in_norwegian': truthy(row['in_norwegian']),
            'language': or_none(row['lang']),
            'url': or_none(row['url']),
            'ISSNp': or_none(row['ISSN-print']),
            'ISSNe': or_none(row['ISSN-electronic']),
            'is_oa': truthy(row['is_oa']),
            'is_kept': truthy(row['is_kept']),
        }
        return fatcat_client.ContainerEntity(
            issnl=issnl,
            name=name,
            publisher=or_none(row['publisher']),
            abbrev=None,
            coden=None,
            extra=extra)

    def create_row(self, row, editgroup=None):
        """Create a single container via one API call; skips invalid rows."""
        entity = self.parse_issn_row(row)
        if entity is None:
            return
        self.api.create_container(entity, editgroup=editgroup)
        self.counts['insert'] += 1

    def create_batch(self, batch, editgroup=None):
        """Reads and processes in batches (not API-call-per-line)"""
        parsed = [
            self.parse_issn_row(raw)
            for raw in batch
            if raw is not None
        ]
        # Drop rows that failed validation before hitting the API.
        entities = [entity for entity in parsed if entity is not None]
        self.api.create_container_batch(entities, autoaccept="true", editgroup=editgroup)
        self.counts['insert'] += len(entities)
|