-rw-r--r--  bbb/__init__.py   |   3
-rw-r--r--  bbb/bbb.py        | 259
-rw-r--r--  bbb/constants.py  | 165
-rw-r--r--  bbb/exceptions.py |  76
4 files changed, 503 insertions, 0 deletions
diff --git a/bbb/__init__.py b/bbb/__init__.py
new file mode 100644
index 0000000..6fa3ac3
--- /dev/null
+++ b/bbb/__init__.py
@@ -0,0 +1,3 @@
+from .bbb import *
+from .exceptions import *
+
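Since bbb/__init__.py re-exports everything from bbb.bbb and bbb.exceptions, the
names defined in the files below are reachable directly off the package. A quick
illustrative check:

    import bbb
    bbb.WikibaseServer       # re-exported from bbb.bbb
    bbb.MissingItemError     # re-exported from bbb.exceptions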
diff --git a/bbb/bbb.py b/bbb/bbb.py
new file mode 100644
index 0000000..d57327f
--- /dev/null
+++ b/bbb/bbb.py
@@ -0,0 +1,259 @@
+
+import requests
+import time
+import warnings
+import dateutil.parser
+
+from .exceptions import *
+from .constants import *
+
+__version__ = (0, 0, 0)
+
+BOT_DEV_URL = "https://github.com/bnewbold/basebasebase"
+BOT_DEV_EMAIL = "bnewbold@robocracy.org"
+BOT_USER_AGENT = "basebasebase/%d.%d (%s; %s)" % (
+ __version__[0], __version__[1], BOT_DEV_URL, BOT_DEV_EMAIL)
+DEFAULT_LANG = "en"
+DEFAULT_SITE = "enwiki"
+
+
+class WikibaseServer:
+ '''
+ This class represents a Wikibase API endpoint. It isn't called a "Site"
+ because that term is ambiguous in this context.
+ '''
+
+ def __init__(self, api_url, lang=DEFAULT_LANG, auth=None, is_bot=False,
+ user_agent=BOT_USER_AGENT, maxlag=5, site=DEFAULT_SITE,
+ throttle_delay=0.):
+ self.api_url = str(api_url)
+        self.lang = lang
+
+ self.throttle_delay = throttle_delay
+ self.site = site
+
+ self.is_bot = is_bot
+ self.session = requests.Session()
+ assert(auth is None or len(auth) == 2)
+ self.session.auth = auth
+ self.session.headers.update({
+ 'User-Agent': user_agent,
+ 'Api-User-Agent': user_agent
+ })
+ if maxlag is not None:
+ self.session.params['maxlag'] = int(maxlag)
+ self.session.params['format'] = 'json'
+ self.session.params['uselang'] = 'user'
+
+ def __repr__(self):
+ return "<WikibaseServer at %s>" % self.api_url
+
+ def _check_api_err(self, action, resp):
+
+ if 'warnings' in resp:
+ for k in resp['warnings']:
+ warnings.warn(str((k, resp['warnings'][k])), Warning)
+
+ if 'error' in resp:
+ try:
+ raise WikibaseAPIError(resp['error']['code'],
+ resp['error']['info'], action)
+ except KeyError:
+ raise WikibaseException(resp['error'])
+
+ def _api_call(self, method, action, params):
+ params['action'] = action
+ if self.throttle_delay:
+ time.sleep(self.throttle_delay)
+ if method.upper() == "GET":
+ resp = self.session.get(self.api_url, params=params)
+ elif method.upper() == "POST":
+            resp = self.session.post(self.api_url, data=params)
+ else:
+ raise ValueError("method must be GET or POST")
+ resp_json = resp.json()
+ self._check_api_err(action, resp_json)
+ return resp_json
+
+ def _post(self, action, params):
+ return self._api_call("POST", action, params)
+
+    def _get(self, action, params=None):
+        return self._api_call("GET", action, params or dict())
+
+ def check(self):
+ # Check that wikibase API calls work (instead of just "action=query")
+ self._get("wbparsevalue",
+ dict(datatype="time", values="1999-12-31|now"))
+
+ def login(self, user=None, passwd=None, is_bot=None, force_http=False):
+ if user is None or passwd is None:
+ raise WikibaseException("Need user and pass to attempt log-in")
+
+ if not force_http and not self.api_url.lower().startswith("https:"):
+ raise WikibaseException("Cowardly refusing to log in without https")
+
+ if is_bot is not None:
+ self.is_bot = bool(is_bot)
+ self.user = user
+ # not keeping password around; don't need it
+
+ # First partially log-in to get a token...
+ self.session.params.pop('assert', None)
+ resp = self._post("login", dict(lgname=self.user, lgpassword=passwd))
+ token = resp['login']['token']
+
+ # Then really log-in
+ resp = self._post("login", dict(lgname=self.user, lgpassword=passwd, lgtoken=token))
+ result = resp['login']['result']
+ if result != 'Success':
+ raise WikibaseAccountError(user, result)
+
+ if self.is_bot:
+ self.session.params['assert'] = 'bot'
+ else:
+ self.session.params['assert'] = 'user'
+
+ # Simple ping to check that we are actually logged in
+ self._get("query")
+
+ def logout(self):
+ self.user = None
+ self.session.params.pop('assert', None)
+ self._get("logout")
+
+    def _get_entities(self, query, expected, site=None, lang=None,
+                      redirects=True, as_titles=False):
+        """
+        NB: doesn't handle case of multiple sites, single title
+        """
+
+ params = {
+ 'sites': site or self.site,
+ 'languages': lang or self.lang,
+            'redirects': "yes" if redirects else "no",
+ }
+ if as_titles:
+ params['titles'] = '|'.join(query)
+ else:
+ params['ids'] = '|'.join(query)
+
+ try:
+ resp = self._get("wbgetentities", params)
+ except WikibaseAPIError as wae:
+ if wae.code == "no-such-entity":
+ raise MissingEntityError(info=wae.info)
+ else:
+ raise wae
+
+        if 'success' not in resp:
+ raise WikibaseException("Expected 'success' in wbgetentities response")
+
+ entities = resp['entities'].values()
+
+ for e in entities:
+ if 'missing' in e or e['type'] != expected:
+ if 'title' in e:
+ raise MissingEntityError(title=e['title'])
+ elif 'id' in e:
+ raise MissingEntityError(id=e['id'])
+ else:
+ raise MissingEntityError()
+
+ return entities
+
+ def get_items(self, query, **kwargs):
+ as_titles = False
+        if isinstance(query[0], int):
+            # Convert a list of ints to QID strings
+            query = ["Q%d" % x for x in query]
+        elif not (isinstance(query[0], str) and query[0][:1] == "Q"
+                  and query[0][1:].isdigit()):
+ # Must be list of titles
+ as_titles = True
+
+ try:
+ entities = self._get_entities(query, as_titles=as_titles, expected='item', **kwargs)
+ except MissingEntityError as wee:
+            # Cast entity error to item error
+ raise MissingItemError(id=wee.id, title=wee.title)
+ items = [WikibaseItem.from_json(e) for e in entities]
+ return items
+
+ def get_item(self, query, **kwargs):
+        return self.get_items((query, ), **kwargs)[0]
+
+
+class WikibaseEntity:
+ '''
+ Base class for WikibaseItem and WikibaseProperty
+ '''
+
+    def __init__(self, dbid=None, rev=None, rev_timestamp=None, labels=None,
+                 descriptions=None, aliases=None, statements=None,
+                 sitelinks=None):
+        self.dbid = dbid
+        self.rev = rev
+        self.rev_timestamp = rev_timestamp
+        # Avoid shared mutable defaults; each entity gets its own containers
+        self.labels = labels if labels is not None else []
+        self.descriptions = descriptions if descriptions is not None else []
+        self.aliases = aliases if aliases is not None else []
+        self.statements = statements if statements is not None else []
+        self.sitelinks = sitelinks if sitelinks is not None else []
+
+    @classmethod
+    def from_json(cls, j):
+        we = cls(
+            dbid=j['id'],
+            rev=j['lastrevid'],
+            rev_timestamp=dateutil.parser.parse(j['modified']),
+            labels=j.get('labels', {}),
+            aliases=j.get('aliases', {}),
+            sitelinks=j.get('sitelinks', {}),
+            descriptions=j.get('descriptions', {}),
+        )
+        # 'claims' maps a property id to a list of claim JSON blobs
+        for prop_claims in j.get('claims', {}).values():
+            for c in prop_claims:
+                we.statements.append(WikibaseStatement.from_json(c))
+        return we
+
+ def add_statement(self, statement):
+ raise NotImplementedError
+
+ def add_label(self, label):
+ raise NotImplementedError
+
+ def add_alias(self, label):
+ raise NotImplementedError
+
+class WikibaseItem(WikibaseEntity):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def __repr__(self):
+        return "<WikibaseItem %s>" % self.dbid
+
+class WikibaseProperty(WikibaseEntity):
+
+    def __init__(self, *args, **kwargs):
+        self.datatype = kwargs.pop('datatype', None)
+ super().__init__(*args, **kwargs)
+
+ def __repr__(self):
+        return "<WikibaseProperty %s>" % self.dbid
+
+class WikibaseStatement:
+
+    def __init__(self, property=None, value=None, qualifiers=None,
+                 references=None, rank='normal'):
+        self.property = property
+        self.value = value
+        self.qualifiers = qualifiers if qualifiers is not None else []
+        self.references = references if references is not None else []
+        self.rank = rank
+
+    @classmethod
+    def from_json(cls, j):
+        # Minimal parse of a wbgetentities claim blob; keeps the mainsnak
+        # property id and datavalue, plus qualifiers/references verbatim
+        snak = j.get('mainsnak', {})
+        return cls(property=snak.get('property'),
+                   value=snak.get('datavalue', {}).get('value'),
+                   qualifiers=j.get('qualifiers', {}),
+                   references=j.get('references', []),
+                   rank=j.get('rank', 'normal'))
+
+    def __repr__(self):
+        if self.property:
+            return "<WikibaseStatement %s is %s>" % (self.property, self.value)
+        else:
+            return "<WikibaseStatement (empty)>"
+
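A minimal usage sketch of the client above. The endpoint URL, item ids and
throttle value are illustrative placeholders; the calls assume the methods
behave as defined in this commit:

    from bbb import WikibaseServer, MissingItemError

    server = WikibaseServer("https://www.wikidata.org/w/api.php",
                            throttle_delay=1.0)
    server.check()                      # fails loudly if wbparsevalue is unavailable

    items = server.get_items([42, 64])  # ints are expanded to "Q42", "Q64"
    for item in items:
        print(item.dbid, len(item.statements))

    try:
        server.get_item("Q7")           # placeholder id
    except MissingItemError as e:
        print("no such item:", e)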
diff --git a/bbb/constants.py b/bbb/constants.py
new file mode 100644
index 0000000..ac62f98
--- /dev/null
+++ b/bbb/constants.py
@@ -0,0 +1,165 @@
+
+datatypes = ['commonsMedia', 'globe-coordinate', 'monolingual-text',
+ 'quantity', 'string', 'time', 'url', 'timestamp', 'item', 'property']
+
+# time: time, timezone(minutes), before(int), after(int), precision(enum), calendarmodel(url)
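+# An example of such a time datavalue as the API serializes it (illustrative
+# values; precision 11 means "day" and calendarmodel names the calendar used):
+#   {"time": "+1999-12-31T00:00:00Z", "timezone": 0, "before": 0, "after": 0,
+#    "precision": 11, "calendarmodel": "http://www.wikidata.org/entity/Q1985727"}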
+
+ranks = ['preferred', 'normal', 'deprecated']
+
+wmf_language_codes = ('aa', 'ab', 'ace', 'ady', 'ady-cyrl', 'aeb', 'aeb-arab',
+ 'aeb-latn', 'af', 'ak', 'aln', 'als', 'am', 'an', 'ang', 'anp', 'ar',
+ 'arc', 'arn', 'arq', 'ary', 'arz', 'as', 'ase', 'ast', 'av', 'avk', 'awa',
+ 'ay', 'az', 'azb', 'ba', 'bar', 'bat-smg', 'bbc', 'bbc-latn', 'bcc', 'bcl',
+ 'be', 'be-tarask', 'be-x-old', 'bg', 'bgn', 'bh', 'bho', 'bi', 'bjn', 'bm',
+ 'bn', 'bo', 'bpy', 'bqi', 'br', 'brh', 'bs', 'bto', 'bug', 'bxr', 'ca',
+ 'cbk-zam', 'cdo', 'ce', 'ceb', 'ch', 'cho', 'chr', 'chy', 'ckb', 'co',
+ 'cps', 'cr', 'crh', 'crh-cyrl', 'crh-latn', 'cs', 'csb', 'cu', 'cv', 'cy',
+ 'da', 'de', 'de-at', 'de-ch', 'de-formal', 'din', 'diq', 'dsb', 'dtp',
+ 'dty', 'dv', 'dz', 'ee', 'egl', 'el', 'eml', 'en', 'en-ca', 'en-gb', 'eo',
+ 'es', 'et', 'eu', 'ext', 'fa', 'ff', 'fi', 'fit', 'fiu-vro', 'fj', 'fo',
+ 'fr', 'frc', 'frp', 'frr', 'fur', 'fy', 'ga', 'gag', 'gan', 'gan-hans',
+ 'gan-hant', 'gd', 'gl', 'glk', 'gn', 'gom', 'gom-deva', 'gom-latn', 'got',
+ 'grc', 'gsw', 'gu', 'gv', 'ha', 'hak', 'haw', 'he', 'hi', 'hif',
+ 'hif-latn', 'hil', 'ho', 'hr', 'hrx', 'hsb', 'ht', 'hu', 'hy', 'hz', 'ia',
+ 'id', 'ie', 'ig', 'ii', 'ik', 'ike-cans', 'ike-latn', 'ilo', 'inh', 'io',
+ 'is', 'it', 'iu', 'ja', 'jam', 'jbo', 'jut', 'jv', 'ka', 'kaa', 'kab',
+ 'kbd', 'kbd-cyrl', 'kg', 'khw', 'ki', 'kiu', 'kj', 'kk', 'kk-arab',
+ 'kk-cn', 'kk-cyrl', 'kk-kz', 'kk-latn', 'kk-tr', 'kl', 'km', 'kn', 'ko',
+ 'ko-kp', 'koi', 'kr', 'krc', 'kri', 'krj', 'ks', 'ks-arab', 'ks-deva',
+ 'ksh', 'ku', 'ku-arab', 'ku-latn', 'kv', 'kw', 'ky', 'la', 'lad', 'lb',
+ 'lbe', 'lez', 'lfn', 'lg', 'li', 'lij', 'liv', 'lmo', 'ln', 'lo', 'loz',
+ 'lrc', 'lt', 'ltg', 'lus', 'luz', 'lv', 'lzh', 'lzz', 'mai', 'map-bms',
+ 'mdf', 'mg', 'mh', 'mhr', 'mi', 'min', 'mk', 'ml', 'mn', 'mo', 'mr', 'mrj',
+ 'ms', 'mt', 'mus', 'mwl', 'my', 'myv', 'mzn', 'na', 'nah', 'nan', 'nap',
+ 'nb', 'nds', 'nds-nl', 'ne', 'new', 'ng', 'niu', 'nl', 'nl-informal', 'nn',
+ 'no', 'nod', 'nov', 'nrm', 'nso', 'nv', 'ny', 'oc', 'olo', 'om', 'or',
+ 'os', 'ota', 'pa', 'pag', 'pam', 'pap', 'pcd', 'pdc', 'pdt', 'pfl', 'pi',
+ 'pih', 'pl', 'pms', 'pnb', 'pnt', 'prg', 'ps', 'pt', 'pt-br', 'qu', 'qug',
+ 'rgn', 'rif', 'rm', 'rmy', 'rn', 'ro', 'roa-rup', 'roa-tara', 'ru', 'rue',
+ 'rup', 'ruq', 'ruq-cyrl', 'ruq-latn', 'rw', 'rwr', 'sa', 'sah', 'sat',
+ 'sc', 'scn', 'sco', 'sd', 'sdc', 'sdh', 'se', 'sei', 'ses', 'sg', 'sgs',
+ 'sh', 'shi', 'shi-latn', 'shi-tfng', 'si', 'simple', 'sk', 'sl', 'sli',
+ 'sm', 'sma', 'sn', 'so', 'sq', 'sr', 'sr-ec', 'sr-el', 'srn', 'ss', 'st',
+ 'stq', 'su', 'sv', 'sw', 'szl', 'ta', 'tcy', 'te', 'tet', 'tg', 'tg-cyrl',
+ 'tg-latn', 'th', 'ti', 'tk', 'tl', 'tly', 'tn', 'to', 'tokipona', 'tpi',
+ 'tr', 'tru', 'ts', 'tt', 'tt-cyrl', 'tt-latn', 'tum', 'tw', 'ty', 'tyv',
+ 'tzm', 'udm', 'ug', 'ug-arab', 'ug-latn', 'uk', 'ur', 'uz', 'uz-cyrl',
+ 'uz-latn', 've', 'vec', 'vep', 'vi', 'vls', 'vmf', 'vo', 'vot', 'vro',
+ 'wa', 'war', 'wo', 'wuu', 'xal', 'xh', 'xmf', 'yi', 'yo', 'yue', 'za',
+ 'zea', 'zh', 'zh-classical', 'zh-cn', 'zh-hans', 'zh-hant', 'zh-hk',
+ 'zh-min-nan', 'zh-mo', 'zh-my', 'zh-sg', 'zh-tw', 'zh-yue', 'zu')
+
+wmf_sites = ('aawikibooks', 'afwikibooks', 'afwikiquote', 'akwikibooks',
+ 'alswikibooks', 'alswikiquote', 'amwikiquote', 'angwikibooks',
+ 'angwikiquote', 'angwikisource', 'arwikibooks', 'arwikinews',
+ 'arwikiquote', 'arwikisource', 'astwikibooks', 'astwikiquote',
+ 'aswikibooks', 'aswikisource', 'aywikibooks', 'azwikibooks', 'azwikiquote',
+ 'azwikisource', 'bawikibooks', 'bewikibooks', 'bewikiquote',
+ 'bewikisource', 'bgwikibooks', 'bgwikinews', 'bgwikiquote', 'bgwikisource',
+ 'biwikibooks', 'bmwikibooks', 'bmwikiquote', 'bnwikibooks', 'bnwikisource',
+ 'bowikibooks', 'brwikiquote', 'brwikisource', 'bswikibooks', 'bswikinews',
+ 'bswikiquote', 'bswikisource', 'cawikibooks', 'cawikinews', 'cawikiquote',
+ 'cawikisource', 'chwikibooks', 'commonswiki', 'cowikibooks', 'cowikiquote',
+ 'crwikiquote', 'cswikibooks', 'cswikinews', 'cswikiquote', 'cswikisource',
+ 'cvwikibooks', 'cywikibooks', 'cywikiquote', 'cywikisource', 'dawikibooks',
+ 'dawikiquote', 'dawikisource', 'dewikibooks', 'dewikinews', 'dewikiquote',
+ 'dewikisource', 'dewikivoyage', 'elwikibooks', 'elwikinews', 'elwikiquote',
+ 'elwikisource', 'elwikivoyage', 'enwikibooks', 'enwikinews', 'enwikiquote',
+ 'enwikisource', 'enwikivoyage', 'eowikibooks', 'eowikinews', 'eowikiquote',
+ 'eowikisource', 'eswikibooks', 'eswikinews', 'eswikiquote', 'eswikisource',
+ 'eswikivoyage', 'etwikibooks', 'etwikiquote', 'etwikisource',
+ 'euwikibooks', 'euwikiquote', 'fawikibooks', 'fawikinews', 'fawikiquote',
+ 'fawikisource', 'fawikivoyage', 'fiwikibooks', 'fiwikinews', 'fiwikiquote',
+ 'fiwikisource', 'fowikisource', 'frwikibooks', 'frwikinews', 'frwikiquote',
+ 'frwikisource', 'frwikivoyage', 'fywikibooks', 'gawikibooks',
+ 'gawikiquote', 'glwikibooks', 'glwikiquote', 'glwikisource', 'gnwikibooks',
+ 'gotwikibooks', 'guwikibooks', 'guwikiquote', 'guwikisource',
+ 'hewikibooks', 'hewikinews', 'hewikiquote', 'hewikisource', 'hewikivoyage',
+ 'hiwikibooks', 'hiwikiquote', 'hrwikibooks', 'hrwikiquote', 'hrwikisource',
+ 'htwikisource', 'huwikibooks', 'huwikinews', 'huwikiquote', 'huwikisource',
+ 'hywikibooks', 'hywikiquote', 'hywikisource', 'iawikibooks', 'idwikibooks',
+ 'idwikiquote', 'idwikisource', 'iewikibooks', 'iswikibooks', 'iswikiquote',
+ 'iswikisource', 'itwikibooks', 'itwikinews', 'itwikiquote', 'itwikisource',
+ 'itwikivoyage', 'jawikibooks', 'jawikinews', 'jawikiquote', 'jawikisource',
+ 'kawikibooks', 'kawikiquote', 'kkwikibooks', 'kkwikiquote', 'kmwikibooks',
+ 'knwikibooks', 'knwikiquote', 'knwikisource', 'kowikibooks', 'kowikinews',
+ 'kowikiquote', 'kowikisource', 'krwikiquote', 'kswikibooks', 'kswikiquote',
+ 'kuwikibooks', 'kuwikiquote', 'kwwikiquote', 'kywikibooks', 'kywikiquote',
+ 'lawikibooks', 'lawikiquote', 'lawikisource', 'lbwikibooks', 'lbwikiquote',
+ 'liwikibooks', 'liwikiquote', 'liwikisource', 'lnwikibooks', 'ltwikibooks',
+ 'ltwikiquote', 'ltwikisource', 'lvwikibooks', 'mediawikiwiki', 'metawiki',
+ 'mgwikibooks', 'miwikibooks', 'mkwikibooks', 'mkwikisource', 'mlwikibooks',
+ 'mlwikiquote', 'mlwikisource', 'mnwikibooks', 'mrwikibooks', 'mrwikiquote',
+ 'mrwikisource', 'mswikibooks', 'mywikibooks', 'nahwikibooks',
+ 'nawikibooks', 'nawikiquote', 'ndswikibooks', 'ndswikiquote',
+ 'newikibooks', 'nlwikibooks', 'nlwikinews', 'nlwikiquote', 'nlwikisource',
+ 'nlwikivoyage', 'nnwikiquote', 'nowikibooks', 'nowikinews', 'nowikiquote',
+ 'nowikisource', 'ocwikibooks', 'orwikisource', 'pawikibooks',
+ 'plwikibooks', 'plwikinews', 'plwikiquote', 'plwikisource', 'plwikivoyage',
+ 'pswikibooks', 'ptwikibooks', 'ptwikinews', 'ptwikiquote', 'ptwikisource',
+ 'ptwikivoyage', 'quwikibooks', 'quwikiquote', 'rmwikibooks', 'rowikibooks',
+ 'rowikinews', 'rowikiquote', 'rowikisource', 'rowikivoyage', 'ruwikibooks',
+ 'ruwikinews', 'ruwikiquote', 'ruwikisource', 'ruwikivoyage',
+ 'sahwikisource', 'sawikibooks', 'sawikiquote', 'sawikisource',
+ 'sdwikinews', 'sewikibooks', 'simplewikibooks', 'simplewikiquote',
+ 'siwikibooks', 'skwikibooks', 'skwikiquote', 'skwikisource', 'slwikibooks',
+ 'slwikiquote', 'slwikisource', 'specieswiki', 'sqwikibooks', 'sqwikinews',
+ 'sqwikiquote', 'srwikibooks', 'srwikinews', 'srwikiquote', 'srwikisource',
+ 'suwikibooks', 'suwikiquote', 'svwikibooks', 'svwikinews', 'svwikiquote',
+ 'svwikisource', 'svwikivoyage', 'swwikibooks', 'tawikibooks', 'tawikinews',
+ 'tawikiquote', 'tawikisource', 'tewikibooks', 'tewikiquote',
+ 'tewikisource', 'tgwikibooks', 'thwikibooks', 'thwikinews', 'thwikiquote',
+ 'thwikisource', 'tkwikibooks', 'tkwikiquote', 'tlwikibooks', 'trwikibooks',
+ 'trwikinews', 'trwikiquote', 'trwikisource', 'ttwikibooks', 'ttwikiquote',
+ 'ugwikibooks', 'ugwikiquote', 'ukwikibooks', 'ukwikinews', 'ukwikiquote',
+ 'ukwikisource', 'ukwikivoyage', 'urwikibooks', 'urwikiquote',
+ 'uzwikibooks', 'uzwikiquote', 'vecwikisource', 'viwikibooks',
+ 'viwikiquote', 'viwikisource', 'viwikivoyage', 'vowikibooks',
+ 'vowikiquote', 'wawikibooks', 'wikidatawiki', 'wowikiquote', 'xhwikibooks',
+ 'yiwikisource', 'yowikibooks', 'zawikibooks', 'zawikiquote',
+ 'zh_min_nanwikibooks', 'zh_min_nanwikiquote', 'zh_min_nanwikisource',
+ 'zhwikibooks', 'zhwikinews', 'zhwikiquote', 'zhwikisource', 'zhwikivoyage',
+ 'zuwikibooks', 'aawiki', 'abwiki', 'acewiki', 'afwiki', 'akwiki',
+ 'alswiki', 'amwiki', 'anwiki', 'angwiki', 'arwiki', 'arcwiki', 'arzwiki',
+ 'aswiki', 'astwiki', 'avwiki', 'aywiki', 'azwiki', 'bawiki', 'barwiki',
+ 'bat_smgwiki', 'bclwiki', 'bewiki', 'be_x_oldwiki', 'bgwiki', 'bhwiki',
+ 'biwiki', 'bjnwiki', 'bmwiki', 'bnwiki', 'bowiki', 'bpywiki', 'brwiki',
+ 'bswiki', 'bugwiki', 'bxrwiki', 'cawiki', 'cbk_zamwiki', 'cdowiki',
+ 'cewiki', 'cebwiki', 'chwiki', 'chowiki', 'chrwiki', 'chywiki', 'ckbwiki',
+ 'cowiki', 'crwiki', 'crhwiki', 'cswiki', 'csbwiki', 'cuwiki', 'cvwiki',
+ 'cywiki', 'dawiki', 'dewiki', 'diqwiki', 'dsbwiki', 'dvwiki', 'dzwiki',
+ 'eewiki', 'elwiki', 'emlwiki', 'enwiki', 'eowiki', 'eswiki', 'etwiki',
+ 'euwiki', 'extwiki', 'fawiki', 'ffwiki', 'fiwiki', 'fiu_vrowiki', 'fjwiki',
+ 'fowiki', 'frwiki', 'frpwiki', 'frrwiki', 'furwiki', 'fywiki', 'gawiki',
+ 'gagwiki', 'ganwiki', 'gdwiki', 'glwiki', 'glkwiki', 'gnwiki', 'gotwiki',
+ 'guwiki', 'gvwiki', 'hawiki', 'hakwiki', 'hawwiki', 'hewiki', 'hiwiki',
+ 'hifwiki', 'howiki', 'hrwiki', 'hsbwiki', 'htwiki', 'huwiki', 'hywiki',
+ 'hzwiki', 'iawiki', 'idwiki', 'iewiki', 'igwiki', 'iiwiki', 'ikwiki',
+ 'ilowiki', 'iowiki', 'iswiki', 'itwiki', 'iuwiki', 'jawiki', 'jbowiki',
+ 'jvwiki', 'kawiki', 'kaawiki', 'kabwiki', 'kbdwiki', 'kgwiki', 'kiwiki',
+ 'kjwiki', 'kkwiki', 'klwiki', 'kmwiki', 'knwiki', 'kowiki', 'koiwiki',
+ 'krwiki', 'krcwiki', 'kswiki', 'kshwiki', 'kuwiki', 'kvwiki', 'kwwiki',
+ 'kywiki', 'lawiki', 'ladwiki', 'lbwiki', 'lbewiki', 'lezwiki', 'lgwiki',
+ 'liwiki', 'lijwiki', 'lmowiki', 'lnwiki', 'lowiki', 'ltwiki', 'ltgwiki',
+ 'lvwiki', 'maiwiki', 'map_bmswiki', 'mdfwiki', 'mgwiki', 'mhwiki',
+ 'mhrwiki', 'miwiki', 'minwiki', 'mkwiki', 'mlwiki', 'mnwiki', 'mowiki',
+ 'mrwiki', 'mrjwiki', 'mswiki', 'mtwiki', 'muswiki', 'mwlwiki', 'mywiki',
+ 'myvwiki', 'mznwiki', 'nawiki', 'nahwiki', 'napwiki', 'ndswiki',
+ 'nds_nlwiki', 'newiki', 'newwiki', 'ngwiki', 'nlwiki', 'nnwiki', 'nowiki',
+ 'novwiki', 'nrmwiki', 'nsowiki', 'nvwiki', 'nywiki', 'ocwiki', 'omwiki',
+ 'orwiki', 'oswiki', 'pawiki', 'pagwiki', 'pamwiki', 'papwiki', 'pcdwiki',
+ 'pdcwiki', 'pflwiki', 'piwiki', 'pihwiki', 'plwiki', 'pmswiki', 'pnbwiki',
+ 'pntwiki', 'pswiki', 'ptwiki', 'quwiki', 'rmwiki', 'rmywiki', 'rnwiki',
+ 'rowiki', 'roa_rupwiki', 'roa_tarawiki', 'ruwiki', 'ruewiki', 'rwwiki',
+ 'sawiki', 'sahwiki', 'scwiki', 'scnwiki', 'scowiki', 'sdwiki', 'sewiki',
+ 'sgwiki', 'shwiki', 'siwiki', 'simplewiki', 'skwiki', 'slwiki', 'smwiki',
+ 'snwiki', 'sowiki', 'sqwiki', 'srwiki', 'srnwiki', 'sswiki', 'stwiki',
+ 'stqwiki', 'suwiki', 'svwiki', 'swwiki', 'szlwiki', 'tawiki', 'tewiki',
+ 'tetwiki', 'tgwiki', 'thwiki', 'tiwiki', 'tkwiki', 'tlwiki', 'tnwiki',
+ 'towiki', 'tpiwiki', 'trwiki', 'tswiki', 'ttwiki', 'tumwiki', 'twwiki',
+ 'tywiki', 'tyvwiki', 'udmwiki', 'ugwiki', 'ukwiki', 'urwiki', 'uzwiki',
+ 'vewiki', 'vecwiki', 'vepwiki', 'viwiki', 'vlswiki', 'vowiki', 'wawiki',
+ 'warwiki', 'wowiki', 'wuuwiki', 'xalwiki', 'xhwiki', 'xmfwiki', 'yiwiki',
+ 'yowiki', 'zawiki', 'zeawiki', 'zhwiki', 'zh_classicalwiki',
+ 'zh_min_nanwiki', 'zh_yuewiki', 'zuwiki', 'lrcwiki', 'gomwiki', 'azbwiki')
+
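The tuples above are not referenced elsewhere in this commit; one way they could
be used is cheap client-side validation before issuing an API call. A sketch
(the helper name is hypothetical):

    from bbb.constants import wmf_language_codes, wmf_sites

    def validate_target(site, lang):
        # Fail fast on obvious typos instead of waiting for an API error
        if site not in wmf_sites:
            raise ValueError("unknown site id: %s" % site)
        if lang not in wmf_language_codes:
            raise ValueError("unknown language code: %s" % lang)

    validate_target("enwiki", "en")     # passes silently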
diff --git a/bbb/exceptions.py b/bbb/exceptions.py
new file mode 100644
index 0000000..9460387
--- /dev/null
+++ b/bbb/exceptions.py
@@ -0,0 +1,76 @@
+
+import re
+
+class WikibaseException(Exception):
+ """Generic base class for Wikibase API errors"""
+
+ def __init__(self, err):
+ self.error = err
+
+ def __repr__(self):
+ return self.__unicode__()
+
+ def __str__(self):
+ return self.__unicode__()
+
+ def __unicode__(self):
+ return self.error
+
+class WikibaseAccountError(WikibaseException):
+
+ def __init__(self, user, error):
+ self.user = user
+ self.error = error
+
+ def __unicode__(self):
+ return "User '%s' had error: %s" % (self.user, self.error)
+
+class WikibaseAPIError(WikibaseException):
+
+ def __init__(self, code, info, action):
+ self.code = code
+ self.info = info
+ self.action = action
+
+ def __unicode__(self):
+ return "Wikibase server returned error for action '%s': %s" % (
+ self.action, self.code)
+
+class MissingEntityError(WikibaseException):
+
+ def __init__(self, id=None, title=None, info=None):
+ self.what = "Entity"
+ self.id = id
+ self.title = title
+ if info:
+            found_id = re.search(r"\(Invalid id: (.*)\)", info)
+ if found_id:
+ self.id = found_id.groups()[0]
+            found_title = re.search(r"\(Invalid title: (.*)\)", info)
+ if found_title:
+ self.title = found_title.groups()[0]
+
+ def __unicode__(self):
+ if self.id:
+ return "Couldn't find %s with id: %s" % (self.what, self.id)
+ elif self.title:
+ return "Couldn't find %s with title: %s" % (self.what, self.title)
+ else:
+ return "Couldn't find %s (unknown)" % self.what
+
+class MissingItemError(MissingEntityError):
+
+ def __init__(self, **kwargs):
+ super().__init__(**kwargs)
+ self.what = "Item"
+
+class MissingPropertyError(MissingEntityError):
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.what = "Property"
+
+class APITimeoutError(WikibaseException):
+
+ def __init__(self, query):
+        self.query = query
+
+ def __unicode__(self):
+ return "HTTP (or HTTPS) request timed out: %s" % self.query
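The exception hierarchy above lets callers separate "entity does not exist" from
genuine API failures. A sketch (endpoint URL is a placeholder):

    from bbb import (WikibaseServer, WikibaseException,
                     WikibaseAPIError, MissingItemError)

    server = WikibaseServer("https://www.wikidata.org/w/api.php")
    try:
        item = server.get_item("Q42")
    except MissingItemError as e:
        print("no such item:", e)
    except WikibaseAPIError as e:
        print("API rejected action '%s': %s" % (e.action, e.code))
    except WikibaseException as e:
        print("other Wikibase failure:", e)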