author     Bryan Newbold <bnewbold@robocracy.org>  2018-11-20 12:23:05 -0800
committer  Bryan Newbold <bnewbold@robocracy.org>  2018-11-20 12:23:05 -0800
commit     069a30366b87415efa4a0c8ae804dea46bcdfc67 (patch)
tree       b5b202f6a5d398b63adbf9978e636bd0fc7c5e67 /python
parent     07f9d46ea06ccee867369b759c00c6bfe9b91b13 (diff)
correct kafka topic names
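
This commit renames the Kafka feed topics to include the harvest protocol:
"api-" for the Crossref and Datacite REST API harvesters, "oaipmh-" for the
arxiv, pubmed, and DOAJ OAI-PMH harvesters; each feed's companion "-state"
topic is renamed to match. A minimal sketch of the convention (this helper is
hypothetical; fatcat_harvest.py inlines the format strings, as the diff below
shows):

# Hypothetical helper illustrating the topic naming convention applied in
# this commit; the actual script inlines these format strings.
def harvest_topics(env, protocol, source):
    """Return (produce_topic, state_topic) for a harvest worker, e.g.
    harvest_topics("prod", "api", "crossref")
      -> ("fatcat-prod.api-crossref", "fatcat-prod.api-crossref-state")
    """
    base = "fatcat-{}.{}-{}".format(env, protocol, source)
    return base, base + "-state"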
Diffstat (limited to 'python')
-rwxr-xr-x  python/fatcat_harvest.py  24
-rwxr-xr-x  python/fatcat_import.py    2
2 files changed, 13 insertions, 13 deletions
diff --git a/python/fatcat_harvest.py b/python/fatcat_harvest.py
index 6ecc3ec6..09fde584 100755
--- a/python/fatcat_harvest.py
+++ b/python/fatcat_harvest.py
@@ -10,8 +10,8 @@ from fatcat_tools.harvest import HarvestCrossrefWorker, HarvestDataciteWorker,\
 def run_crossref(args):
     worker = HarvestCrossrefWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.crossref".format(args.env),
-        state_topic="fatcat-{}.crossref-state".format(args.env),
+        produce_topic="fatcat-{}.api-crossref".format(args.env),
+        state_topic="fatcat-{}.api-crossref-state".format(args.env),
         contact_email=args.contact_email,
         start_date=args.start_date,
         end_date=args.end_date)
@@ -20,8 +20,8 @@ def run_crossref(args):
 def run_datacite(args):
     worker = HarvestDataciteWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.datacite".format(args.env),
-        state_topic="fatcat-{}.datacite-state".format(args.env),
+        produce_topic="fatcat-{}.api-datacite".format(args.env),
+        state_topic="fatcat-{}.api-datacite-state".format(args.env),
         contact_email=args.contact_email,
         start_date=args.start_date,
         end_date=args.end_date)
@@ -30,8 +30,8 @@ def run_datacite(args):
 def run_arxiv(args):
     worker = HarvestArxivWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.arxiv".format(args.env),
-        state_topic="fatcat-{}.arxiv-state".format(args.env),
+        produce_topic="fatcat-{}.oaipmh-arxiv".format(args.env),
+        state_topic="fatcat-{}.oaipmh-arxiv-state".format(args.env),
         start_date=args.start_date,
         end_date=args.end_date)
     worker.run()
@@ -39,8 +39,8 @@ def run_arxiv(args):
 def run_pubmed(args):
     worker = HarvestPubmedWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.pubmed".format(args.env),
-        state_topic="fatcat-{}.pubmed-state".format(args.env),
+        produce_topic="fatcat-{}.oaipmh-pubmed".format(args.env),
+        state_topic="fatcat-{}.oaipmh-pubmed-state".format(args.env),
         start_date=args.start_date,
         end_date=args.end_date)
     worker.run()
@@ -48,8 +48,8 @@ def run_pubmed(args):
 def run_doaj_article(args):
     worker = HarvestDoajArticleWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.doaj-article".format(args.env),
-        state_topic="fatcat-{}.doaj-article-state".format(args.env),
+        produce_topic="fatcat-{}.oaipmh-doaj-article".format(args.env),
+        state_topic="fatcat-{}.oaipmh-doaj-article-state".format(args.env),
         start_date=args.start_date,
         end_date=args.end_date)
     worker.run()
@@ -57,8 +57,8 @@ def run_doaj_article(args):
 def run_doaj_journal(args):
     worker = HarvestDoajJournalWorker(
         kafka_hosts=args.kafka_hosts,
-        produce_topic="fatcat-{}.doaj-journal".format(args.env),
-        state_topic="fatcat-{}.doaj-journal-state".format(args.env),
+        produce_topic="fatcat-{}.oaipmh-doaj-journal".format(args.env),
+        state_topic="fatcat-{}.oaipmh-doaj-journal-state".format(args.env),
         start_date=args.start_date,
         end_date=args.end_date)
     worker.run()
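
Each worker above takes a produce_topic for harvested records plus a matching
state_topic; the worker implementations live in fatcat_tools.harvest, not in
this diff, so the following is only a sketch of the likely two-topic pattern
(checkpointing progress per day), assuming the confluent_kafka client:

from confluent_kafka import Producer

# Sketch only: assumes confluent_kafka and day-granularity checkpoints; the
# real harvest loop is in fatcat_tools.harvest and may differ.
def harvest_day(kafka_hosts, produce_topic, state_topic, day, records):
    producer = Producer({'bootstrap.servers': kafka_hosts})
    for rec in records:
        producer.produce(produce_topic, rec)        # harvested metadata
    producer.produce(state_topic, day.isoformat())  # mark this day complete
    producer.flush()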
diff --git a/python/fatcat_import.py b/python/fatcat_import.py
index 555d4083..aad4ee57 100755
--- a/python/fatcat_import.py
+++ b/python/fatcat_import.py
@@ -11,7 +11,7 @@ def run_crossref(args):
         args.extid_map_file, create_containers=(not args.no_create_containers))
     if args.kafka_mode:
         consumer = make_kafka_consumer(
-            args.kafka_hosts, args.kafka_env, "crossref", "fatcat-import")
+            args.kafka_hosts, args.kafka_env, "api-crossref", "fatcat-import")
         fci.process_batch(consumer, size=args.batch_size, decode_kafka=True)
     else:
         fci.process_batch(args.json_file, size=args.batch_size)
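
The call site passes only the topic suffix ("api-crossref"), so
make_kafka_consumer must expand it to the full "fatcat-{env}.{suffix}" name,
matching the producer side in fatcat_harvest.py; that is why the suffix had
to be corrected here as well. Its body is not part of this diff; a sketch of
the assumed shape, using confluent_kafka:

from confluent_kafka import Consumer

# Assumed shape of make_kafka_consumer (not shown in this diff): expand the
# suffix to the full topic name and subscribe a consumer group to it.
def make_kafka_consumer(hosts, env, topic_suffix, group):
    topic_name = "fatcat-{}.{}".format(env, topic_suffix)
    consumer = Consumer({
        'bootstrap.servers': hosts,
        'group.id': group,
        'auto.offset.reset': 'earliest',
    })
    consumer.subscribe([topic_name])
    return consumer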