path: root/python/fatcat_worker.py
Diffstat (limited to 'python/fatcat_worker.py')
-rwxr-xr-x  python/fatcat_worker.py  19
1 file changed, 18 insertions(+), 1 deletion(-)
diff --git a/python/fatcat_worker.py b/python/fatcat_worker.py
index bfb87a72..03167a3a 100755
--- a/python/fatcat_worker.py
+++ b/python/fatcat_worker.py
@@ -6,7 +6,7 @@ import datetime
 import raven
 
 from fatcat_tools import public_api
-from fatcat_tools.workers import ChangelogWorker, EntityUpdatesWorker, ElasticsearchReleaseWorker, ElasticsearchContainerWorker
+from fatcat_tools.workers import ChangelogWorker, EntityUpdatesWorker, ElasticsearchReleaseWorker, ElasticsearchContainerWorker, ElasticsearchChangelogWorker
 
 # Yep, a global. Gets DSN from `SENTRY_DSN` environment variable
 sentry_client = raven.Client()
@@ -47,6 +47,13 @@ def run_elasticsearch_container(args):
         elasticsearch_index=args.elasticsearch_index)
     worker.run()
 
+def run_elasticsearch_changelog(args):
+    consume_topic = "fatcat-{}.changelog".format(args.env)
+    worker = ElasticsearchChangelogWorker(args.kafka_hosts, consume_topic,
+        elasticsearch_backend=args.elasticsearch_backend,
+        elasticsearch_index=args.elasticsearch_index)
+    worker.run()
+
 def main():
     parser = argparse.ArgumentParser(
         formatter_class=argparse.ArgumentDefaultsHelpFormatter)
@@ -92,6 +99,16 @@ def main():
         help="elasticsearch index to push into",
         default="fatcat_container")
 
+    sub_elasticsearch_changelog = subparsers.add_parser('elasticsearch-changelog',
+        help="consume changelog kafka feed, transform and push to search")
+    sub_elasticsearch_changelog.set_defaults(func=run_elasticsearch_changelog)
+    sub_elasticsearch_changelog.add_argument('--elasticsearch-backend',
+        help="elasticsearch backend to connect to",
+        default="http://localhost:9200")
+    sub_elasticsearch_changelog.add_argument('--elasticsearch-index',
+        help="elasticsearch index to push into",
+        default="fatcat_changelog")
+
     args = parser.parse_args()
     if not args.__dict__.get("func"):
        print("tell me what to do!")