author     Bryan Newbold <bnewbold@archive.org>   2020-11-06 18:32:35 -0800
committer  Bryan Newbold <bnewbold@archive.org>   2020-11-06 18:32:35 -0800
commit     175019c96fced3e21d0f60ea1a4a37da6b8872ac (patch)
tree       f42fbbe9c8ac06ae9eb06373ab9eec96d2b3a177 /python/persist_tool.py
parent     b0b66c20c6ffb9d8acc626068964d7dfd5d3bcdc (diff)
parent     47ca1a273912c8836630b0930b71a4e66fd2c85b (diff)
Merge branch 'bnewbold-html-ingest'
Diffstat (limited to 'python/persist_tool.py')
-rwxr-xr-x  python/persist_tool.py  18
1 file changed, 9 insertions(+), 9 deletions(-)
```diff
diff --git a/python/persist_tool.py b/python/persist_tool.py
index 66e02aa..69e9374 100755
--- a/python/persist_tool.py
+++ b/python/persist_tool.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python3
 """
-Commands for backfilling content from bulk files into postgresql and s3 (minio).
+Commands for backfilling content from bulk files into postgresql and s3 (seaweedfs).
 
 Normally this is done by workers (in sandcrawler_worker.py) consuming from
 Kafka feeds, but sometimes we have bulk processing output we want to backfill.
 
@@ -120,16 +120,16 @@ def main():
         help="postgresql database connection string",
         default="postgres:///sandcrawler")
     parser.add_argument('--s3-url',
-        help="S3 (minio) backend URL",
+        help="S3 (seaweedfs) backend URL",
         default="localhost:9000")
     parser.add_argument('--s3-access-key',
-        help="S3 (minio) credential",
-        default=os.environ.get('MINIO_ACCESS_KEY'))
+        help="S3 (seaweedfs) credential",
+        default=os.environ.get('SANDCRAWLER_BLOB_ACCESS_KEY') or os.environ.get('MINIO_ACCESS_KEY'))
     parser.add_argument('--s3-secret-key',
-        help="S3 (minio) credential",
-        default=os.environ.get('MINIO_SECRET_KEY'))
+        help="S3 (seaweedfs) credential",
+        default=os.environ.get('SANDCRAWLER_BLOB_SECRET_KEY') or os.environ.get('MINIO_SECRET_KEY'))
     parser.add_argument('--s3-bucket',
-        help="S3 (minio) bucket to persist into",
+        help="S3 (seaweedfs) bucket to persist into",
         default="sandcrawler-dev")
 
     subparsers = parser.add_subparsers()
@@ -144,7 +144,7 @@ def main():
         help="ignore mimetype filtering; insert all content types (eg, assuming pre-filtered)")
 
     sub_grobid = subparsers.add_parser('grobid',
-        help="backfill a grobid JSON ('pg') dump into postgresql and s3 (minio)")
+        help="backfill a grobid JSON ('pg') dump into postgresql and s3 (seaweedfs)")
     sub_grobid.set_defaults(func=run_grobid)
     sub_grobid.add_argument('json_file',
         help="grobid file to import from (or '-' for stdin)",
@@ -180,7 +180,7 @@ def main():
         type=str)
 
     sub_pdftrio = subparsers.add_parser('pdftrio',
-        help="backfill a pdftrio JSON ('pg') dump into postgresql and s3 (minio)")
+        help="backfill a pdftrio JSON ('pg') dump into postgresql and s3 (seaweedfs)")
     sub_pdftrio.set_defaults(func=run_pdftrio)
     sub_pdftrio.add_argument('json_file',
         help="pdftrio file to import from (or '-' for stdin)",
```
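The substantive change in the argument parsing is a credential fallback: each S3 credential flag now prefers a `SANDCRAWLER_BLOB_*` environment variable and falls back to the legacy `MINIO_*` name, so existing deployments keep working. Below is a minimal, self-contained sketch of that pattern, assuming only the standard library; it reproduces just the two credential flags from the diff, and the prints at the end are illustrative, not part of persist_tool.py.

```python
#!/usr/bin/env python3
"""
Minimal sketch of the env-var fallback pattern from the diff above.
Only the two credential flags are reproduced; the database, URL, and
bucket options plus the subcommand setup are omitted.
"""
import argparse
import os

parser = argparse.ArgumentParser()
parser.add_argument('--s3-access-key',
    help="S3 (seaweedfs) credential",
    # os.environ.get() returns None for unset variables, so `or` falls
    # through to the legacy MINIO_* name, and finally to None if neither
    # is set (argparse then leaves the option value as None)
    default=os.environ.get('SANDCRAWLER_BLOB_ACCESS_KEY') or os.environ.get('MINIO_ACCESS_KEY'))
parser.add_argument('--s3-secret-key',
    help="S3 (seaweedfs) credential",
    default=os.environ.get('SANDCRAWLER_BLOB_SECRET_KEY') or os.environ.get('MINIO_SECRET_KEY'))

args = parser.parse_args()
print("access key set:", args.s3_access_key is not None)
print("secret key set:", args.s3_secret_key is not None)
```

One subtlety of the `or` chain: a variable exported as an empty string is treated the same as an unset one, which is usually the right behavior for credentials. Passing `--s3-access-key` or `--s3-secret-key` on the command line still overrides both environment variables, since argparse only consults `default` when the flag is absent.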