| author | Bryan Newbold <bnewbold@archive.org> | 2022-02-08 16:57:16 -0800 |
|---|---|---|
| committer | Bryan Newbold <bnewbold@archive.org> | 2022-02-08 16:57:16 -0800 |
| commit | b76630806d43427ddcdef8cd6efd110c731668a5 (patch) | |
| tree | 2b0d9c940ab6abc13ed36e4c4ecf0a4011ba5426 | |
| parent | 963fc4a50e0eddc99c14c6ff571d099e8873abac (diff) | |
| download | sandcrawler-b76630806d43427ddcdef8cd6efd110c731668a5.tar.gz sandcrawler-b76630806d43427ddcdef8cd6efd110c731668a5.zip | |
sql: script to reingest recent spn2 lookup failures in bulk mode
| -rw-r--r-- | sql/dump_reingest_bulk.sql | 25 |
| -rw-r--r-- | sql/dump_reingest_quarterly.sql | 15 |
| -rw-r--r-- | sql/dump_reingest_spn.sql | 15 |
| -rw-r--r-- | sql/dump_reingest_weekly.sql | 15 |
| -rwxr-xr-x | sql/reingest_bulk.sh | 19 |
5 files changed, 71 insertions, 18 deletions
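The new sql/reingest_bulk.sh wrapper ties the bulk re-ingest flow together: it dumps matching rows from the sandcrawler Postgres database, converts them to ingest-request JSON with scripts/ingestrequest_row2json.py, and publishes them to the bulk ingest Kafka topic. A minimal invocation sketch, not part of this commit and assuming a repository checkout on the database host with sudo access to the postgres and sandcrawler accounts:

```bash
# Hypothetical invocation (assumption, not shown in this commit): run the
# bulk re-ingest dump-and-enqueue from the sql/ directory of a checkout.
cd sandcrawler/sql
./reingest_bulk.sh   # dumps rows, converts to JSON, pushes to the bulk Kafka topic
```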
diff --git a/sql/dump_reingest_bulk.sql b/sql/dump_reingest_bulk.sql
new file mode 100644
index 0000000..403fb20
--- /dev/null
+++ b/sql/dump_reingest_bulk.sql
@@ -0,0 +1,25 @@
+
+COPY (
+    SELECT row_to_json(ingest_request.*) FROM ingest_request
+    LEFT JOIN ingest_file_result ON ingest_file_result.base_url = ingest_request.base_url
+    WHERE
+        (ingest_request.ingest_type = 'pdf'
+            OR ingest_request.ingest_type = 'html')
+        AND ingest_file_result.hit = false
+        AND ingest_request.created < NOW() - '24 hour'::INTERVAL
+        AND ingest_request.created > NOW() - '181 day'::INTERVAL
+        AND (ingest_request.ingest_request_source = 'fatcat-changelog'
+            OR ingest_request.ingest_request_source = 'fatcat-ingest')
+        AND (
+            ingest_file_result.status like 'spn2-%'
+            OR ingest_file_result.status like 'cdx-error'
+            OR ingest_file_result.status like 'petabox-error'
+        )
+        AND ingest_file_result.status != 'spn2-error:invalid-url-syntax'
+        AND ingest_file_result.status != 'spn2-error:filesize-limit'
+        AND ingest_file_result.status != 'spn2-error:not-found'
+        AND ingest_file_result.status != 'spn2-error:blocked-url'
+        AND ingest_file_result.status != 'spn2-error:too-many-redirects'
+        AND ingest_file_result.status != 'spn2-error:network-authentication-required'
+        AND ingest_file_result.status != 'spn2-error:unknown'
+) TO '/srv/sandcrawler/tasks/reingest_bulk_current.rows.json';
diff --git a/sql/dump_reingest_quarterly.sql b/sql/dump_reingest_quarterly.sql
index 725a404..c425a15 100644
--- a/sql/dump_reingest_quarterly.sql
+++ b/sql/dump_reingest_quarterly.sql
@@ -2,7 +2,9 @@
 COPY (
     SELECT row_to_json(ingest_request.*) FROM ingest_request
     LEFT JOIN ingest_file_result ON ingest_file_result.base_url = ingest_request.base_url
-    WHERE ingest_request.ingest_type = 'pdf'
+    WHERE
+        (ingest_request.ingest_type = 'pdf'
+            OR ingest_request.ingest_type = 'html')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '8 hour'::INTERVAL
         AND ingest_request.created > NOW() - '91 day'::INTERVAL
@@ -10,11 +12,12 @@ COPY (
             OR ingest_request.ingest_request_source = 'fatcat-ingest')
         AND (
             ingest_file_result.status like 'spn2-%'
-            OR ingest_file_result.status like 'cdx-error'
-            OR ingest_file_result.status like 'wayback-error'
-            OR ingest_file_result.status like 'wayback-content-error'
-            OR ingest_file_result.status like 'petabox-error'
-            OR ingest_file_result.status like 'gateway-timeout'
+            OR ingest_file_result.status = 'cdx-error'
+            OR ingest_file_result.status = 'wayback-error'
+            OR ingest_file_result.status = 'wayback-content-error'
+            OR ingest_file_result.status = 'petabox-error'
+            OR ingest_file_result.status = 'gateway-timeout'
+            OR ingest_file_result.status = 'no-capture'
         )
         AND ingest_file_result.status != 'spn2-error:invalid-url-syntax'
         AND ingest_file_result.status != 'spn2-error:filesize-limit'
diff --git a/sql/dump_reingest_spn.sql b/sql/dump_reingest_spn.sql
index 6ef08c1..b0051dd 100644
--- a/sql/dump_reingest_spn.sql
+++ b/sql/dump_reingest_spn.sql
@@ -2,18 +2,21 @@
 COPY (
     SELECT row_to_json(ingest_request.*) FROM ingest_request
     LEFT JOIN ingest_file_result ON ingest_file_result.base_url = ingest_request.base_url
-    WHERE ingest_request.ingest_type = 'pdf'
+    WHERE
+        (ingest_request.ingest_type = 'pdf'
+            OR ingest_request.ingest_type = 'html')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '6 hour'::INTERVAL
         AND ingest_request.created > NOW() - '180 day'::INTERVAL
         AND ingest_request.ingest_request_source = 'savepapernow-web'
         AND (
             ingest_file_result.status like 'spn2-%'
-            -- OR ingest_file_result.status like 'cdx-error'
-            -- OR ingest_file_result.status like 'wayback-error'
-            -- OR ingest_file_result.status like 'wayback-content-error'
-            OR ingest_file_result.status like 'petabox-error'
-            -- OR ingest_file_result.status like 'gateway-timeout'
+            -- OR ingest_file_result.status = 'cdx-error'
+            -- OR ingest_file_result.status = 'wayback-error'
+            -- OR ingest_file_result.status = 'wayback-content-error'
+            OR ingest_file_result.status = 'petabox-error'
+            -- OR ingest_file_result.status = 'gateway-timeout'
+            OR ingest_file_result.status = 'no-capture'
         )
         AND ingest_file_result.status != 'spn2-error:invalid-url-syntax'
         AND ingest_file_result.status != 'spn2-error:filesize-limit'
diff --git a/sql/dump_reingest_weekly.sql b/sql/dump_reingest_weekly.sql
index 65800eb..e529945 100644
--- a/sql/dump_reingest_weekly.sql
+++ b/sql/dump_reingest_weekly.sql
@@ -2,7 +2,9 @@
 COPY (
     SELECT row_to_json(ingest_request.*) FROM ingest_request
     LEFT JOIN ingest_file_result ON ingest_file_result.base_url = ingest_request.base_url
-    WHERE ingest_request.ingest_type = 'pdf'
+    WHERE
+        (ingest_request.ingest_type = 'pdf'
+            OR ingest_request.ingest_type = 'html')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '8 hour'::INTERVAL
         AND ingest_request.created > NOW() - '8 day'::INTERVAL
@@ -10,11 +12,12 @@ COPY (
             OR ingest_request.ingest_request_source = 'fatcat-ingest')
         AND (
             ingest_file_result.status like 'spn2-%'
-            -- OR ingest_file_result.status like 'cdx-error'
-            -- OR ingest_file_result.status like 'wayback-error'
-            -- OR ingest_file_result.status like 'wayback-content-error'
-            OR ingest_file_result.status like 'petabox-error'
-            -- OR ingest_file_result.status like 'gateway-timeout'
+            -- OR ingest_file_result.status = 'cdx-error'
+            -- OR ingest_file_result.status = 'wayback-error'
+            -- OR ingest_file_result.status = 'wayback-content-error'
+            OR ingest_file_result.status = 'petabox-error'
+            -- OR ingest_file_result.status = 'gateway-timeout'
+            OR ingest_file_result.status = 'no-capture'
         )
         AND ingest_file_result.status != 'spn2-error:invalid-url-syntax'
        AND ingest_file_result.status != 'spn2-error:filesize-limit'
diff --git a/sql/reingest_bulk.sh b/sql/reingest_bulk.sh
new file mode 100755
index 0000000..d5d3e35
--- /dev/null
+++ b/sql/reingest_bulk.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e              # fail on error
+set -u              # fail if variable not set in substitution
+set -o pipefail     # fail if part of a '|' command fails
+
+sudo -u postgres psql sandcrawler < dump_reingest_bulk.sql
+
+cd ../python
+sudo -u sandcrawler pipenv run \
+    ./scripts/ingestrequest_row2json.py /srv/sandcrawler/tasks/reingest_bulk_current.rows.json \
+    > /srv/sandcrawler/tasks/reingest_bulk_current.json
+
+cat /srv/sandcrawler/tasks/reingest_bulk_current.json \
+    | shuf \
+    | head -n1000000 \
+    | jq . -c \
+    | kafkacat -P -b wbgrp-svc263.us.archive.org -t sandcrawler-prod.ingest-file-requests-bulk -p -1
+
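Before running the bulk dump, it can be useful to see how many rows the new filter would select. A minimal pre-flight sketch, not part of this commit, that mirrors the WHERE clause of sql/dump_reingest_bulk.sql as a COUNT(*) query (the per-status exclusions are consolidated into a NOT IN list):

```bash
# Hypothetical pre-flight check (assumption, not shown in this commit):
# count the rows dump_reingest_bulk.sql would select, without writing the dump file.
sudo -u postgres psql sandcrawler <<'SQL'
SELECT COUNT(*)
FROM ingest_request
LEFT JOIN ingest_file_result ON ingest_file_result.base_url = ingest_request.base_url
WHERE
    (ingest_request.ingest_type = 'pdf' OR ingest_request.ingest_type = 'html')
    AND ingest_file_result.hit = false
    AND ingest_request.created < NOW() - '24 hour'::INTERVAL
    AND ingest_request.created > NOW() - '181 day'::INTERVAL
    AND (ingest_request.ingest_request_source = 'fatcat-changelog'
        OR ingest_request.ingest_request_source = 'fatcat-ingest')
    AND (
        ingest_file_result.status LIKE 'spn2-%'
        OR ingest_file_result.status = 'cdx-error'
        OR ingest_file_result.status = 'petabox-error'
    )
    AND ingest_file_result.status NOT IN (
        'spn2-error:invalid-url-syntax',
        'spn2-error:filesize-limit',
        'spn2-error:not-found',
        'spn2-error:blocked-url',
        'spn2-error:too-many-redirects',
        'spn2-error:network-authentication-required',
        'spn2-error:unknown'
    );
SQL
```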