author | Bryan Newbold <bnewbold@archive.org> | 2022-10-03 10:16:26 -0700
---|---|---
committer | Bryan Newbold <bnewbold@archive.org> | 2022-10-03 10:16:26 -0700
commit | 54e14814080d9a706ff6f15694b3b54918200169 (patch) |
tree | c2ce152acb64a365408a57728c18d960b954c1f5 /sql |
parent | a04468041cd81ad90aa76ec15788a5ffacb6eec2 (diff) |
download | sandcrawler-54e14814080d9a706ff6f15694b3b54918200169.tar.gz, sandcrawler-54e14814080d9a706ff6f15694b3b54918200169.zip |
reingests: update scripts and SQL
Diffstat (limited to 'sql')
-rw-r--r-- | sql/dump_reingest_old.sql | 36
-rw-r--r-- | sql/dump_reingest_quarterly.sql | 14
-rw-r--r-- | sql/dump_reingest_spn.sql | 4
-rw-r--r-- | sql/dump_reingest_terminalstatus.sql | 34
-rw-r--r-- | sql/dump_reingest_weekly.sql | 7
-rwxr-xr-x | sql/reingest_old.sh | 19
-rwxr-xr-x | sql/reingest_terminalstatus_forcerecrawl.sh | 19
7 files changed, 127 insertions, 6 deletions
diff --git a/sql/dump_reingest_old.sql b/sql/dump_reingest_old.sql
new file mode 100644
index 0000000..7473420
--- /dev/null
+++ b/sql/dump_reingest_old.sql
@@ -0,0 +1,36 @@
+
+BEGIN TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE;
+
+COPY (
+    SELECT row_to_json(ingest_request.*) FROM ingest_request
+    LEFT JOIN ingest_file_result ON
+        ingest_file_result.base_url = ingest_request.base_url
+        AND ingest_file_result.ingest_type = ingest_request.ingest_type
+    WHERE
+        ingest_file_result.hit = false
+        AND ingest_request.created < NOW() - '6 day'::INTERVAL
+        -- AND ingest_request.created > NOW() - '181 day'::INTERVAL
+        AND (ingest_request.ingest_request_source = 'fatcat-changelog'
+             OR ingest_request.ingest_request_source = 'fatcat-ingest'
+             OR ingest_request.ingest_request_source = 'fatcat-ingest-container'
+             OR ingest_request.ingest_request_source = 'unpaywall'
+             OR ingest_request.ingest_request_source = 'arxiv'
+             OR ingest_request.ingest_request_source = 'pmc'
+             OR ingest_request.ingest_request_source = 'doaj'
+             OR ingest_request.ingest_request_source = 'dblp')
+        AND (
+            ingest_file_result.status like 'spn2-%'
+            -- OR ingest_file_result.status like 'no-capture'
+            -- OR ingest_file_result.status like 'cdx-error'
+            -- OR ingest_file_result.status like 'petabox-error'
+        )
+        AND ingest_file_result.status != 'spn2-error:invalid-url-syntax'
+        AND ingest_file_result.status != 'spn2-error:filesize-limit'
+        AND ingest_file_result.status != 'spn2-error:not-found'
+        AND ingest_file_result.status != 'spn2-error:blocked-url'
+        AND ingest_file_result.status != 'spn2-error:too-many-redirects'
+        AND ingest_file_result.status != 'spn2-error:network-authentication-required'
+        AND ingest_file_result.status != 'spn2-error:unknown'
+) TO '/srv/sandcrawler/tasks/reingest_old_current.rows.json';
+
+ROLLBACK;
diff --git a/sql/dump_reingest_quarterly.sql b/sql/dump_reingest_quarterly.sql
index c377bf0..dbeb199 100644
--- a/sql/dump_reingest_quarterly.sql
+++ b/sql/dump_reingest_quarterly.sql
@@ -8,17 +8,25 @@ COPY (
         AND ingest_file_result.ingest_type = ingest_request.ingest_type
     WHERE
         (ingest_request.ingest_type = 'pdf'
-            OR ingest_request.ingest_type = 'html')
+            OR ingest_request.ingest_type = 'html'
+            OR ingest_request.ingest_type = 'xml'
+            OR ingest_request.ingest_type = 'component')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '8 hour'::INTERVAL
         AND ingest_request.created > NOW() - '91 day'::INTERVAL
         AND (ingest_request.ingest_request_source = 'fatcat-changelog'
-             OR ingest_request.ingest_request_source = 'fatcat-ingest')
+             OR ingest_request.ingest_request_source = 'fatcat-ingest'
+             OR ingest_request.ingest_request_source = 'fatcat-ingest-container'
+             OR ingest_request.ingest_request_source = 'unpaywall'
+             OR ingest_request.ingest_request_source = 'arxiv'
+             OR ingest_request.ingest_request_source = 'pmc'
+             OR ingest_request.ingest_request_source = 'doaj'
+             OR ingest_request.ingest_request_source = 'dblp')
         AND (
             ingest_file_result.status like 'spn2-%'
             OR ingest_file_result.status = 'cdx-error'
             OR ingest_file_result.status = 'wayback-error'
-            OR ingest_file_result.status = 'wayback-content-error'
+            -- OR ingest_file_result.status = 'wayback-content-error'
             OR ingest_file_result.status = 'petabox-error'
             OR ingest_file_result.status = 'gateway-timeout'
             OR ingest_file_result.status = 'no-capture'
diff --git a/sql/dump_reingest_spn.sql b/sql/dump_reingest_spn.sql
index 65a8796..a83125c 100644
--- a/sql/dump_reingest_spn.sql
+++ b/sql/dump_reingest_spn.sql
@@ -8,7 +8,9 @@ COPY (
         AND ingest_file_result.ingest_type = ingest_request.ingest_type
     WHERE
         (ingest_request.ingest_type = 'pdf'
-            OR ingest_request.ingest_type = 'html')
+            OR ingest_request.ingest_type = 'html'
+            OR ingest_request.ingest_type = 'xml'
+            OR ingest_request.ingest_type = 'component')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '6 hour'::INTERVAL
         AND ingest_request.created > NOW() - '180 day'::INTERVAL
diff --git a/sql/dump_reingest_terminalstatus.sql b/sql/dump_reingest_terminalstatus.sql
new file mode 100644
index 0000000..b72a096
--- /dev/null
+++ b/sql/dump_reingest_terminalstatus.sql
@@ -0,0 +1,34 @@
+
+BEGIN TRANSACTION ISOLATION LEVEL SERIALIZABLE READ ONLY DEFERRABLE;
+
+COPY (
+    SELECT row_to_json(ingest_request.*) FROM ingest_request
+    LEFT JOIN ingest_file_result ON
+        ingest_file_result.base_url = ingest_request.base_url
+        AND ingest_file_result.ingest_type = ingest_request.ingest_type
+    WHERE
+        ingest_file_result.hit = false
+        AND ingest_request.created < NOW() - '72 hour'::INTERVAL
+        AND ingest_request.created > NOW() - '10 day'::INTERVAL
+        AND (ingest_request.ingest_request_source = 'fatcat-changelog'
+             OR ingest_request.ingest_request_source = 'fatcat-ingest')
+        AND ingest_file_result.status = 'terminal-bad-status'
+        AND (
+            ingest_file_result.terminal_status_code = 500
+            OR ingest_file_result.terminal_status_code = 502
+            OR ingest_file_result.terminal_status_code = 503
+            OR ingest_file_result.terminal_status_code = 429
+            OR ingest_file_result.terminal_status_code = 404
+        )
+        AND (
+            ingest_request.base_url LIKE 'https://doi.org/10.3390/%'
+            OR ingest_request.base_url LIKE 'https://doi.org/10.1103/%'
+            OR ingest_request.base_url LIKE 'https://doi.org/10.1155/%'
+        )
+) TO '/srv/sandcrawler/tasks/reingest_terminalstatus_current.rows.json';
+
+-- bulk re-tries would be:
+-- AND (ingest_request.ingest_request_source != 'fatcat-changelog'
+-- AND ingest_request.ingest_request_source != 'fatcat-ingest')
+
+ROLLBACK;
diff --git a/sql/dump_reingest_weekly.sql b/sql/dump_reingest_weekly.sql
index 4acec38..a019938 100644
--- a/sql/dump_reingest_weekly.sql
+++ b/sql/dump_reingest_weekly.sql
@@ -8,12 +8,15 @@ COPY (
         AND ingest_file_result.ingest_type = ingest_request.ingest_type
     WHERE
         (ingest_request.ingest_type = 'pdf'
-            OR ingest_request.ingest_type = 'html')
+            OR ingest_request.ingest_type = 'html'
+            OR ingest_request.ingest_type = 'xml'
+            OR ingest_request.ingest_type = 'component')
         AND ingest_file_result.hit = false
         AND ingest_request.created < NOW() - '8 hour'::INTERVAL
         AND ingest_request.created > NOW() - '8 day'::INTERVAL
         AND (ingest_request.ingest_request_source = 'fatcat-changelog'
-             OR ingest_request.ingest_request_source = 'fatcat-ingest')
+             OR ingest_request.ingest_request_source = 'fatcat-ingest'
+             OR ingest_request.ingest_request_source = 'fatcat-ingest-container')
         AND (
             ingest_file_result.status like 'spn2-%'
             -- OR ingest_file_result.status = 'cdx-error'
diff --git a/sql/reingest_old.sh b/sql/reingest_old.sh
new file mode 100755
index 0000000..96e5416
--- /dev/null
+++ b/sql/reingest_old.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e # fail on error
+set -u # fail if variable not set in substitution
+set -o pipefail # fail if part of a '|' command fails
+
+sudo -u postgres psql sandcrawler < dump_reingest_old.sql
+
+cd ../python
+sudo -u sandcrawler pipenv run \
+    ./scripts/ingestrequest_row2json.py /srv/sandcrawler/tasks/reingest_old_current.rows.json \
+    > /srv/sandcrawler/tasks/reingest_old_current.json
+
+cat /srv/sandcrawler/tasks/reingest_old_current.json \
+    | shuf \
+    | head -n1000000 \
+    | jq . -c \
+    | kafkacat -P -b wbgrp-svc350.us.archive.org -t sandcrawler-prod.ingest-file-requests-daily -p -1
+
diff --git a/sql/reingest_terminalstatus_forcerecrawl.sh b/sql/reingest_terminalstatus_forcerecrawl.sh
new file mode 100755
index 0000000..5cb6d51
--- /dev/null
+++ b/sql/reingest_terminalstatus_forcerecrawl.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -e # fail on error
+set -u # fail if variable not set in substitution
+set -o pipefail # fail if part of a '|' command fails
+
+sudo -u postgres psql sandcrawler < dump_reingest_terminalstatus.sql
+
+cd ../python
+sudo -u sandcrawler pipenv run \
+    ./scripts/ingestrequest_row2json.py --force-recrawl /srv/sandcrawler/tasks/reingest_terminalstatus_current.rows.json \
+    > /srv/sandcrawler/tasks/reingest_terminalstatus_current.json
+
+cat /srv/sandcrawler/tasks/reingest_terminalstatus_current.json \
+    | shuf \
+    | head -n100000 \
+    | jq . -c \
+    | kafkacat -P -b wbgrp-svc350.us.archive.org -t sandcrawler-prod.ingest-file-requests-daily -p -1
+
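
Both new shell scripts pipe the dumped rows through `scripts/ingestrequest_row2json.py` before publishing to Kafka. That script is not part of this diff, so as a rough illustration only, a conversion step of this general shape might look like the sketch below; field names beyond those visible in the SQL (`base_url`, `ingest_type`, `ingest_request_source`) and the `force_recrawl` key are assumptions, not the actual sandcrawler implementation.

```python
#!/usr/bin/env python3
"""Hypothetical sketch (not the real sandcrawler script): turn the
row_to_json() dump lines produced by the SQL above into one ingest
request JSON object per line, optionally tagged for forced re-crawl."""

import argparse
import json
import sys


def transform(row: dict, force_recrawl: bool) -> dict:
    # The dump rows are full ingest_request rows serialized by row_to_json();
    # keep only the fields an ingest worker would need (assumed field set).
    request = {
        "base_url": row["base_url"],
        "ingest_type": row["ingest_type"],
        "ingest_request_source": row.get("ingest_request_source"),
        "link_source": row.get("link_source"),
        "link_source_id": row.get("link_source_id"),
    }
    if force_recrawl:
        # Assumed flag name: signal the worker to ignore existing captures.
        request["force_recrawl"] = True
    return request


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("json_file", type=argparse.FileType("r"))
    parser.add_argument("--force-recrawl", action="store_true")
    args = parser.parse_args()
    for line in args.json_file:
        line = line.strip()
        if not line:
            continue
        print(json.dumps(transform(json.loads(line), args.force_recrawl)))


if __name__ == "__main__":
    main()
```

The scripts in this commit then shuffle the resulting lines, cap the batch size with `head`, compact them with `jq . -c`, and publish to the `sandcrawler-prod.ingest-file-requests-daily` Kafka topic via `kafkacat`, as shown in the diff above.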