author     Martin Czygan <martin.czygan@gmail.com>  2021-08-05 00:42:53 +0200
committer  Martin Czygan <martin.czygan@gmail.com>  2021-08-05 00:42:53 +0200
commit     3e75168158461c18d66fb1dee98e69beb8ae8907 (patch)
tree       b894e433240e72985900d2bd6b591730ad5070a5 /skate/cmd/skate-cleanup
parent     9f71dc1060e90bc3ab183c82a1098428e304c183 (diff)
skate: move cleanup code out
Diffstat (limited to 'skate/cmd/skate-cleanup')
-rw-r--r--  skate/cmd/skate-cleanup/main.go | 75
1 file changed, 17 insertions(+), 58 deletions(-)
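
Note: this change replaces the two package-level filter functions with filter types in the skate package; main now only wires flag values into struct fields. The diff shows just the call sites, so what follows is a minimal sketch of what the moved types plausibly look like. The field names, the Run method name, and its func([]byte) ([]byte, error) shape are taken from the diff; everything else is an assumption, not the actual skate source.

// Sketch only: types inferred from the assignments in main below,
// not copied from the skate package.
package skate

// FilterURL extracts the first URL from one column of a delimited line.
type FilterURL struct {
	Delimiter      string   // column separator
	Index          int      // 1-based column to clean
	BestEffort     bool     // log and skip short lines instead of failing
	Aggressive     bool     // extended (slower) URL cleanup
	SkipNonMatches bool     // drop lines where nothing matches
	AllowedSchemas []string // e.g. []string{"http", "https"}
}

// FilterDOI extracts a DOI from one column and lowercases it.
type FilterDOI struct {
	Delimiter      string
	Index          int
	BestEffort     bool
	Aggressive     bool
	SkipNonMatches bool
}

Both types would expose Run([]byte) ([]byte, error), exactly the shape parallel.NewProcessor expects, so main can hand over filter.Run directly; a sketch of FilterURL.Run follows the diff.
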
diff --git a/skate/cmd/skate-cleanup/main.go b/skate/cmd/skate-cleanup/main.go
index d4d5f5a..4dd367f 100644
--- a/skate/cmd/skate-cleanup/main.go
+++ b/skate/cmd/skate-cleanup/main.go
@@ -9,16 +9,13 @@ package main
import (
"flag"
- "fmt"
"log"
"os"
- "regexp"
"runtime"
"strings"
"git.archive.org/martin/cgraph/skate"
"git.archive.org/martin/cgraph/skate/parallel"
- "mvdan.cc/xurls/v2"
)
var (
@@ -32,8 +29,6 @@ var (
extendedCleanup = flag.Bool("X", false, "extended (and slower) cleanup for urls")
allow = flag.String("allow", "http,https", "comma separated list of schemas to allow for urls")
- PatDOI = regexp.MustCompile(`10[.][0-9]{1,8}/[^ ]*[\w]`)
- rxRelaxed = xurls.Relaxed()
allowedSchemas []string // parsed from allow flag
)
@@ -44,12 +39,25 @@ func main() {
}
var f func([]byte) ([]byte, error)
switch *what {
- case "doi":
- f = doiFilter
case "url":
- f = urlFilter
+ filter := skate.FilterURL{
+ Delimiter: *delimiter,
+ Index: *index,
+ BestEffort: *bestEffort,
+ Aggressive: *extendedCleanup,
+ SkipNonMatches: *skipNonMatches,
+ AllowedSchemas: allowedSchemas,
+ }
+ f = filter.Run
default:
- f = doiFilter
+ filter := skate.FilterDOI{
+ Delimiter: *delimiter,
+ Index: *index,
+ BestEffort: *bestEffort,
+ Aggressive: *extendedCleanup,
+ SkipNonMatches: *skipNonMatches,
+ }
+ f = filter.Run
}
pp := parallel.NewProcessor(os.Stdin, os.Stdout, f)
pp.NumWorkers = *numWorkers
@@ -58,52 +66,3 @@ func main() {
log.Fatal(err)
}
}
-
-// urlFilter finds the first URL in the configured column.
-func urlFilter(p []byte) ([]byte, error) {
- parts := strings.Split(string(p), *delimiter)
- if len(parts) < *index {
- msg := fmt.Sprintf("warn: line has too few fields (%d): %s", len(parts), string(p))
- if *bestEffort {
- log.Println(msg)
- return nil, nil
- } else {
- return nil, fmt.Errorf(msg)
- }
- }
- url := rxRelaxed.FindString(parts[*index-1])
- if *extendedCleanup {
- url = skate.SanitizeURL(url)
- }
- if url == "" && *skipNonMatches {
- return nil, nil
- }
- if len(allowedSchemas) > 0 && !skate.HasAnyPrefix(url, allowedSchemas) {
- return nil, nil
- }
- if len(parts) == 1 || *index == len(parts) {
- url = url + "\n"
- }
- parts[*index-1] = url
- return []byte(strings.Join(parts, *delimiter)), nil
-}
-
-// doiFilter finds a DOI and normalizes it to lowercase.
-func doiFilter(p []byte) ([]byte, error) {
- parts := strings.Split(string(p), *delimiter)
- if len(parts) < *index {
- msg := fmt.Sprintf("warn: line has too few fields (%d): %s", len(parts), string(p))
- if *bestEffort {
- log.Println(msg)
- return nil, nil
- } else {
- return nil, fmt.Errorf(msg)
- }
- }
- result := PatDOI.FindString(parts[*index-1])
- if result == "" && *skipNonMatches {
- return nil, nil
- }
- parts[*index-1] = strings.ToLower(result)
- return []byte(strings.Join(parts, *delimiter)), nil
-}
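
The removed urlFilter maps almost line for line onto a Run method of the FilterURL type sketched above: the package-level flags become struct fields, and the package-level rxRelaxed moves along with the code. A hedged sketch, continuing the assumed types from before the diff (errors.New replaces the old non-constant fmt.Errorf call):

// Sketch of the moved logic, ported from the deleted urlFilter;
// not the actual skate package source.
package skate

import (
	"errors"
	"fmt"
	"log"
	"strings"

	"mvdan.cc/xurls/v2"
)

// The relaxed URL pattern travels with the filter into the package.
var rxRelaxed = xurls.Relaxed()

// Run finds the first URL in the field at Index (1-based), optionally
// sanitizes it, and rewrites the line, mirroring the removed urlFilter.
func (f FilterURL) Run(p []byte) ([]byte, error) {
	parts := strings.Split(string(p), f.Delimiter)
	if len(parts) < f.Index {
		msg := fmt.Sprintf("warn: line has too few fields (%d): %s", len(parts), string(p))
		if f.BestEffort {
			log.Println(msg)
			return nil, nil
		}
		return nil, errors.New(msg)
	}
	url := rxRelaxed.FindString(parts[f.Index-1])
	if f.Aggressive {
		url = SanitizeURL(url) // already lives in the skate package
	}
	if url == "" && f.SkipNonMatches {
		return nil, nil
	}
	if len(f.AllowedSchemas) > 0 && !HasAnyPrefix(url, f.AllowedSchemas) {
		return nil, nil
	}
	if len(parts) == 1 || f.Index == len(parts) {
		url = url + "\n"
	}
	parts[f.Index-1] = url
	return []byte(strings.Join(parts, f.Delimiter)), nil
}

FilterDOI.Run would follow the same pattern, using the removed PatDOI regexp and strings.ToLower on the match.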