// skate-wikipedia-doi extracts DOIs from the wikipedia reference dataset.
//
// Input is one JSON document per line on stdin; output is TSV on stdout
// with DOI, page title, and the original JSON document.
package main

import (
	"flag"
	"fmt"
	"log"
	"os"
	"regexp"
	"runtime"
	"strings"

	"github.com/segmentio/encoding/json"
	"gitlab.com/internetarchive/refcat/skate"
	"gitlab.com/internetarchive/refcat/skate/parallel"
)

var (
	numWorkers   = flag.Int("w", runtime.NumCPU(), "number of workers")
	batchSize    = flag.Int("b", 100000, "batch size")
	bytesNewline = []byte("\n")
	// wsReplacer strips whitespace that would break the TSV output.
	wsReplacer = strings.NewReplacer("\t", "", "\n", "", " ", "")
	// patDOI matches a DOI: "10." plus a 1-8 digit registrant code, a
	// slash, then any non-space suffix ending in a word character.
	patDOI = regexp.MustCompile(`(10[.][0-9]{1,8}/[^ ]*[\w])`)
)

func main() {
	flag.Parse()
	pp := parallel.NewProcessor(os.Stdin, os.Stdout, func(p []byte) ([]byte, error) {
		var w skate.MinimalCitations
		if err := json.Unmarshal(p, &w); err != nil {
			return nil, err
		}
		ids := w.ParseIDList()
		if ids.DOI == "" {
			// No DOI among the parsed identifiers; drop the record.
			return nil, nil
		}
		match := patDOI.FindStringSubmatch(ids.DOI)
		if len(match) == 0 {
			// The claimed DOI does not look like one; drop the record.
			return nil, nil
		}
		var (
			doi       = skate.SanitizeDOI(wsReplacer.Replace(match[0]))
			pageTitle = strings.TrimSpace(w.PageTitle)
			s         = fmt.Sprintf("%s\t%s\t%s", doi, pageTitle, string(p))
		)
		return []byte(s), nil
	})
	pp.NumWorkers = *numWorkers
	pp.BatchSize = *batchSize
	if err := pp.Run(); err != nil {
		log.Fatal(err)
	}
}
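
// Example invocation, as a sketch: the input file name is hypothetical, the
// flags are the ones defined above. The tool reads JSON lines from stdin and
// writes one TSV row per record that carries a plausible DOI:
//
//	$ cat wikipedia-citations.json | skate-wikipedia-doi -w 8 -b 50000 > refs-doi.tsv
//
// Each output row is: DOI <TAB> page title <TAB> original JSON document.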