// skate-wikipedia-doi extracts DOIs from a wikipedia reference dataset.
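//
// A usage sketch (the input file name and compression are hypothetical; the
// tool reads newline-delimited JSON documents from stdin and writes TSV to
// stdout, using the -w and -b flags defined below):
//
//	$ zstdcat citations.json.zst | skate-wikipedia-doi -w 8 -b 50000 > out.tsv
//
// Each output row carries three tab-separated columns: the sanitized DOI,
// the trimmed page title and the original JSON document.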
package main

import (
	"flag"
	"fmt"
	"log"
	"os"
	"regexp"
	"runtime"
	"strings"

	"git.archive.org/martin/cgraph/skate"
	"git.archive.org/martin/cgraph/skate/parallel"
	"github.com/segmentio/encoding/json"
)

var (
	numWorkers   = flag.Int("w", runtime.NumCPU(), "number of workers")
	batchSize    = flag.Int("b", 100000, "batch size")
	bytesNewline = []byte("\n")
	// wsReplacer strips whitespace from candidate DOI strings.
	wsReplacer = strings.NewReplacer("\t", "", "\n", "", " ", "")
	// patDOI matches DOI-like substrings, e.g. 10.1234/abc-xyz.
	patDOI = regexp.MustCompile(`(10[.][0-9]{1,8}/[^ ]*[\w])`)
)

func main() {
	flag.Parse()
	// Process input from stdin in parallel; the function is applied to each
	// input document and yields at most one tab separated output line.
	pp := parallel.NewProcessor(os.Stdin, os.Stdout, func(p []byte) ([]byte, error) {
		var w skate.MinimalCitations
		if err := json.Unmarshal(p, &w); err != nil {
			return nil, err
		}
		// Skip documents whose identifier list contains no DOI.
		ids := w.ParseIDList()
		if ids.DOI == "" {
			return nil, nil
		}
		// Keep only values that actually look like a DOI.
		match := patDOI.FindStringSubmatch(ids.DOI)
		if len(match) == 0 {
			return nil, nil
		}
		// Emit DOI, page title and the original JSON document as one row.
		var (
			doi       = skate.SanitizeDOI(wsReplacer.Replace(match[0]))
			pageTitle = strings.TrimSpace(w.PageTitle)
			s         = fmt.Sprintf("%s\t%s\t%s", doi, pageTitle, string(p))
		)
		return []byte(s), nil
	})
	pp.NumWorkers = *numWorkers
	pp.BatchSize = *batchSize
	if err := pp.Run(); err != nil {
		log.Fatal(err)
	}
}