blob: 3ca41631b1016dff3c3dd2ddf7ed45561817dac1 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
|
package skate
import (
"regexp"
)
// Cleanup patterns for malformed URLs as they appear in raw bibliographic
// reference data. Compiled once at package scope.
var (
	// Non-word junk between scheme and host, e.g. `http://!!!:` or `http://"`.
	patNonWordDomain = regexp.MustCompile(`(https?:\/\/)([^\w]*)(.*)`)
	// Extra slashes after the scheme, e.g. "http:///en.m.wikipedia.org/...".
	patRepeatedHttpSlashes = regexp.MustCompile(`(https?:\/\/)(\/)*(.*)`)
	// A bare DOI used in place of a hostname, e.g. "http://10.1113/jphysiol...".
	patHttpDOI = regexp.MustCompile(`(https?:\/\/)(10[.][0-9]{1,8}\/.*)`)
	// "Accessed on" style suffixes glued onto the URL, in several languages
	// (English, German, Turkish, French, Spanish, Indonesian, Portuguese).
	patAccessedOn = regexp.MustCompile(`(?iU)(.*)[.;]?(abgerufen|adresinden|sayfasındanulaşıl|accessedon|consultéle|consultad|diaksestanggal|diaksespadatanggal|lastaccessed|acesso|accessoem|accessed).*$`)
	// Text glued directly after a known file extension,
	// e.g. "....zipJournalofInsectScience".
	patFileExtraSuffix = regexp.MustCompile(`(http.*[.](zip|pdf|html|doc|docx|rar))(.*)$`)
)

// SanitizeURL applies various cleanup rules on URLs as found in references.
// Each rule is a regexp rewrite; ReplaceAllString returns the input unchanged
// when a pattern does not match, so no MatchString guards are needed and every
// rule is applied unconditionally in order. The input is returned as-is when
// no rule matches.
func SanitizeURL(s string) string {
	// Strip non-word junk between scheme and host:
	// http://!!!: -> http://
	// http://"    -> http://
	s = patNonWordDomain.ReplaceAllString(s, `$1$3`)
	// Collapse repeated slashes after the scheme:
	// http:///en.m.wikipedia.org/ChenLong -> http://en.m.wikipedia.org/ChenLong
	s = patRepeatedHttpSlashes.ReplaceAllString(s, `$1$3`)
	// Rewrite a DOI-as-host to the canonical resolver:
	// http://10.1113/jphysiol.2002.026047 -> https://doi.org/10.1113/jphysiol.2002.026047
	s = patHttpDOI.ReplaceAllString(s, `https://doi.org/$2`)
	// Drop "accessed on" style suffixes, e.g.
	// .diaksestanggal27-03-2017.10.30Wib, accessedon15, .Accessed,
	// Acessoem:10/09/2012, .Acesso:11Abr
	s = patAccessedOn.ReplaceAllString(s, `$1`)
	// Trim text glued after a file extension, e.g.
	// http://140.120.197.173/Ecology/Download/Timing-MSChart.zipJournalofInsectScience
	s = patFileExtraSuffix.ReplaceAllString(s, `$1`)
	return s
}
|