Commit 8cfada7904: "kill perf"
Author: Richard Patel
Date: 2018-11-06 01:44:09 +01:00

12 changed files with 1280 additions and 197 deletions

.github/stress.png
Binary file not shown (369 KiB, deleted).

README.md

@@ -1,12 +1,2 @@
# oddb Go crawler 🚀
# oddb Go crawler
> by terorie 2018 :P
* Crawls HTTP open directories (standard Web Server Listings)
* Gets name, path, size and modification time of all files
* Soon: Will work as a crawler for [OD-Database](https://github.com/simon987/od-database)!
Stress test crawling [pandoradir](https://github.com/terorie/pandoradir)
on an average laptop (~10K requests per second, 4 connections):
![image](.github/stress.png)
Memory usage is being optimized :P

config.go

@@ -13,7 +13,6 @@ var config struct {
Token string
Retries int
Workers int
Timeout time.Duration
Tasks int32
CrawlStats time.Duration
AllocStats time.Duration
@@ -26,7 +25,6 @@ const (
ConfTasks = "crawl.tasks"
ConfRetries = "crawl.retries"
ConfWorkers = "crawl.connections"
ConfTimeout = "crawl.timeout"
ConfCrawlStats = "output.crawl_stats"
ConfAllocStats = "output.resource_stats"
ConfVerbose = "output.verbose"
@@ -36,7 +34,6 @@ func prepareConfig() {
viper.SetDefault(ConfRetries, 5)
viper.SetDefault(ConfWorkers, 2)
viper.SetDefault(ConfTasks, 3)
viper.SetDefault(ConfTimeout, 10 * time.Second)
viper.SetDefault(ConfCrawlStats, 3 * time.Second)
viper.SetDefault(ConfAllocStats, 0)
viper.SetDefault(ConfVerbose, false)
@@ -76,8 +73,6 @@ func readConfig() {
configOOB(ConfTasks, int(config.Tasks))
}
config.Timeout = viper.GetDuration(ConfTimeout)
config.CrawlStats = viper.GetDuration(ConfCrawlStats)
config.AllocStats = viper.GetDuration(ConfAllocStats)
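For reference, a minimal self-contained sketch of the viper default/lookup pattern this file relies on; the key and default value come from the hunk above, everything else is illustrative:

```go
package main

import (
	"fmt"
	"github.com/spf13/viper"
	"time"
)

// Key and default taken from the hunk above; the rest is illustrative.
const ConfCrawlStats = "output.crawl_stats"

func main() {
	// Register the default first; a config file or flag may override it later.
	viper.SetDefault(ConfCrawlStats, 3*time.Second)

	// Read it back as a time.Duration, as readConfig() does.
	fmt.Println(viper.GetDuration(ConfCrawlStats)) // 3s
}
```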

config.yml

@@ -24,5 +24,3 @@ crawl:
# How often to retry getting data
# from the site before giving up
retries: 5
# Time before discarding a network request
timeout: 10s

crawl.go (212 lines changed)

@@ -3,10 +3,15 @@ package main
import (
"bytes"
"fmt"
"github.com/sirupsen/logrus"
"github.com/terorie/oddb-go/ds/redblackhash"
"github.com/terorie/oddb-go/fasturl"
"github.com/terorie/oddb-go/runes"
"github.com/terorie/oddb-go/runespath"
"github.com/valyala/fasthttp"
"golang.org/x/crypto/blake2b"
"golang.org/x/net/html"
"golang.org/x/net/html/atom"
"path"
"strconv"
"strings"
@@ -15,9 +20,9 @@ import (
var client fasthttp.Client
func GetDir(j *Job, f *File) (links []fasthttp.URI, err error) {
func GetDir(j *Job, f *File) (links []fasturl.URL, err error) {
f.IsDir = true
f.Name = path.Base(string(j.Uri.Path()))
f.Name = runespath.Base(j.Uri.Path)
req := fasthttp.AcquireRequest()
req.SetRequestURI(j.UriStr)
@@ -25,10 +30,13 @@ func GetDir(j *Job, f *File) (links []fasthttp.URI, err error) {
res := fasthttp.AcquireResponse()
defer fasthttp.ReleaseResponse(res)
err = client.DoTimeout(req, res, config.Timeout)
err = client.Do(req, res)
fasthttp.ReleaseRequest(req)
if err != nil { return }
if err != nil {
logrus.Error(err)
return
}
err = checkStatusCode(res.StatusCode())
if err != nil { return }
@@ -37,58 +45,65 @@ func GetDir(j *Job, f *File) (links []fasthttp.URI, err error) {
doc := html.NewTokenizer(bytes.NewReader(body))
var linkHref string
var linkTexts []string
for {
tokenType := doc.Next()
token := doc.Token()
if tokenType == html.ErrorToken {
break
}
switch tokenType {
case html.StartTagToken:
name, hasAttr := doc.TagName()
if len(name) == 1 && name[0] == 'a' {
for hasAttr {
var ks, vs []byte
ks, vs, hasAttr = doc.TagAttr()
if bytes.Equal(ks, []byte("href")) {
// TODO Check escape
linkHref = string(vs)
if token.DataAtom == atom.A {
for _, attr := range token.Attr {
if attr.Key == "href" {
linkHref = attr.Val
break
}
}
}
case html.TextToken:
if linkHref != "" {
linkTexts = append(linkTexts, token.Data)
}
case html.EndTagToken:
name, _ := doc.TagName()
if len(name) == 1 && name[0] == 'a' {
if linkHref != "" && token.DataAtom == atom.A {
// Copy params
href := linkHref
linkText := strings.Join(linkTexts, " ")
// Reset params
linkHref = ""
linkTexts = nil
if strings.LastIndexByte(href, '?') != -1 {
goto nextToken
// TODO Optimized decision tree
for _, entry := range urlBlackList {
if href == entry {
goto nextToken
}
}
for _, entry := range urlPartBlackList {
if strings.Contains(href, entry) {
goto nextToken
}
}
for _, entry := range fileNameBlackList {
if strings.Contains(linkText, entry) {
goto nextToken
}
}
switch href {
case "", " ", ".", "..", "/":
goto nextToken
}
if strings.Contains(href, "../") {
goto nextToken
}
var link fasthttp.URI
j.Uri.CopyTo(&link)
link.Update(href)
var link fasturl.URL
err = j.Uri.ParseRel(&link, []rune(href))
if err != nil { continue }
if !bytes.Equal(link.Scheme(), j.Uri.Scheme()) ||
!bytes.Equal(link.Host(), j.Uri.Host()) ||
bytes.Equal(link.Path(), j.Uri.Path()) ||
!bytes.HasPrefix(link.Path(), j.Uri.Path()) {
if !runes.Equals(link.Scheme, j.Uri.Scheme) ||
!runes.Equals(link.Host, j.Uri.Host) ||
runes.Equals(link.Path, j.Uri.Path) ||
!runes.HasPrefix(link.Path, j.Uri.Path) {
continue
}
@@ -102,12 +117,11 @@ func GetDir(j *Job, f *File) (links []fasthttp.URI, err error) {
return
}
func GetFile(u fasthttp.URI, f *File) (err error) {
func GetFile(u fasturl.URL, f *File) (err error) {
f.IsDir = false
cleanPath := path.Clean(string(u.Path()))
u.SetPath(cleanPath)
f.Name = path.Base(cleanPath)
f.Path = strings.Trim(cleanPath, "/")
u.Path = []rune(path.Clean(string(u.Path)))
f.Name = runespath.Base(u.Path)
f.Path = runes.TrimRune(u.Path, '/')
req := fasthttp.AcquireRequest()
req.Header.SetMethod("HEAD")
@@ -117,7 +131,7 @@ func GetFile(u fasthttp.URI, f *File) (err error) {
res.SkipBody = true
defer fasthttp.ReleaseResponse(res)
err = client.DoTimeout(req, res, config.Timeout)
err = client.Do(req, res)
fasthttp.ReleaseRequest(req)
if err != nil { return }
@@ -125,41 +139,83 @@ func GetFile(u fasthttp.URI, f *File) (err error) {
err = checkStatusCode(res.StatusCode())
if err != nil { return }
f.applyContentLength(string(res.Header.Peek("content-length")))
f.applyLastModified(string(res.Header.Peek("last-modified")))
// TODO Inefficient af
header := res.Header.Header()
f.ParseHeader(header)
return nil
}
func (f *File) HashDir(links []fasthttp.URI) (o redblackhash.Key) {
func (f *File) HashDir(links []fasturl.URL) (o redblackhash.Key) {
h, _ := blake2b.New256(nil)
h.Write([]byte(f.Name))
h.Write([]byte(string(f.Name)))
for _, link := range links {
h.Write(link.Path())
fileName := runespath.Base(link.Path)
h.Write([]byte(string(fileName)))
}
sum := h.Sum(nil)
copy(o[:redblackhash.KeySize], sum)
return
}
func (f *File) applyContentLength(v string) {
if v == "" { return }
size, err := strconv.ParseInt(v, 10, 64)
if err != nil { return }
if size < 0 { return }
f.Size = size
func (f *File) ParseHeader(h []byte) {
var k1, k2 int
var v1, v2 int
// Simple finite state machine
state := 0
for i, b := range h {
switch state {
case 0:
if b == byte(':') {
state = 1
k2 = i
}
case 1:
state = 2
case 2:
state = 3
v1 = i
case 3:
if b == byte('\r') {
state = 4
}
case 4:
state = 0
v2 = i - 1
key := string(h[k1:k2])
val := string(h[v1:v2])
k1 = i + 1 // next key starts after the '\n'
f.applyHeader(key, val)
}
}
}
func (f *File) applyLastModified(v string) {
if v == "" { return }
var err error
f.MTime, err = time.Parse(time.RFC1123, v)
if err == nil { return }
f.MTime, err = time.Parse(time.RFC850, v)
if err == nil { return }
// TODO Parse asctime
f.MTime, err = time.Parse("2006-01-02", v[:10])
if err == nil { return }
func (f *File) applyHeader(k, v string) {
// fasthttp normalizes header names (e.g. "Content-Length"), so match case-insensitively
switch strings.ToLower(k) {
case "content-length":
size, err := strconv.ParseInt(v, 10, 64)
if err != nil { break }
if size < 0 { break }
f.Size = size
case "last-modified":
var err error
f.MTime, err = time.Parse(time.RFC1123, v)
if err == nil { break }
f.MTime, err = time.Parse(time.RFC850, v)
if err == nil { break }
// TODO Parse asctime
f.MTime, err = time.Parse("2006-01-02", v[:10])
if err == nil { break }
}
}
func checkStatusCode(status int) error {
@@ -178,3 +234,41 @@ func checkStatusCode(status int) error {
return fmt.Errorf("got HTTP status %d", status)
}
}
var urlBlackList = [...]string {
"",
" ",
".",
"..",
"/",
}
var urlPartBlackList = [...]string {
"?C=N&O=D",
"?C=M&O=A",
"?C=S&O=A",
"?C=D&O=A",
"?C=N;O=D",
"?C=M;O=A",
"?C=M&O=D",
"?C=S;O=A",
"?C=S&O=D",
"?C=D;O=A",
"?MA",
"?SA",
"?DA",
"?ND",
"?C=N&O=A",
"?C=N&O=A",
"?M=A",
"?N=D",
"?S=A",
"?D=A",
}
var fileNameBlackList = [...]string {
"Parent Directory",
" Parent Directory",
"../",
}
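To make the new filtering concrete, here is a small self-contained sketch that mirrors the decision logic in GetDir above; the keepLink helper and the trimmed-down lists are illustrative, not part of this commit:

```go
package main

import (
	"fmt"
	"strings"
)

// Trimmed-down stand-ins for the blacklists above.
var urlPartBlackList = []string{"?C=N&O=D", "?C=M&O=A"}
var fileNameBlackList = []string{"Parent Directory"}

// keepLink mirrors the decision logic in GetDir: drop empty/self/parent
// links, Apache autoindex sort links, and "Parent Directory" captions.
func keepLink(href, linkText string) bool {
	switch href {
	case "", " ", ".", "..", "/":
		return false
	}
	if strings.Contains(href, "../") {
		return false
	}
	for _, entry := range urlPartBlackList {
		if strings.Contains(href, entry) {
			return false
		}
	}
	for _, entry := range fileNameBlackList {
		if strings.Contains(linkText, entry) {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(keepLink("?C=N&O=D", "Name"))        // false: column sort link
	fmt.Println(keepLink("../", "Parent Directory")) // false: parent directory
	fmt.Println(keepLink("backups/", "backups/"))    // true: real listing entry
}
```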

fasturl/url.go (new file, 920 lines)

@@ -0,0 +1,920 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package fasturl parses URLs and implements query escaping.
package fasturl
// Modifications by terorie
// See RFC 3986. This package generally follows RFC 3986, except where
// it deviates for compatibility reasons. When sending changes, first
// search old issues for history on decisions. Unit tests should also
// contain references to issue numbers with details.
import (
"errors"
"fmt"
"github.com/terorie/oddb-go/runes"
"strconv"
"strings"
)
// Error reports an error and the operation and URL that caused it.
type Error struct {
Op string
URL []rune
Err error
}
func (e *Error) Error() string { return e.Op + " " + string(e.URL) + ": " + e.Err.Error() }
type timeout interface {
Timeout() bool
}
func (e *Error) Timeout() bool {
t, ok := e.Err.(timeout)
return ok && t.Timeout()
}
type temporary interface {
Temporary() bool
}
func (e *Error) Temporary() bool {
t, ok := e.Err.(temporary)
return ok && t.Temporary()
}
func ishex(c byte) bool {
switch {
case '0' <= c && c <= '9':
return true
case 'a' <= c && c <= 'f':
return true
case 'A' <= c && c <= 'F':
return true
}
return false
}
func unhex(c byte) byte {
switch {
case '0' <= c && c <= '9':
return c - '0'
case 'a' <= c && c <= 'f':
return c - 'a' + 10
case 'A' <= c && c <= 'F':
return c - 'A' + 10
}
return 0
}
type encoding int
const (
encodePath encoding = 1 + iota
encodePathSegment
encodeHost
encodeZone
encodeUserPassword
encodeQueryComponent
encodeFragment
)
type EscapeError string
func (e EscapeError) Error() string {
return "invalid URL escape " + strconv.Quote(string(e))
}
type InvalidHostError string
func (e InvalidHostError) Error() string {
return "invalid character " + strconv.Quote(string(e)) + " in host name"
}
// Return true if the specified character should be escaped when
// appearing in a URL string, according to RFC 3986.
//
// Please be informed that for now shouldEscape does not check all
// reserved characters correctly. See golang.org/issue/5684.
func shouldEscape(c rune, mode encoding) bool {
// §2.3 Unreserved characters (alphanum)
if 'A' <= c && c <= 'Z' || 'a' <= c && c <= 'z' || '0' <= c && c <= '9' {
return false
}
if mode == encodeHost || mode == encodeZone {
// §3.2.2 Host allows
// sub-delims = "!" / "$" / "&" / "'" / "(" / ")" / "*" / "+" / "," / ";" / "="
// as part of reg-name.
// We add : because we include :port as part of host.
// We add [ ] because we include [ipv6]:port as part of host.
// We add < > because they're the only characters left that
// we could possibly allow, and Parse will reject them if we
// escape them (because hosts can't use %-encoding for
// ASCII bytes).
switch c {
case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '[', ']', '<', '>', '"':
return false
}
}
switch c {
case '-', '_', '.', '~': // §2.3 Unreserved characters (mark)
return false
case '$', '&', '+', ',', '/', ':', ';', '=', '?', '@': // §2.2 Reserved characters (reserved)
// Different sections of the URL allow a few of
// the reserved characters to appear unescaped.
switch mode {
case encodePath: // §3.3
// The RFC allows : @ & = + $ but saves / ; , for assigning
// meaning to individual path segments. This package
// only manipulates the path as a whole, so we allow those
// last three as well. That leaves only ? to escape.
return c == '?'
case encodePathSegment: // §3.3
// The RFC allows : @ & = + $ but saves / ; , for assigning
// meaning to individual path segments.
return c == '/' || c == ';' || c == ',' || c == '?'
case encodeUserPassword: // §3.2.1
// The RFC allows ';', ':', '&', '=', '+', '$', and ',' in
// userinfo, so we must escape only '@', '/', and '?'.
// The parsing of userinfo treats ':' as special so we must escape
// that too.
return c == '@' || c == '/' || c == '?' || c == ':'
case encodeQueryComponent: // §3.4
// The RFC reserves (so we must escape) everything.
return true
case encodeFragment: // §4.1
// The RFC text is silent but the grammar allows
// everything, so escape nothing.
return false
}
}
if mode == encodeFragment {
// RFC 3986 §2.2 allows not escaping sub-delims. A subset of sub-delims are
// included in reserved from RFC 2396 §2.2. The remaining sub-delims do not
// need to be escaped. To minimize potential breakage, we apply two restrictions:
// (1) we always escape sub-delims outside of the fragment, and (2) we always
// escape single quote to avoid breaking callers that had previously assumed that
// single quotes would be escaped. See issue #19917.
switch c {
case '!', '(', ')', '*':
return false
}
}
// Everything else must be escaped.
return true
}
// QueryUnescape does the inverse transformation of QueryEscape,
// converting each 3-byte encoded substring of the form "%AB" into the
// hex-decoded byte 0xAB.
// It returns an error if any % is not followed by two hexadecimal
// digits.
func QueryUnescape(s []rune) ([]rune, error) {
return unescape(s, encodeQueryComponent)
}
// PathUnescape does the inverse transformation of PathEscape,
// converting each 3-byte encoded substring of the form "%AB" into the
// hex-decoded byte 0xAB. It returns an error if any % is not followed
// by two hexadecimal digits.
//
// PathUnescape is identical to QueryUnescape except that it does not
// unescape '+' to ' ' (space).
func PathUnescape(s []rune) ([]rune, error) {
return unescape(s, encodePathSegment)
}
// unescape unescapes a string; the mode specifies
// which section of the URL string is being unescaped.
func unescape(s []rune, mode encoding) ([]rune, error) {
// Count %, check that they're well-formed.
n := 0
hasPlus := false
for i := 0; i < len(s); {
switch s[i] {
case '%':
n++
if i+2 >= len(s) || !ishex(byte(s[i+1])) || !ishex(byte(s[i+2])) {
s = s[i:]
if len(s) > 3 {
s = s[:3]
}
return nil, EscapeError(s)
}
// Per https://tools.ietf.org/html/rfc3986#page-21
// in the host component %-encoding can only be used
// for non-ASCII bytes.
// But https://tools.ietf.org/html/rfc6874#section-2
// introduces %25 being allowed to escape a percent sign
// in IPv6 scoped-address literals. Yay.
if mode == encodeHost && unhex(byte(s[i+1])) < 8 && !runes.Equals(s[i:i+3], []rune("%25")) {
return nil, EscapeError(s[i : i+3])
}
if mode == encodeZone {
// RFC 6874 says basically "anything goes" for zone identifiers
// and that even non-ASCII can be redundantly escaped,
// but it seems prudent to restrict %-escaped bytes here to those
// that are valid host name bytes in their unescaped form.
// That is, you can use escaping in the zone identifier but not
// to introduce bytes you couldn't just write directly.
// But Windows puts spaces here! Yay.
v := unhex(byte(s[i+1]))<<4 | unhex(byte(s[i+2]))
if !runes.Equals(s[i:i+3], []rune("%25")) && v != ' ' && shouldEscape(rune(v), encodeHost) {
return nil, EscapeError(s[i : i+3])
}
}
i += 3
case '+':
hasPlus = mode == encodeQueryComponent
i++
default:
if (mode == encodeHost || mode == encodeZone) && s[i] < 0x80 && shouldEscape(s[i], mode) {
return nil, InvalidHostError(s[i : i+1])
}
i++
}
}
if n == 0 && !hasPlus {
return s, nil
}
t := make([]byte, len(s)-2*n)
j := 0
for i := 0; i < len(s); {
switch s[i] {
case '%':
t[j] = unhex(byte(s[i+1]))<<4 | unhex(byte(s[i+2]))
j++
i += 3
case '+':
if mode == encodeQueryComponent {
t[j] = ' '
} else {
t[j] = '+'
}
j++
i++
default:
t[j] = byte(s[i])
j++
i++
}
}
return []rune(string(t)), nil
}
// QueryEscape escapes the string so it can be safely placed
// inside a URL query.
func QueryEscape(s []rune) []rune {
return escape(s, encodeQueryComponent)
}
// PathEscape escapes the string so it can be safely placed
// inside a URL path segment.
func PathEscape(s []rune) []rune {
return escape(s, encodePathSegment)
}
func escape(s []rune, mode encoding) []rune {
spaceCount, hexCount := 0, 0
for i := 0; i < len(s); i++ {
c := s[i]
if shouldEscape(c, mode) {
if c == ' ' && mode == encodeQueryComponent {
spaceCount++
} else {
hexCount++
}
}
}
if spaceCount == 0 && hexCount == 0 {
return s
}
t := make([]byte, len(s)+2*hexCount)
j := 0
for i := 0; i < len(s); i++ {
switch c := s[i]; {
case c == ' ' && mode == encodeQueryComponent:
t[j] = '+'
j++
case shouldEscape(c, mode):
t[j] = '%'
t[j+1] = "0123456789ABCDEF"[c>>4]
t[j+2] = "0123456789ABCDEF"[c&15]
j += 3
default:
t[j] = byte(s[i])
j++
}
}
return []rune(string(t))
}
// A URL represents a parsed URL (technically, a URI reference).
//
// The general form represented is:
//
// [scheme:][//[userinfo@]host][/]path[?query][#fragment]
//
// URLs that do not start with a slash after the scheme are interpreted as:
//
// scheme:opaque[?query][#fragment]
//
// Note that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.
// A consequence is that it is impossible to tell which slashes in the Path were
// slashes in the raw URL and which were %2f. This distinction is rarely important,
// but when it is, code must not use Path directly.
// The Parse function sets both Path and RawPath in the URL it returns,
// and URL's String method uses RawPath if it is a valid encoding of Path,
// by calling the EscapedPath method.
type URL struct {
Scheme []rune
Opaque []rune // encoded opaque data
Host []rune // host or host:port
Path []rune // path (relative paths may omit leading slash)
RawPath []rune // encoded path hint (see EscapedPath method)
ForceQuery bool // append a query ('?') even if RawQuery is empty
RawQuery []rune // encoded query values, without '?'
}
// Maybe rawurl is of the form scheme:path.
// (Scheme must be [a-zA-Z][a-zA-Z0-9+-.]*)
// If so, return scheme, path; else return "", rawurl.
func getscheme(rawurl []rune) (scheme []rune, path []rune, err error) {
for i := 0; i < len(rawurl); i++ {
c := rawurl[i]
switch {
case 'a' <= c && c <= 'z' || 'A' <= c && c <= 'Z':
// do nothing
case '0' <= c && c <= '9' || c == '+' || c == '-' || c == '.':
if i == 0 {
return nil, rawurl, nil
}
case c == ':':
if i == 0 {
return nil, nil, errors.New("missing protocol scheme")
}
scheme = rawurl[:i]
path = rawurl[i+1:]
return
default:
// we have encountered an invalid character,
// so there is no valid scheme
return nil, rawurl, nil
}
}
return nil, rawurl, nil
}
// Maybe s is of the form t c u.
// If so, return t, c u (or t, u if cutc == true).
// If not, return s, "".
func split(s []rune, c rune, cutc bool) ([]rune, []rune) {
i := strings.Index(string(s), string(c)) // TODO Optimize
if i < 0 {
return s, nil
}
if cutc {
return s[:i], s[i+1:]
}
return s[:i], s[i:]
}
// Parse parses rawurl into a URL structure.
//
// The rawurl may be relative (a path, without a host) or absolute
// (starting with a scheme). Trying to parse a hostname and path
// without a scheme is invalid but may not necessarily return an
// error, due to parsing ambiguities.
func (u *URL) Parse(rawurl []rune) error {
// Cut off #frag
s, frag := split(rawurl, '#', true)
err := u.parse(s, false)
if err != nil {
return &Error{"parse", s, err}
}
if len(frag) == 0 {
return nil
}
return nil
}
// ParseRequestURI parses rawurl into a URL structure. It assumes that
// rawurl was received in an HTTP request, so the rawurl is interpreted
// only as an absolute URI or an absolute path.
// The string rawurl is assumed not to have a #fragment suffix.
// (Web browsers strip #fragment before sending the URL to a web server.)
func (u *URL) ParseRequestURI(rawurl []rune) error {
err := u.parse(rawurl, true)
if err != nil {
return &Error{"parse", rawurl, err}
}
return nil
}
// parse parses a URL from a string in one of two contexts. If
// viaRequest is true, the URL is assumed to have arrived via an HTTP request,
// in which case only absolute URLs or path-absolute relative URLs are allowed.
// If viaRequest is false, all forms of relative URLs are allowed.
func (u *URL) parse(rawurl []rune, viaRequest bool) error {
var rest []rune
var err error
if len(rawurl) == 0 && viaRequest {
return errors.New("empty url")
}
if runes.Equals(rawurl, []rune("*")) {
u.Path = []rune("*")
return nil
}
// Split off possible leading "http:", "mailto:", etc.
// Cannot contain escaped characters.
if u.Scheme, rest, err = getscheme(rawurl); err != nil {
return err
}
if runes.HasSuffix(rest, []rune("?")) && runes.Count(rest, '?') == 1 {
u.ForceQuery = true
rest = rest[:len(rest)-1]
} else {
rest, u.RawQuery = split(rest, '?', true)
}
if !runes.HasPrefix(rest, []rune("/")) {
if len(u.Scheme) != 0 {
// We consider rootless paths per RFC 3986 as opaque.
u.Opaque = rest
return nil
}
if viaRequest {
return errors.New("invalid URI for request")
}
// Avoid confusion with malformed schemes, like cache_object:foo/bar.
// See golang.org/issue/16822.
//
// RFC 3986, §3.3:
// In addition, a URI reference (Section 4.1) may be a relative-path reference,
// in which case the first path segment cannot contain a colon (":") character.
colon := runes.IndexRune(rest, ':')
slash := runes.IndexRune(rest, '/')
if colon >= 0 && (slash < 0 || colon < slash) {
// First path segment has colon. Not allowed in relative URL.
return errors.New("first path segment in URL cannot contain colon")
}
}
if (len(u.Scheme) != 0 || !viaRequest && !runes.HasPrefix(rest, []rune("///"))) && runes.HasPrefix(rest, []rune("//")) {
var authority []rune
authority, rest = split(rest[2:], '/', false)
u.Host, err = parseAuthority(authority)
if err != nil {
return err
}
}
// Set Path and, optionally, RawPath.
// RawPath is a hint of the encoding of Path. We don't want to set it if
// the default escaping of Path is equivalent, to help make sure that people
// don't rely on it in general.
if err := u.setPath(rest); err != nil {
return err
}
return nil
}
func parseAuthority(authority []rune) (host []rune, err error) {
i := runes.LastIndexRune(authority, '@')
if i < 0 {
host, err = parseHost(authority)
} else {
host, err = parseHost(authority[i+1:])
}
if err != nil {
return nil, err
}
if i < 0 {
return host, nil
}
userinfo := authority[:i]
if !validUserinfo(userinfo) {
return nil, errors.New("fasturl: invalid userinfo")
}
return host, nil
}
// parseHost parses host as an authority without user
// information. That is, as host[:port].
func parseHost(host []rune) ([]rune, error) {
if runes.HasPrefix(host, []rune("[")) {
// Parse an IP-Literal in RFC 3986 and RFC 6874.
// E.g., "[fe80::1]", "[fe80::1%25en0]", "[fe80::1]:80".
i := runes.LastIndexRune(host, ']')
if i < 0 {
return nil, errors.New("missing ']' in host")
}
colonPort := host[i+1:]
if !validOptionalPort(colonPort) {
return nil, fmt.Errorf("invalid port %q after host", colonPort)
}
// RFC 6874 defines that %25 (%-encoded percent) introduces
// the zone identifier, and the zone identifier can use basically
// any %-encoding it likes. That's different from the host, which
// can only %-encode non-ASCII bytes.
// We do impose some restrictions on the zone, to avoid stupidity
// like newlines.
zone := strings.Index(string(host[:i]), "%25")
if zone >= 0 {
host1, err := unescape(host[:zone], encodeHost)
if err != nil {
return nil, err
}
host2, err := unescape(host[zone:i], encodeZone)
if err != nil {
return nil, err
}
host3, err := unescape(host[i:], encodeHost)
if err != nil {
return nil, err
}
// TODO Optimize
return runes.Create(host1, host2, host3), nil
}
}
var err error
if host, err = unescape(host, encodeHost); err != nil {
return nil, err
}
return host, nil
}
// setPath sets the Path and RawPath fields of the URL based on the provided
// escaped path p. It maintains the invariant that RawPath is only specified
// when it differs from the default encoding of the path.
// For example:
// - setPath("/foo/bar") will set Path="/foo/bar" and RawPath=""
// - setPath("/foo%2fbar") will set Path="/foo/bar" and RawPath="/foo%2fbar"
// setPath will return an error only if the provided path contains an invalid
// escaping.
func (u *URL) setPath(p []rune) error {
path, err := unescape(p, encodePath)
if err != nil {
return err
}
u.Path = path
if escp := escape(path, encodePath); runes.Equals(p, escp) {
// Default encoding is fine.
u.RawPath = nil
} else {
u.RawPath = p
}
return nil
}
// EscapedPath returns the escaped form of u.Path.
// In general there are multiple possible escaped forms of any path.
// EscapedPath returns u.RawPath when it is a valid escaping of u.Path.
// Otherwise EscapedPath ignores u.RawPath and computes an escaped
// form on its own.
// The String and RequestURI methods use EscapedPath to construct
// their results.
// In general, code should call EscapedPath instead of
// reading u.RawPath directly.
func (u *URL) EscapedPath() []rune {
if len(u.RawPath) != 0 && validEncodedPath(u.RawPath) {
p, err := unescape(u.RawPath, encodePath)
if err == nil && runes.Equals(p, u.Path) {
return u.RawPath
}
}
if runes.Equals(u.Path, []rune("*")) {
return []rune("*") // don't escape (Issue 11202)
}
return escape(u.Path, encodePath)
}
// validEncodedPath reports whether s is a valid encoded path.
// It must not contain any bytes that require escaping during path encoding.
func validEncodedPath(s []rune) bool {
for i := 0; i < len(s); i++ {
// RFC 3986, Appendix A.
// pchar = unreserved / pct-encoded / sub-delims / ":" / "@".
// shouldEscape is not quite compliant with the RFC,
// so we check the sub-delims ourselves and let
// shouldEscape handle the others.
switch s[i] {
case '!', '$', '&', '\'', '(', ')', '*', '+', ',', ';', '=', ':', '@':
// ok
case '[', ']':
// ok - not specified in RFC 3986 but left alone by modern browsers
case '%':
// ok - percent encoded, will decode
default:
if shouldEscape(s[i], encodePath) {
return false
}
}
}
return true
}
// validOptionalPort reports whether port is either an empty string
// or matches /^:\d*$/
func validOptionalPort(port []rune) bool {
if len(port) == 0 {
return true
}
if port[0] != ':' {
return false
}
for _, b := range port[1:] {
if b < '0' || b > '9' {
return false
}
}
return true
}
func (u *URL) Runes() (buf []rune) {
if len(u.Scheme) != 0 {
buf = append(buf, u.Scheme...)
buf = append(buf, ':')
}
if len(u.Opaque) != 0 {
buf = append(buf, u.Opaque...)
} else {
if len(u.Scheme) != 0 || len(u.Host) != 0 {
if len(u.Host) != 0 || len(u.Path) != 0 {
buf = append(buf, '/', '/')
}
if h := u.Host; len(h) != 0 {
buf = append(buf, escape(h, encodeHost)...)
}
}
path := u.EscapedPath()
if len(path) != 0 && path[0] != '/' && len(u.Host) != 0 {
buf = append(buf, '/')
}
if len(buf) == 0 {
// RFC 3986 §4.2
// A path segment that contains a colon character (e.g., "this:that")
// cannot be used as the first segment of a relative-path reference, as
// it would be mistaken for a scheme name. Such a segment must be
// preceded by a dot-segment (e.g., "./this:that") to make a relative-
// path reference.
if i := runes.IndexRune(path, ':'); i > -1 && runes.IndexRune(path[:i], '/') == -1 {
buf = append(buf, '.', '/')
}
}
buf = append(buf, path...)
}
if u.ForceQuery || len(u.RawQuery) != 0 {
buf = append(buf, '?')
buf = append(buf, u.RawQuery...)
}
return
}
// String reassembles the URL into a valid URL string.
// The general form of the result is one of:
//
// scheme:opaque?query#fragment
// scheme://userinfo@host/path?query#fragment
//
// If u.Opaque is non-empty, String uses the first form;
// otherwise it uses the second form.
// To obtain the path, String uses u.EscapedPath().
//
// In the second form, the following rules apply:
// - if u.Scheme is empty, scheme: is omitted.
// - if u.User is nil, userinfo@ is omitted.
// - if u.Host is empty, host/ is omitted.
// - if u.Scheme and u.Host are empty and u.User is nil,
// the entire scheme://userinfo@host/ is omitted.
// - if u.Host is non-empty and u.Path begins with a /,
// the form host/path does not add its own /.
// - if u.RawQuery is empty, ?query is omitted.
// - if u.Fragment is empty, #fragment is omitted.
func (u *URL) String() string {
return string(u.Runes())
}
// resolvePath applies special path segments from refs and applies
// them to base, per RFC 3986.
func resolvePath(base, ref []rune) []rune {
var full []rune
if len(ref) == 0 {
full = base
} else if ref[0] != '/' {
// TODO Optimize
i := strings.LastIndex(string(base), "/")
full = runes.Create(base[:i+1], ref)
} else {
full = ref
}
if len(full) == 0 {
return nil
}
var dst []string
// TODO Optimize
src := strings.Split(string(full), "/")
for _, elem := range src {
switch elem {
case ".":
// drop
case "..":
if len(dst) > 0 {
dst = dst[:len(dst)-1]
}
default:
dst = append(dst, elem)
}
}
if last := src[len(src)-1]; last == "." || last == ".." {
// Add final slash to the joined path.
dst = append(dst, "") // TODO Wtf?
}
// TODO Optimize
return []rune("/" + strings.TrimPrefix(strings.Join(dst, "/"), "/"))
}
// IsAbs reports whether the URL is absolute.
// Absolute means that it has a non-empty scheme.
func (u *URL) IsAbs() bool {
return len(u.Scheme) != 0
}
// ParseRel parses ref in the context of the receiver and stores the
// resolved URL in out. The reference may be relative or absolute.
// It returns an error on parse failure.
func (u *URL) ParseRel(out *URL, ref []rune) error {
var refurl URL
err := refurl.Parse(ref)
if err != nil {
return err
}
u.ResolveReference(out, &refurl)
return nil
}
// ResolveReference resolves a URI reference to an absolute URI from
// an absolute base URI u, per RFC 3986 Section 5.2. The URI reference
// may be relative or absolute. ResolveReference writes its result into
// url, even when the result is identical to either the base or the
// reference. If ref is an absolute URL, then ResolveReference ignores
// base and stores a copy of ref.
func (u *URL) ResolveReference(url *URL, ref *URL) {
*url = *ref
if len(ref.Scheme) == 0 {
url.Scheme = u.Scheme
}
if len(ref.Scheme) != 0 || len(ref.Host) != 0 {
// The "absoluteURI" or "net_path" cases.
// We can ignore the error from setPath since we know we provided a
// validly-escaped path.
url.setPath(resolvePath(ref.EscapedPath(), nil))
return
}
if len(ref.Opaque) != 0 {
url.Host = nil
url.Path = nil
return
}
if len(ref.Path) == 0 && len(ref.RawQuery) == 0 {
url.RawQuery = u.RawQuery
}
// The "abs_path" or "rel_path" cases.
url.Host = u.Host
url.setPath(resolvePath(u.EscapedPath(), ref.EscapedPath()))
return
}
// RequestURI returns the encoded path?query or opaque?query
// string that would be used in an HTTP request for u.
func (u *URL) RequestURI() []rune {
result := u.Opaque
if len(result) == 0 {
result = u.EscapedPath()
if len(result) == 0 {
result = []rune("/")
}
} else {
if runes.HasPrefix(result, []rune("//")) {
result = runes.Create(u.Scheme, []rune(":"), result)
}
}
if u.ForceQuery || len(u.RawQuery) != 0 {
result = append(result, '?')
result = append(result, u.RawQuery...)
}
return result
}
// Hostname returns u.Host, without any port number.
//
// If Host is an IPv6 literal with a port number, Hostname returns the
// IPv6 literal without the square brackets. IPv6 literals may include
// a zone identifier.
func (u *URL) Hostname() []rune {
return stripPort(u.Host)
}
// Port returns the port part of u.Host, without the leading colon.
// If u.Host doesn't contain a port, Port returns an empty string.
func (u *URL) Port() []rune {
return portOnly(u.Host)
}
func stripPort(hostport []rune) []rune {
colon := runes.IndexRune(hostport, ':')
if colon == -1 {
return hostport
}
if i := runes.IndexRune(hostport, ']'); i != -1 {
return runes.TrimPrefix(hostport[:i], []rune("["))
}
return hostport[:colon]
}
func portOnly(hostport []rune) []rune {
colon := runes.IndexRune(hostport, ':')
if colon == -1 {
return nil
}
// TODO Optimize
if i := strings.Index(string(hostport), "]:"); i != -1 {
return hostport[i+len("]:"):]
}
if strings.Contains(string(hostport), "]") {
return nil
}
return hostport[colon+len(":"):]
}
// Marshaling interface implementations.
// Would like to implement MarshalText/UnmarshalText but that will change the JSON representation of URLs.
func (u *URL) MarshalBinary() (text []byte, err error) {
return []byte(u.String()), nil
}
func (u *URL) UnmarshalBinary(text []byte) error {
var u1 URL
err := u1.Parse([]rune(string(text)))
if err != nil {
return err
}
*u = u1
return nil
}
// validUserinfo reports whether s is a valid userinfo string per RFC 3986
// Section 3.2.1:
// userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
// unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
// sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
// / "*" / "+" / "," / ";" / "="
//
// It doesn't validate pct-encoded. The caller does that via func unescape.
func validUserinfo(s []rune) bool {
for _, r := range s {
if 'A' <= r && r <= 'Z' {
continue
}
if 'a' <= r && r <= 'z' {
continue
}
if '0' <= r && r <= '9' {
continue
}
switch r {
case '-', '.', '_', ':', '~', '!', '$', '&', '\'',
'(', ')', '*', '+', ',', ';', '=', '%', '@':
continue
default:
return false
}
}
return true
}
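A hedged usage sketch of the fasturl API added above (Parse, ParseRel, String, Hostname); the URLs are made up for illustration:

```go
package main

import (
	"fmt"
	"github.com/terorie/oddb-go/fasturl"
)

func main() {
	// Parse an absolute base URL into the rune-based URL type.
	var base fasturl.URL
	if err := base.Parse([]rune("http://example.com/pub/")); err != nil {
		panic(err)
	}

	// Resolve a relative href against it, as GetDir now does for each
	// link found in a directory listing.
	var link fasturl.URL
	if err := base.ParseRel(&link, []rune("linux/iso/")); err != nil {
		panic(err)
	}

	fmt.Println(link.String())           // http://example.com/pub/linux/iso/
	fmt.Println(string(link.Hostname())) // example.com
}
```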

main.go (36 lines changed)

@@ -3,8 +3,9 @@ package main
import (
"context"
"github.com/sirupsen/logrus"
"github.com/terorie/oddb-go/fasturl"
"github.com/terorie/oddb-go/runes"
"github.com/urfave/cli"
"github.com/valyala/fasthttp"
"log"
"net/http"
_ "net/http/pprof"
@@ -16,7 +17,7 @@ import (
var app = cli.App {
Name: "oddb-go",
Usage: "OD-Database Go crawler",
Version: "0.2",
Version: "0.1",
BashComplete: cli.DefaultAppComplete,
Writer: os.Stdout,
Compiled: buildDate,
@@ -50,24 +51,21 @@ func cmdCrawler(clic *cli.Context) error {
args := clic.Args()
remotes := make([]*OD, len(args))
for i, arg := range args {
for i, argStr := range args {
// https://github.com/golang/go/issues/19779
if !strings.Contains(arg, "://") {
arg = "http://" + arg
if !strings.Contains(argStr, "://") {
argStr = "http://" + argStr
}
var u fasthttp.URI
u.Parse(nil, []byte(arg))
uPath := string(u.Path())
if !strings.HasSuffix(uPath, "/") {
u.SetPath(uPath + "/")
}
remotes[i] = &OD {
Task: &Task{
WebsiteId: 0,
Url: u.String(),
},
BaseUri: u,
arg := []rune(argStr)
var u fasturl.URL
err := u.Parse(arg)
if err != nil { return err }
if !runes.HasSuffix(u.Path, []rune("/")) {
u.Path = append(u.Path, '/')
}
remotes[i] = &OD{ BaseUri: u }
}
c := context.Background()
@@ -89,6 +87,6 @@ func cmdCrawler(clic *cli.Context) error {
}
var buildDate = time.Date(
2018, 11, 15,
23, 24, 0, 0,
2018, 10, 28,
17, 10, 0, 0,
time.UTC)
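A short sketch of the argument normalization cmdCrawler now performs (defaulting the scheme to http:// and appending a trailing slash); the normalize helper is illustrative, not part of the commit:

```go
package main

import (
	"fmt"
	"github.com/terorie/oddb-go/fasturl"
	"github.com/terorie/oddb-go/runes"
	"strings"
)

// normalize mirrors what cmdCrawler now does with each argument:
// default to http:// and make sure the path ends in a slash.
func normalize(argStr string) (u fasturl.URL, err error) {
	if !strings.Contains(argStr, "://") {
		argStr = "http://" + argStr
	}
	if err = u.Parse([]rune(argStr)); err != nil {
		return
	}
	if !runes.HasSuffix(u.Path, []rune("/")) {
		u.Path = append(u.Path, '/')
	}
	return
}

func main() {
	u, err := normalize("example.com/pub")
	if err != nil {
		panic(err)
	}
	fmt.Println(u.String()) // http://example.com/pub/
}
```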


@@ -2,23 +2,23 @@ package main
import (
"github.com/terorie/oddb-go/ds/redblackhash"
"github.com/valyala/fasthttp"
"github.com/terorie/oddb-go/fasturl"
"sync"
"time"
)
type Job struct {
OD *OD
Uri fasthttp.URI
Uri fasturl.URL
UriStr string
Fails int
LastError error
}
type OD struct {
Task *Task
Wait sync.WaitGroup
BaseUri fasthttp.URI
BaseUri fasturl.URL
Files []File
WCtx WorkerContext
Scanned redblackhash.Tree
@@ -26,10 +26,10 @@ type OD struct {
}
type File struct {
Name string `json:"name"`
Name []rune `json:"name"`
Size int64 `json:"size"`
MTime time.Time `json:"mtime"`
Path string `json:"path"`
Path []rune `json:"path"`
IsDir bool `json:"-"`
}

runes/runes.go (new file, 98 lines)

@@ -0,0 +1,98 @@
package runes
func Create(rs ...[]rune) (x []rune) {
for _, r := range rs {
x = append(x, r...)
}
return x
}
func IndexRune(s []rune, r rune) int {
for i, sr := range s {
if r == sr {
return i
}
}
return -1
}
func LastIndexRune(s []rune, r rune) int {
for i := len(s)-1; i >= 0; i-- {
sr := s[i]
if r == sr {
return i
}
}
return -1
}
func Equals(a, b []rune) bool {
if len(a) != len(b) {
return false
}
for i := 0; i < len(a); i++ {
if a[i] != b[i] {
return false
}
}
return true
}
func Count(s []rune, r rune) (n int) {
for _, sr := range s {
if sr == r {
n++
}
}
return
}
func HasPrefix(s, prefix []rune) bool {
return len(s) >= len(prefix) && Equals(s[0:len(prefix)], prefix)
}
func HasSuffix(s, suffix []rune) bool {
return len(s) >= len(suffix) && Equals(s[len(s)-len(suffix):], suffix)
}
// TrimPrefix returns s without the provided leading prefix string.
// If s doesn't start with prefix, s is returned unchanged.
func TrimPrefix(s, prefix []rune) []rune {
if HasPrefix(s, prefix) {
return s[len(prefix):]
}
return s
}
// TrimRune returns s with all leading and trailing occurrences of r removed.
func TrimRune(s []rune, r rune) []rune {
// Trim prefix
for len(s) > 0 && s[0] == r {
s = s[1:]
}
// Trim suffix
for len(s) > 0 && s[len(s)-1] == r {
s = s[:len(s)-1]
}
if len(s) == 0 {
return nil
}
return s
}
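A quick usage sketch of the runes helpers added above; the values are illustrative, and the TrimRune output assumes the corrected version shown here:

```go
package main

import (
	"fmt"
	"github.com/terorie/oddb-go/runes"
)

func main() {
	p := []rune("/pub/linux/")

	fmt.Println(runes.HasPrefix(p, []rune("/pub")))   // true
	fmt.Println(runes.HasSuffix(p, []rune("/")))      // true
	fmt.Println(runes.IndexRune(p, 'l'))              // 5
	fmt.Println(string(runes.TrimRune(p, '/')))       // pub/linux
	fmt.Println(string(runes.Create(p, []rune("x")))) // /pub/linux/x
}
```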

runespath/path.go (new file, 44 lines)

@@ -0,0 +1,44 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package path implements utility routines for manipulating slash-separated
// paths.
//
// The path package should only be used for paths separated by forward
// slashes, such as the paths in URLs. This package does not deal with
// Windows paths with drive letters or backslashes; to manipulate
// operating system paths, use the path/filepath package.
package runespath
import (
"github.com/terorie/oddb-go/runes"
)
// Base returns the last element of path.
// Trailing slashes are removed before extracting the last element.
// If the path is empty, Base returns ".".
// If the path consists entirely of slashes, Base returns "/".
func Base(path []rune) []rune {
if len(path) == 0 {
return []rune(".")
}
// Strip trailing slashes.
for len(path) > 0 && path[len(path)-1] == '/' {
path = path[0 : len(path)-1]
}
// Find the last element
if i := runes.LastIndexRune(path, '/'); i >= 0 {
path = path[i+1:]
}
// If empty now, it had only slashes.
if len(path) == 0 {
return []rune("/")
}
return path
}
// IsAbs reports whether the path is absolute.
func IsAbs(path string) bool {
return len(path) > 0 && path[0] == '/'
}
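A small sketch of runespath.Base on a few inputs, matching the documented behavior above; the paths are made up:

```go
package main

import (
	"fmt"
	"github.com/terorie/oddb-go/runespath"
)

func main() {
	fmt.Println(string(runespath.Base([]rune("/pub/linux/iso/")))) // iso
	fmt.Println(string(runespath.Base([]rune("///"))))             // /
	fmt.Println(string(runespath.Base([]rune(""))))                // .
}
```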


@@ -2,11 +2,7 @@ package main
import (
"context"
"encoding/json"
"fmt"
"github.com/sirupsen/logrus"
"os"
"path"
"sync/atomic"
)
@@ -16,36 +12,38 @@ var totalBuffered int64
func Schedule(c context.Context, remotes <-chan *OD) {
go Stats(c)
for remote := range remotes {
logrus.WithField("url", remote.BaseUri.String()).
Info("Starting crawler")
for {
select {
case <-c.Done():
return
// Collect results
results := make(chan File)
case remote := <-remotes:
logrus.WithField("url", remote.BaseUri.String()).
Info("Starting crawler")
// Spawn workers
remote.WCtx.in, remote.WCtx.out = makeJobBuffer(c)
for i := 0; i < config.Workers; i++ {
go remote.WCtx.Worker(results)
// Spawn workers
remote.WCtx.in, remote.WCtx.out = makeJobBuffer(c)
for i := 0; i < config.Workers; i++ {
go remote.WCtx.Worker()
}
// Enqueue initial job
atomic.AddInt32(&activeTasks, 1)
remote.WCtx.queueJob(Job{
OD: remote,
Uri: remote.BaseUri,
UriStr: remote.BaseUri.String(),
Fails: 0,
})
globalWait.Done()
// Upload result when ready
go remote.Watch()
}
// Enqueue initial job
atomic.AddInt32(&activeTasks, 1)
remote.WCtx.queueJob(Job{
OD: remote,
Uri: remote.BaseUri,
UriStr: remote.BaseUri.String(),
Fails: 0,
})
// Upload result when ready
go remote.Watch(results)
}
}
func (r *OD) Watch(results chan File) {
go r.Task.Collect(results)
func (r *OD) Watch() {
// Wait for all jobs on remote to finish
r.Wait.Wait()
close(r.WCtx.in)
@@ -53,42 +51,6 @@ func (r *OD) Watch(results chan File) {
logrus.WithField("url", r.BaseUri.String()).
Info("Crawler finished")
globalWait.Done()
close(results)
}
func (t *Task) Collect(results chan File) {
err := t.collect(results)
if err != nil {
logrus.WithError(err).
Error("Failed saving crawl results")
}
}
func (t *Task) collect(results chan File) error {
err := os.MkdirAll("crawled", 0755)
if err != nil { return err }
f, err := os.OpenFile(
path.Join("crawled", fmt.Sprintf("%d.json", t.WebsiteId)),
os.O_CREATE | os.O_WRONLY | os.O_TRUNC,
0755,
)
if err != nil { return err }
defer f.Close()
for result := range results {
resJson, err := json.Marshal(result)
if err != nil { panic(err) }
_, err = f.Write(resJson)
if err != nil { return err }
_, err = f.Write([]byte{'\n'})
if err != nil { return err }
}
return nil
}
func makeJobBuffer(c context.Context) (chan<- Job, <-chan Job) {
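The Schedule rewrite above replaces ranging over the remotes channel with a select loop that also honors context cancellation; a minimal stand-alone sketch of that pattern, with string stand-ins instead of *OD:

```go
package main

import (
	"context"
	"fmt"
	"time"
)

// schedule mirrors the shape of the new Schedule loop: a select on the
// cancellation context and the remotes channel.
func schedule(c context.Context, remotes <-chan string) {
	for {
		select {
		case <-c.Done():
			// Stop accepting new remotes once the context is cancelled.
			return
		case remote := <-remotes:
			fmt.Println("starting crawler for", remote)
		}
	}
}

func main() {
	c, cancel := context.WithCancel(context.Background())
	remotes := make(chan string, 1)
	remotes <- "http://example.com/"

	go schedule(c, remotes)
	time.Sleep(100 * time.Millisecond)
	cancel()
}
```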


@@ -1,10 +1,8 @@
package main
import (
"bytes"
"github.com/sirupsen/logrus"
"math"
"sort"
"sync"
"sync/atomic"
"time"
@@ -19,13 +17,13 @@ type WorkerContext struct {
numRateLimits int
}
func (w WorkerContext) Worker(results chan<- File) {
func (w WorkerContext) Worker() {
for job := range w.out {
w.step(results, job)
w.step(job)
}
}
func (w WorkerContext) step(results chan<- File, job Job) {
func (w WorkerContext) step(job Job) {
defer w.finishJob(&job)
var f File
@@ -64,15 +62,12 @@ func (w WorkerContext) step(results chan<- File, job Job) {
w.queueJob(job)
}
if !f.IsDir {
results <- f
}
job.OD.Files = append(job.OD.Files, f)
}
func DoJob(job *Job, f *File) (newJobs []Job, err error) {
uriPath := job.Uri.Path()
if len(uriPath) == 0 { return }
if uriPath[len(uriPath)-1] == '/' {
if len(job.Uri.Path) == 0 { return }
if job.Uri.Path[len(job.Uri.Path)-1] == '/' {
// Load directory
links, err := GetDir(job, f)
if err != nil {
@@ -90,36 +85,23 @@ func DoJob(job *Job, f *File) (newJobs []Job, err error) {
return nil, ErrKnown
}
// Sort by path
sort.Slice(links, func(i, j int) bool {
return bytes.Compare(links[i].Path(), links[j].Path()) < 0
})
var newJobCount int
var lastLink string
for _, link := range links {
uriStr := link.String()
// Ignore dupes
if uriStr == lastLink {
continue
}
lastLink = uriStr
// Skip already queued links
//if _, old := job.OD.Scanned.LoadOrStore(link, true); old {
// continue
//}
job.OD.Wait.Add(1)
newJobs = append(newJobs, Job{
OD: job.OD,
Uri: link,
UriStr: uriStr,
UriStr: link.String(),
Fails: 0,
})
newJobCount++
}
if config.Verbose {
logrus.WithFields(logrus.Fields{
"url": job.UriStr,
"files": newJobCount,
"files": len(links),
}).Debug("Listed")
}
} else {
@@ -127,7 +109,7 @@ func DoJob(job *Job, f *File) (newJobs []Job, err error) {
err := GetFile(job.Uri, f)
if err != nil {
logrus.WithError(err).
WithField("url", job.UriStr).
WithField("url", job.Uri.String()).
Error("Failed getting file")
return nil, err
}
@@ -137,6 +119,7 @@ func DoJob(job *Job, f *File) (newJobs []Job, err error) {
func (w WorkerContext) queueJob(job Job) {
job.OD.Wait.Add(1)
globalWait.Add(1)
if w.numRateLimits > 0 {
if time.Since(w.lastRateLimit) > 5 * time.Second {
@@ -153,4 +136,5 @@ func (w WorkerContext) queueJob(job Job) {
func (w WorkerContext) finishJob(job *Job) {
job.OD.Wait.Done()
globalWait.Done()
}