Mirror of https://github.com/terorie/od-database-crawler.git (synced 2025-12-13 15:19:03 +00:00)
Exclude dups in dir instead of keeping hashes of links
 crawl.go | 8 --------
 1 file changed, 8 deletions(-)
@@ -155,14 +155,6 @@ func (f *File) HashDir(links []fasturl.URL) (o redblackhash.Key) {
 	return
 }
 
-func HashString(s string) (o redblackhash.Key) {
-	h, _ := blake2b.New256(nil)
-	h.Write([]byte(s))
-	sum := h.Sum(nil)
-	copy(o[:redblackhash.KeySize], sum)
-	return
-}
-
 func (f *File) applyContentLength(v string) {
 	if v == "" { return }
 	size, err := strconv.ParseInt(v, 10, 64)
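The removed HashString hashed each link string with blake2b (golang.org/x/crypto/blake2b) into a fixed-size redblackhash.Key so seen links could be remembered individually. Below is a minimal, self-contained sketch of the replacement idea the commit title describes: excluding duplicates within a single directory listing with a short-lived set, so no per-link hashes need to be kept. The helper name dedupLinks and the use of plain strings are assumptions for illustration; the real crawler works with fasturl.URL values.

package main

import "fmt"

// dedupLinks drops duplicate entries within one directory listing.
// The seen set lives only while the listing is parsed, so no per-link
// hash (as HashString previously produced) has to be retained.
func dedupLinks(links []string) []string {
	seen := make(map[string]struct{}, len(links))
	out := links[:0] // reuse the backing array; order is preserved
	for _, l := range links {
		if _, ok := seen[l]; ok {
			continue // duplicate inside this directory; exclude it
		}
		seen[l] = struct{}{}
		out = append(out, l)
	}
	return out
}

func main() {
	listing := []string{"a.txt", "b.txt", "a.txt", "c/"}
	fmt.Println(dedupLinks(listing)) // [a.txt b.txt c/]
}

The directory-level HashDir shown in the hunk header remains, so the crawler can presumably still fingerprint a deduplicated listing as a whole rather than hashing every link.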