crawler: respect robots.txt

commit d6bc032d24
parent eb6769c904

2 changed files with 40 additions and 9 deletions
@@ -9,17 +9,20 @@ import (
 	"net/url"
 	"time"
 
-	//"github.com/temoto/robotstxt" // TODO
+	"github.com/temoto/robotstxt"
 	"golang.org/x/net/html"
 
 	"git.sr.ht/~sircmpwn/searchhut/database"
 )
 
+const USER_AGENT = "SearchHut Bot 0.0; https://sr.ht/~sircmpwn/searchhut"
+
 type Crawler struct {
 	Client   *http.Client
 	Domain   string
 	DomainID int
 	Delay    time.Duration
+	Robots   *robotstxt.Group
 
 	db   *sql.DB
 	seen map[string]struct{}
@@ -41,7 +44,9 @@ func NewCrawler(db *sql.DB, domain string) *Crawler {
 		Client:   client,
 		Domain:   domain,
 		DomainID: domainID,
-		Delay:    1 * time.Second, // TODO: Increase me
+		// TODO: Dynamic crawl delay based on remote performance
+		Delay:    2 * time.Second,
+		Robots:   nil,
 
 		db:   db,
 		seen: make(map[string]struct{}),
@@ -51,29 +56,44 @@ func NewCrawler(db *sql.DB, domain string) *Crawler {
 func (c *Crawler) Crawl() {
 	log.Printf("Indexing %s (domain %d)", c.Domain, c.DomainID)
 	ctx := database.Context(context.Background(), c.db)
-	url, err := url.Parse(fmt.Sprintf("https://%s", c.Domain))
+
+	url, err := url.Parse(fmt.Sprintf("https://%s/robots.txt", c.Domain))
 	if err != nil {
-		panic(err)
+		log.Fatal(err)
+	}
+	resp, err := c.Get(ctx, url)
+	robots, err := robotstxt.FromResponse(resp)
+	resp.Body.Close()
+	if err == nil {
+		log.Println("Found applicable robots.txt")
+		c.Robots = robots.FindGroup(USER_AGENT)
+	}
+	if c.Robots != nil && c.Robots.CrawlDelay != 0 {
+		c.Delay = c.Robots.CrawlDelay
+	}
+
+	url, err = url.Parse(fmt.Sprintf("https://%s", c.Domain))
+	if err != nil {
+		log.Fatal(err)
 	}
 	c.Schedule(url)
 
 	for len(c.schedule) != 0 {
 		next := c.schedule[0]
 		c.schedule = c.schedule[1:]
 		if err := c.Index(ctx, next); err != nil {
-			log.Fatal(err)
+			log.Println(err)
 		}
 		time.Sleep(c.Delay)
 	}
 }
 
 func (c *Crawler) Get(ctx context.Context, url *url.URL) (*http.Response, error) {
-	req, err := http.NewRequestWithContext(ctx,
-		"GET", url.String(), http.NoBody)
+	req, err := http.NewRequestWithContext(ctx, "GET", url.String(), http.NoBody)
 	if err != nil {
 		return nil, err
 	}
-	ua := "SearchHut Bot 0.0; https://sr.ht/~sircmpwn/searchhut"
-	req.Header.Add("User-Agent", ua)
+	req.Header.Add("User-Agent", USER_AGENT)
 	return c.Client.Do(req)
 }
@@ -81,6 +101,9 @@ func (c *Crawler) Schedule(url *url.URL) {
 	if url.User != nil || url.Host != c.Domain || url.Scheme != "https" {
 		return
 	}
+	if c.Robots != nil && !c.Robots.Test(url.Path) {
+		return
+	}
 	trimmed := *url
 	trimmed.RawQuery = ""
 	trimmed.Fragment = ""
@@ -17,7 +17,15 @@ func (c *Crawler) Index(ctx context.Context, url *url.URL) error {
 	log.Println(url.String())
+
 	resp, err := c.Get(ctx, url)
+	if err != nil {
+		return err
+	}
 	defer resp.Body.Close()
+	if resp.StatusCode != 200 {
+		log.Printf("Unexpected status code %d", resp.StatusCode)
+		return nil
+	}
 
 	contentType := resp.Header.Get("Content-Type")
 	if contentType == "" {
 		return nil
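For reference, the github.com/temoto/robotstxt flow this commit adopts is fetch, parse, select a group, then test paths. A minimal standalone sketch of that flow follows; the example.org domain, the path, and the shortened agent string are illustrative placeholders, not part of this commit:

```go
package main

import (
	"log"
	"net/http"

	"github.com/temoto/robotstxt"
)

func main() {
	// Placeholder domain; the crawler fetches https://<domain>/robots.txt.
	resp, err := http.Get("https://example.org/robots.txt")
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()

	// FromResponse keys off the status code: 2xx parses the body,
	// 4xx yields an allow-all ruleset, 5xx returns an error.
	robots, err := robotstxt.FromResponse(resp)
	if err != nil {
		log.Fatal(err)
	}

	// Select the rules for our agent; falls back to the "*" group
	// when no specific User-agent group matches.
	group := robots.FindGroup("SearchHut Bot 0.0")

	// Test reports whether this path may be crawled under that group.
	if group.Test("/some/path") {
		log.Println("allowed; crawl delay:", group.CrawlDelay)
	} else {
		log.Println("disallowed by robots.txt")
	}
}
```

This mirrors the commit: Crawl seeds c.Robots via FromResponse and FindGroup (honoring Crawl-delay when present), and Schedule drops any URL whose path fails c.Robots.Test.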