diff --git a/robotstxt.go b/robotstxt.go
index 8a597b7..359d877 100644
--- a/robotstxt.go
+++ b/robotstxt.go
@@ -147,16 +147,8 @@ func (r *RobotsData) TestAgent(path, agent string) bool {
 	// Find a group of rules that applies to this agent
 	// From Google's spec:
 	// The user-agent is non-case-sensitive.
-	if g := r.FindGroup(agent); g != nil {
-		// Find a rule that applies to this url
-		if r := g.findRule(path); r != nil {
-			return r.allow
-		}
-	}
-
-	// From Google's spec:
-	// By default, there are no restrictions for crawling for the designated crawlers.
-	return true
+	g := r.FindGroup(agent)
+	return g.Test(path)
 }
 
 // From Google's spec:
@@ -195,7 +187,8 @@ func (g *Group) Test(path string) bool {
 		return r.allow
 	}
 
-	// When no rule applies, allow by default
+	// From Google's spec:
+	// By default, there are no restrictions for crawling for the designated crawlers.
 	return true
 }
 
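
For reference, a minimal usage sketch of the refactored call path. The import path and the FromString constructor are assumptions, not part of this diff; what the diff itself establishes is that TestAgent is now a thin wrapper over FindGroup plus Group.Test, so the two forms below should give the same answer.

```go
package main

import (
	"fmt"

	"github.com/temoto/robotstxt" // assumed import path for this package
)

func main() {
	// FromString is assumed to be an existing constructor in this package.
	robots, err := robotstxt.FromString("User-agent: *\nDisallow: /private/\n")
	if err != nil {
		panic(err)
	}

	// One-shot check: TestAgent now delegates to FindGroup + Group.Test.
	fmt.Println(robots.TestAgent("/public/page", "FooBot"))  // true
	fmt.Println(robots.TestAgent("/private/page", "FooBot")) // false

	// Equivalent two-step form; handy when checking many paths for one agent.
	group := robots.FindGroup("FooBot")
	fmt.Println(group.Test("/private/page")) // false
}
```

Note that the new TestAgent body calls g.Test without a nil check, so this change relies on FindGroup always returning a usable group for unknown agents.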