granicus.if.org Git - python/commitdiff
bpo-35922: Fix RobotFileParser when robots.txt has no relevant crawl delay or request...
author     Rémi Lapeyre <remi.lapeyre@henki.fr>
           Sun, 16 Jun 2019 06:48:57 +0000 (08:48 +0200)
committer  Tal Einat <taleinat@gmail.com>
           Sun, 16 Jun 2019 06:48:57 +0000 (09:48 +0300)
Co-Authored-By: Tal Einat <taleinat+github@gmail.com>
Lib/test/test_robotparser.py
Lib/urllib/robotparser.py
Misc/NEWS.d/next/Library/2019-06-11-19-34-29.bpo-35922.rxpzWr.rst [new file with mode: 0644]
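For context, a minimal sketch of the failure this commit addresses (illustrative, not part of the commit; the user agent name is made up). An empty robots.txt produces no entries, so default_entry stays None and both lookups used to raise AttributeError:

import urllib.robotparser

parser = urllib.robotparser.RobotFileParser()
parser.modified()   # mark the file as read, as the test suite's setUp does
parser.parse([])    # empty robots.txt: no entries, no default '*' entry

# Before this commit: AttributeError: 'NoneType' object has no attribute 'delay'
# After this commit:  both calls return None
print(parser.crawl_delay("figtree"))
print(parser.request_rate("figtree"))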

index 84a267ad9567ee7c991918581958d0dbbdbf4392..77cd7c4d29dfe6f5f12ee3bc0578967db759a12d 100644 (file)
@@ -97,30 +97,38 @@ Disallow: /
 
 
 class BaseRequestRateTest(BaseRobotTest):
+    request_rate = None
+    crawl_delay = None
 
     def test_request_rate(self):
+        parser = self.parser
         for url in self.good + self.bad:
             agent, url = self.get_agent_and_url(url)
             with self.subTest(url=url, agent=agent):
-                if self.crawl_delay:
-                    self.assertEqual(
-                        self.parser.crawl_delay(agent), self.crawl_delay
-                    )
-                if self.request_rate:
+                self.assertEqual(parser.crawl_delay(agent), self.crawl_delay)
+
+                parsed_request_rate = parser.request_rate(agent)
+                self.assertEqual(parsed_request_rate, self.request_rate)
+                if self.request_rate is not None:
                     self.assertIsInstance(
-                        self.parser.request_rate(agent),
+                        parsed_request_rate,
                         urllib.robotparser.RequestRate
                     )
                     self.assertEqual(
-                        self.parser.request_rate(agent).requests,
+                        parsed_request_rate.requests,
                         self.request_rate.requests
                     )
                     self.assertEqual(
-                        self.parser.request_rate(agent).seconds,
+                        parsed_request_rate.seconds,
                         self.request_rate.seconds
                     )
 
 
+class EmptyFileTest(BaseRequestRateTest, unittest.TestCase):
+    robots_txt = ''
+    good = ['/foo']
+
+
 class CrawlDelayAndRequestRateTest(BaseRequestRateTest, unittest.TestCase):
     robots_txt = """\
 User-agent: figtree
@@ -141,10 +149,6 @@ Disallow: /%7ejoe/index.html
 
 class DifferentAgentTest(CrawlDelayAndRequestRateTest):
     agent = 'FigTree Robot libwww-perl/5.04'
-    # these are not actually tested, but we still need to parse it
-    # in order to accommodate the input parameters
-    request_rate = None
-    crawl_delay = None
 
 
 class InvalidRequestRateTest(BaseRobotTest, unittest.TestCase):
index 7089916a4f81cc0bc9158aa68d51be7c9ba5726b..c58565e39451461f03dc7d9fb652554c96615f00 100644 (file)
@@ -186,7 +186,9 @@ class RobotFileParser:
         for entry in self.entries:
             if entry.applies_to(useragent):
                 return entry.delay
-        return self.default_entry.delay
+        if self.default_entry:
+            return self.default_entry.delay
+        return None
 
     def request_rate(self, useragent):
         if not self.mtime():
@@ -194,7 +196,9 @@ class RobotFileParser:
         for entry in self.entries:
             if entry.applies_to(useragent):
                 return entry.req_rate
-        return self.default_entry.req_rate
+        if self.default_entry:
+            return self.default_entry.req_rate
+        return None
 
     def site_maps(self):
         if not self.sitemaps:
diff --git a/Misc/NEWS.d/next/Library/2019-06-11-19-34-29.bpo-35922.rxpzWr.rst b/Misc/NEWS.d/next/Library/2019-06-11-19-34-29.bpo-35922.rxpzWr.rst
new file mode 100644 (file)
index 0000000..5271a49
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2019-06-11-19-34-29.bpo-35922.rxpzWr.rst
@@ -0,0 +1,4 @@
+Fix :meth:`RobotFileParser.crawl_delay` and
+:meth:`RobotFileParser.request_rate` to return ``None`` rather than
+raise :exc:`AttributeError` when no relevant rule is defined in the
+robots.txt file.  Patch by Rémi Lapeyre.
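
Since both methods can now return None, callers that expect a numeric delay should handle that case themselves. A sketch of such a caller (the helper name, fallback value, and robots.txt content are illustrative assumptions, not part of this change):

import urllib.robotparser

def polite_delay(parser, useragent, default_delay=1.0):
    # Prefer an explicit Crawl-delay, then derive one from Request-rate,
    # and fall back to a fixed value when robots.txt defines neither.
    delay = parser.crawl_delay(useragent)
    if delay is not None:
        return delay
    rate = parser.request_rate(useragent)  # RequestRate(requests, seconds) or None
    if rate is not None and rate.requests:
        return rate.seconds / rate.requests
    return default_delay

rp = urllib.robotparser.RobotFileParser()
rp.modified()
rp.parse("User-agent: *\nDisallow:".splitlines())
print(polite_delay(rp, "figtree"))  # 1.0, since no delay or rate is defined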