 RobotTest(7, doc, good, bad)

-class TestCase(unittest.TestCase):
-    def runTest(self):
+class NetworkTestCase(unittest.TestCase):
+
+    def testPasswordProtectedSite(self):
         support.requires('network')
         # whole site is password-protected.
         url = 'http://mueblesmoraleda.com'
         parser = urllib.robotparser.RobotFileParser()
         parser.set_url(url)
         parser.read()
         self.assertEqual(parser.can_fetch("*", url+"/robots.txt"), False)

+    def testPythonOrg(self):
+        support.requires('network')
+        parser = urllib.robotparser.RobotFileParser(
+            "http://www.python.org/robots.txt")
+        parser.read()
+        self.assertTrue(parser.can_fetch("*",
+                        "http://www.python.org/robots.txt"))
+
 def test_main():
+    support.run_unittest(NetworkTestCase)
     support.run_unittest(tests)
-    TestCase().run()

 if __name__=='__main__':
     support.Verbose = 1
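
(For reference, not part of the patch: a minimal sketch of what the new testPythonOrg case exercises, run by hand. It needs network access, and the python.org URL and the "*" user agent are simply the values the test uses.)

    import urllib.robotparser

    # Point the parser at a live robots.txt and fetch it, as read() does.
    parser = urllib.robotparser.RobotFileParser("http://www.python.org/robots.txt")
    parser.read()
    # The test asserts that fetching robots.txt itself is allowed.
    print(parser.can_fetch("*", "http://www.python.org/robots.txt"))
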
             elif err.code >= 400:
                 self.allow_all = True
         else:
-            self.parse(f.read().splitlines())
+            raw = f.read()
+            self.parse(raw.decode("utf-8").splitlines())

     def _add_entry(self, entry):
         if "*" in entry.useragents:
return True
# search for given user agent matches
# the first match counts
- url = urllib.parse.quote(urllib.parse.urlparse(urllib.parse.unquote(url))[2]) or "/"
+ url = urllib.parse.quote(
+ urllib.parse.urlparse(urllib.parse.unquote(url))[2])
+ if not url:
+ url = "/"
for entry in self.entries:
if entry.applies_to(useragent):
return entry.allowance(url)
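
(Again for illustration only: the rewritten path handling means a URL with an empty path is normalized to "/" before the rules are checked. A quick local check using parse(); the example.com URLs and the rule set are made up for the sketch:)

    import urllib.robotparser

    parser = urllib.robotparser.RobotFileParser()
    parser.parse("User-agent: *\nDisallow: /private/".splitlines())
    print(parser.can_fetch("*", "http://example.com/private/page.html"))  # False: matches /private/
    print(parser.can_fetch("*", "http://example.com"))                    # True: empty path treated as "/"
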