rec api: add subtree option to the cache flush endpoint
author     Charles-Henri Bruyand <charles-henri.bruyand@open-xchange.com>
           Wed, 2 May 2018 12:07:13 +0000 (14:07 +0200)
committer  Remi Gacogne <remi.gacogne@powerdns.com>
           Wed, 16 May 2018 09:30:48 +0000 (11:30 +0200)
(cherry picked from commit d19c22a15f8e75ebc6ff22665ba8e8c2152957db)

pdns/recursordist/docs/http-api/endpoint-cache.rst
pdns/ws-recursor.cc
regression-tests.api/runtests
regression-tests.api/runtests.py
regression-tests.api/test_Cache.py
regression-tests.api/test_helper.py

pdns/recursordist/docs/http-api/endpoint-cache.rst
index 7a7848debca2542b2f4796a0b6cc07d6591590e2..4b642f32d53586c29074d01db1fbeaebf152c988 100644 (file)
@@ -7,6 +7,7 @@ Cache manipulation endpoint
 
   :query server_id: The name of the server
   :query domain: The domainname to flush for
+  :query subtree: If set to `true`, also flush the whole subtree below *domain* (default=`false`)
 
   **Example Response:**
 
pdns/ws-recursor.cc
index 7b2c9bc85cea8e0a570f7afe27f74faf79dbac83..748441c9a25651437461e2bf13b30862229333a3 100644 (file)
@@ -373,10 +373,11 @@ static void apiServerCacheFlush(HttpRequest* req, HttpResponse* resp) {
     throw HttpMethodNotAllowedException();
 
   DNSName canon = apiNameToDNSName(req->getvars["domain"]);
+  bool subtree = (req->getvars["subtree"].compare("true") == 0);
 
-  int count = broadcastAccFunction<uint64_t>(boost::bind(pleaseWipeCache, canon, false));
-  count += broadcastAccFunction<uint64_t>(boost::bind(pleaseWipePacketCache, canon, false));
-  count += broadcastAccFunction<uint64_t>(boost::bind(pleaseWipeAndCountNegCache, canon, false));
+  int count = broadcastAccFunction<uint64_t>(boost::bind(pleaseWipeCache, canon, subtree));
+  count += broadcastAccFunction<uint64_t>(boost::bind(pleaseWipePacketCache, canon, subtree));
+  count += broadcastAccFunction<uint64_t>(boost::bind(pleaseWipeAndCountNegCache, canon, subtree));
   resp->setBody(Json::object {
     { "count", count },
     { "result", "Flushed cache." }
regression-tests.api/runtests
index beee380d3166cd7542cd6a592843c39002e94d2b..47f25f0f12c6a0838980bb8836613f1c5b435622 100755 (executable)
@@ -9,6 +9,8 @@ fi
 python -V
 pip install -r requirements.txt
 
+export SDIG=$(type -P sdig)
+
 set -e
 if [ "${PDNS_DEBUG}" = "YES" ]; then
   set -x
regression-tests.api/runtests.py
index 8435b5eee8021f49079f6737051d99e1a204cad2..b9586259edd854043fb4a624a4a18cff70787ff5 100755 (executable)
@@ -12,6 +12,7 @@ import time
 
 SQLITE_DB = 'pdns.sqlite3'
 WEBPORT = '5556'
+DNSPORT = 5555
 APIKEY = '1234567890abcdefghijklmnopq-key'
 PDNSUTIL_CMD = ["../pdns/pdnsutil", "--config-dir=."]
 
@@ -63,6 +64,12 @@ def ensure_empty_dir(name):
         shutil.rmtree(name)
     os.mkdir(name)
 
+def format_call_args(cmd):
+    return "$ '%s'" % ("' '".join(cmd))
+
+def run_check_call(cmd, *args, **kwargs):
+    print(format_call_args(cmd))
+    subprocess.check_call(cmd, *args, **kwargs)
 
 wait = ('--wait' in sys.argv)
 if wait:
@@ -83,6 +90,14 @@ daemon = sys.argv[1]
 
 pdns_recursor = os.environ.get("PDNSRECURSOR", "../pdns/recursordist/pdns_recursor")
 
+# Use sdig from $SDIG if it points at an existing binary (set for the recursor build on Travis), otherwise build it from the authoritative source tree.
+sdig = os.environ.get("SDIG", "")
+if sdig:
+    sdig = os.path.abspath(sdig)
+if not sdig or not os.path.exists(sdig):
+    run_check_call(["make", "-C", "../pdns", "sdig"])
+    sdig = "../pdns/sdig"
+
 if daemon == 'authoritative':
 
     # Prepare sqlite DB with some zones.
@@ -155,6 +170,8 @@ test_env.update({
     'DAEMON': daemon,
     'SQLITE_DB': SQLITE_DB,
     'PDNSUTIL_CMD': ' '.join(PDNSUTIL_CMD),
+    'SDIG': sdig,
+    'DNSPORT': str(DNSPORT)
 })
 
 try:
regression-tests.api/test_Cache.py
index 6a3f618d51cf84686dafbeb5c83590009be2faf1..3ab4ea68c6891782b4d8169bc4f485f09738c6a1 100644 (file)
@@ -1,4 +1,4 @@
-from test_helper import ApiTestCase, is_auth, is_recursor
+from test_helper import ApiTestCase, is_auth, is_recursor, sdig
 
 
 class Servers(ApiTestCase):
@@ -9,6 +9,29 @@ class Servers(ApiTestCase):
         data = r.json()
         self.assertIn('count', data)
 
+    def test_flush_count(self):
+        sdig("ns1.example.com", 'A')
+        r = self.session.put(self.url("/api/v1/servers/localhost/cache/flush?domain=ns1.example.com."))
+        self.assert_success_json(r)
+        data = r.json()
+        self.assertIn('count', data)
+        self.assertEquals(1, data['count'])
+
+    def test_flush_subtree(self):
+        sdig("ns1.example.com", 'A')
+        sdig("ns2.example.com", 'A')
+        r = self.session.put(self.url("/api/v1/servers/localhost/cache/flush?domain=example.com.&subtree=false"))
+        self.assert_success_json(r)
+        data = r.json()
+        self.assertIn('count', data)
+        # This is 0 on 4.1.x but 1 on master, because master sends an extra query for example.com "to create statistic data".
+        self.assertEquals(0, data['count'])
+        r = self.session.put(self.url("/api/v1/servers/localhost/cache/flush?domain=example.com.&subtree=true"))
+        self.assert_success_json(r)
+        data = r.json()
+        self.assertIn('count', data)
+        self.assertEquals(2, data['count'])
+
     def test_flush_root(self):
         r = self.session.put(self.url("/api/v1/servers/localhost/cache/flush?domain=."))
         self.assert_success_json(r)
regression-tests.api/test_helper.py
index 14c2ea4c21bc289d1bca9cc21ac8a8c8feaddf79..46a80f831a7bcb377a7d9e2e7e1669a96f07149a 100644 (file)
@@ -9,7 +9,8 @@ import subprocess
 DAEMON = os.environ.get('DAEMON', 'authoritative')
 PDNSUTIL_CMD = os.environ.get('PDNSUTIL_CMD', 'NOT_SET BUT_THIS MIGHT_BE_A_LIST').split(' ')
 SQLITE_DB = os.environ.get('SQLITE_DB', 'pdns.sqlite3')
-
+SDIG = os.environ.get('SDIG', 'sdig')
+DNSPORT = os.environ.get('DNSPORT', '53')
 
 class ApiTestCase(unittest.TestCase):
 
@@ -77,3 +78,9 @@ def get_db_records(zonename, qtype):
 def pdnsutil_rectify(zonename):
     """Run pdnsutil rectify-zone on the given zone."""
     subprocess.check_call(PDNSUTIL_CMD + ['rectify-zone', zonename])
+
+def sdig(*args):
+    try:
+        return subprocess.check_call([SDIG, '127.0.0.1', str(DNSPORT)] + list(args))
+    except subprocess.CalledProcessError as except_inst:
+        raise RuntimeError("sdig %s failed with exit code %s" % (' '.join(args), except_inst.returncode))