granicus.if.org Git - graphviz/commitdiff
extokens: fix missing NUL terminator append in GVPR tokenization
authorMatthew Fernandez <matthew.fernandez@gmail.com>
Sat, 16 Oct 2021 20:52:57 +0000 (13:52 -0700)
committerMatthew Fernandez <matthew.fernandez@gmail.com>
Tue, 19 Oct 2021 14:46:14 +0000 (07:46 -0700)
This is the second half of a bug fix following the prior commit.

Commit 971293551421455a0d939b9f8cea17356b7968f8 refactored this code to avoid
the use of an SFIO buffer, inadvertently introducing a bug. The change did not
account for the source buffer not being NUL terminated. This fix hews closer to
the original code, not assuming a NUL terminator and copying a known number of
bytes into the destination.

Fixes #2138.

CHANGELOG.md
lib/expr/exeval.c
rtest/test_regression.py

index 70b437a85022ffedd6c571aa7c65a3b5bfebbde3..eede971563d7b01ff6610da5e136f1c0f2a92d0c 100644 (file)
@@ -6,6 +6,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ## [Unreleased (2.49.3)]
 
+### Fixed
+
+- gvpr "split", "tokens", and "index" functions produce incorrect results #2138.
+  This was a regression that occurred between 2.47.1 and 2.47.2.
+
 ## [2.49.2] – 2021-10-16
 
 ### Changed
index c5abbf9e39f58b9169ed9d671d771a8aa860e989..92557f6869cdc2757ff9b43283b690444ca870e6 100644 (file)
@@ -934,7 +934,8 @@ extokens(Expr_t * ex, Exnode_t * expr, void *env)
                if (tok == NULL) {
                        tok = exnospace();
                } else {
-                       strncpy(tok, str, sz + 1);
+                       memcpy(tok, str, sz);
+                       tok[sz] = '\0';
                }
                addItem (arr, v, tok);
                v.integer++;
index 2fb0098e763c6a807b298b40e0d8c9568d12e311..8b888c2660f9f9e427874ffe7dc837ef9bd28a12 100644 (file)
@@ -1251,7 +1251,6 @@ def test_2131():
 @pytest.mark.skipif(shutil.which("gvpr") is None,
                     reason="gvpr not available")
 @pytest.mark.parametrize("examine", ("indices", "tokens"))
-@pytest.mark.xfail(strict=True)
 def test_2138(examine: str):
   """
   gvpr splitting and tokenizing should not result in trailing garbage