granicus.if.org Git - esp-idf/commitdiff
nvs_util: Add support for old and new multipage blob
author Shivani Tipnis <shivani@espressif.com>
Wed, 12 Sep 2018 12:17:24 +0000 (17:47 +0530)
committer Shivani Tipnis <shivani@espressif.com>
Wed, 19 Sep 2018 16:30:08 +0000 (22:00 +0530)
components/nvs_flash/README.rst
components/nvs_flash/nvs_partition_generator/README.rst
components/nvs_flash/nvs_partition_generator/nvs_partition_gen.py
components/nvs_flash/nvs_partition_generator/sample_multipage_blob.csv [moved from components/nvs_flash/nvs_partition_generator/sample.csv with 88% similarity]
components/nvs_flash/nvs_partition_generator/sample_singlepage_blob.csv [new file with mode: 0644]
components/nvs_flash/nvs_partition_generator/testdata/sample_multipage_blob.bin [moved from components/nvs_flash/nvs_partition_generator/testdata/sample.bin with 100% similarity]
components/nvs_flash/nvs_partition_generator/testdata/sample_singlepage_blob.bin [new file with mode: 0644]
components/nvs_flash/test_nvs_host/test_nvs.cpp

index 7225ba52b4215f92c8534b541b328c24fd0c417c..2352236f8f7842fbb6a8f10367d6862b7c14a273 100644 (file)
@@ -147,6 +147,8 @@ Erased (2'b00)
     A key-value pair in this entry has been discarded. Contents of this entry will not be parsed anymore.
 
 
+.. _structure_of_entry:
+
 Structure of entry
 ^^^^^^^^^^^^^^^^^^
 
index 2dacb79a4ac59c687f3c83e39787c9729826beaf..34314da4a89bde794f7f793d67da83f0db27b252 100644 (file)
@@ -46,21 +46,58 @@ When a new namespace entry is encountered in the CSV file, each follow-up entrie
 
 .. note:: First entry in a CSV file should always be ``namespace`` entry.
 
+Multipage Blob Support
+----------------------
+
+By default, binary blobs are allowed to span multiple pages and are written in the format described in section :ref:`structure_of_entry`.
+If the older, single-page format is required (which limits a blob to 1984 bytes), the utility provides an option to disable this feature.
+
 Running the utility
 -------------------
 
-You can run the utility using below command::
+You can run the utility in two modes:
+    - Multipage Blob Support Enabled (v2)
+    - Multipage Blob Support Disabled (v1)
+
+
+*Usage*::
 
-       python nvs_partition_gen.py [-h] input output size
+    python nvs_partition_gen.py [-h] [--version {v1,v2}] input output size
 
 
 Positional arguments:
 
-| Arguments                                    | Description
-|      ---                                                     |       ---
-|  input                               | Path to CSV file to parse. Will use stdin if omitted (a sample.csv is provided)
-|  output                              | Path to output converted binary file. Will use stdout if omitted
-|  size                            | Size of NVS Partition in KB. E.g. 12KB
++------------------------+----------------------------------------------------------------------------------------------+
+|   Arguments            |                                     Description                                              |
++========================+==============================================================================================+
+| input                  |  Path to CSV file to parse. Will use stdin if omitted (sample files are provided)            |
++------------------------+----------------------------------------------------------------------------------------------+
+| output                 |  Path to output converted binary file. Will use stdout if omitted                            |
++------------------------+----------------------------------------------------------------------------------------------+
+| size                   |  Size of NVS Partition in KB. Eg. 12KB                                                       |
++------------------------+----------------------------------------------------------------------------------------------+
+
+Optional arguments:
+
++-------------------------------+---------------------------------------------------------------------------------------+
+|   Arguments                   |                                     Description                                       |
++===============================+=======================================================================================+
+| --version {v1,v2}             |  Set version. Default: v2                                                             |
++-------------------------------+---------------------------------------------------------------------------------------+
+
+
+*Multipage Blob Support Enabled Mode:*
+
+You can run the utility in this mode by setting the version parameter to v2, as shown below.
+A sample CSV file is provided with the utility::
+
+    python nvs_partition_gen.py sample_multipage_blob.csv partition_multipage_blob.bin 12KB --version v2
+
+
+*Multipage Blob Support Disabled Mode:*
+
+You can run the utility in this mode by setting the version parameter to v1, as shown below.
+A sample CSV file is provided with the utility::
+
+    python nvs_partition_gen.py sample_singlepage_blob.csv partition_single_page.bin 12KB --version v1
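+
+The generator can also be driven from another Python script through the
+``nvs_part_gen()`` wrapper (a minimal sketch; it assumes ``nvs_partition_gen.py``
+is importable, e.g. by running from its directory)::
+
+    import nvs_partition_gen
+
+    # size is given as a string in KB, exactly as on the command line;
+    # version_no selects the blob format: 'v1' (single page) or 'v2' (multipage)
+    nvs_partition_gen.nvs_part_gen(input_filename='sample_multipage_blob.csv',
+                                   output_filename='partition_multipage_blob.bin',
+                                   input_size='12KB',
+                                   version_no='v2')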
 
 
 Caveats
index 389e8d944332a3018345afa58b713c859d04ce75..8d78b908e72c1188452bdc03ccec3ea10cfc154f 100755 (executable)
@@ -32,7 +32,8 @@ from os import path
 class Page(object):
     PAGE_PARAMS = {
         "max_size": 4096,
-        "max_blob_size": 4000,
+        "max_old_blob_size": 1984,
+        "max_new_blob_size": 4000,
         "max_entries": 126
     }
 
@@ -57,10 +58,13 @@ class Page(object):
     CHUNK_ANY = 0xFF
     ACTIVE = 0xFFFFFFFE
     FULL = 0xFFFFFFFC
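+    # Format version byte stored in the page header (offset 8, see set_header):
+    # 0xFF selects the original v1 layout, 0xFE the v2 layout with multipage blobs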
+    VERSION1 = 0xFF
+    VERSION2 = 0xFE
 
     def __init__(self, page_num, is_rsrv_page=False):
         self.entry_num = 0
         self.bitmap_array = array.array('B')
+        self.version = Page.VERSION2
         self.page_buf = bytearray(b'\xff')*Page.PAGE_PARAMS["max_size"]
         if not is_rsrv_page:
             self.bitmap_array = self.create_bitmap_array()
@@ -75,6 +79,11 @@ class Page(object):
         page_header[0:4] = struct.pack('<I', page_state_active_seq)
         # set page sequence number
         page_header[4:8] = struct.pack('<I', page_num)
+        # set version
+        if version == Page.VERSION2:
+            page_header[8] = Page.VERSION2
+        elif version == Page.VERSION1:
+            page_header[8] = Page.VERSION1
         # set header's CRC
         crc_data = page_header[4:28]
         crc = zlib.crc32(buffer(crc_data), 0xFFFFFFFF)
@@ -116,7 +125,7 @@ class Page(object):
         entry_struct[4:8] = struct.pack('<I', crc & 0xFFFFFFFF)
         return entry_struct
 
-    def write_varlen_binary_data(self, entry_struct, ns_index, key, data, data_size, total_entry_count, nvs_obj):
+    def write_varlen_binary_data(self, entry_struct, ns_index, key, data, data_size, total_entry_count, nvs_obj):
         chunk_start = 0
         chunk_count = 0
         chunk_index = Page.CHUNK_ANY
@@ -204,10 +213,22 @@ class Page(object):
                 self.write_entry_to_buf(entry_struct, 1)
                 break
 
+        return entry_struct
 
 
-        return entry_struct
+    def write_single_page_entry(self, entry_struct, data, datalen, data_entry_count):
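+        # Writes a variable-length item that must fit within a single page:
+        # all blobs in the old (v1) format, and string values in either format.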
+        # set length of data and compute its CRC
+        entry_struct[24:26] = struct.pack('<H', datalen)
+        crc = zlib.crc32(data, 0xFFFFFFFF)
+        entry_struct[28:32] = struct.pack('<I', crc & 0xFFFFFFFF)
 
+        # compute crc of entry header
+        entry_struct = self.set_crc_header(entry_struct)
+
+        # write entry header
+        self.write_entry_to_buf(entry_struct, 1)
+        # write actual data
+        self.write_entry_to_buf(data, data_entry_count)
 
 
     """
@@ -218,10 +239,15 @@ class Page(object):
         # Set size of data
         datalen = len(data)
 
-        if encoding == "string":
-            if datalen > Page.PAGE_PARAMS["max_blob_size"]:
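+        # In the old (v1) format a blob cannot span pages, so it is capped at
+        # max_old_blob_size regardless of encoding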
+        if version == Page.VERSION1:
+            if datalen > Page.PAGE_PARAMS["max_old_blob_size"]:
                 raise InputError("%s: Size exceeds max allowed length." % key)
 
+        if version == Page.VERSION2:
+            if encoding == "string":
+                if datalen > Page.PAGE_PARAMS["max_new_blob_size"]:
+                    raise InputError("%s: Size exceeds max allowed length." % key)
+
         # Calculate no. of entries data will require
         rounded_size = (datalen + 31) & ~31
         data_entry_count = rounded_size / 32
@@ -237,11 +263,14 @@ class Page(object):
         # Set Namespace Index
         entry_struct[0] = ns_index
         # Set Span
-        if encoding == "string":
+        if version == Page.VERSION2:
+            if encoding == "string":
+                entry_struct[2] = data_entry_count + 1
+            # Set Chunk Index
+            chunk_index = Page.CHUNK_ANY
+            entry_struct[3] = chunk_index
+        else:
             entry_struct[2] = data_entry_count + 1
-        # Set Chunk Index
-        chunk_index = Page.CHUNK_ANY
-        entry_struct[3] = chunk_index
 
         # set key
         key_array = bytearray('\x00')*16
@@ -254,22 +283,12 @@ class Page(object):
         elif encoding in ["hex2bin", "binary", "base64"]:
             entry_struct[1] = Page.BLOB
 
-        if encoding == "binary" or encoding == "hex2bin" or encoding == "base64":
-            entry_struct = self.write_varlen_binary_data(entry_struct,ns_index,key,data,\
-            datalen,total_entry_count,nvs_obj)
+        if version == Page.VERSION2 and encoding in ["binary", "hex2bin", "base64"]:
+            entry_struct = self.write_varlen_binary_data(entry_struct,ns_index,key,data,\
+            datalen,total_entry_count,nvs_obj)
         else:
-            # compute CRC of data
-            entry_struct[24:26] = struct.pack('<H', datalen)
-            crc = zlib.crc32(data, 0xFFFFFFFF)
-            entry_struct[28:32] = struct.pack('<I', crc & 0xFFFFFFFF)
+            self.write_single_page_entry(entry_struct, data, datalen, data_entry_count)
 
-            # compute crc of entry header
-            entry_struct = self.set_crc_header(entry_struct)
-
-            # write entry header
-            self.write_entry_to_buf(entry_struct, 1)
-            # write actual data
-            self.write_entry_to_buf(data, data_entry_count)
 
 
     """ Low-level function to write data of primitive type into page buffer. """
@@ -343,7 +362,7 @@ class NVS(object):
                 except InsufficientSizeError:
                     self.size = None
                     # Creating the last reserved page
-                    self.create_new_page(True)
+                    self.create_new_page(is_rsrv_page=True)
                     break
 
             result = self.get_binary_data()
@@ -357,6 +376,7 @@ class NVS(object):
             self.size = self.size - Page.PAGE_PARAMS["max_size"]
         self.page_num += 1
         new_page = Page(self.page_num, is_rsrv_page)
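+        # tag the page with the selected format version; set_header() writes it
+        # into the page header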
+        new_page.version = version
         self.pages.append(new_page)
         self.cur_page = new_page
         return new_page
@@ -400,7 +420,7 @@ class NVS(object):
                 self.cur_page.write_varlen_data(key, value, encoding, self.namespace_idx, self)
             except PageFullError:
                 new_page = self.create_new_page()
-                new_page.write_varlen_data(key, value, encoding, self.namespace_idx, self)
+                new_page.write_varlen_data(key, value, encoding, self.namespace_idx, self)
                 pass
         elif encoding in primitive_encodings:
             try:
@@ -482,7 +502,7 @@ def nvs_close(nvs_instance):
     """
     nvs_instance.__exit__(None, None, None)
 
-def nvs_part_gen(input_filename=None, output_filename=None, input_size=None):
+def nvs_part_gen(input_filename=None, output_filename=None, input_size=None, version_no=None):
     """ Wrapper to generate nvs partition binary
 
     :param input_filename: Name of input file containing data
@@ -490,9 +510,20 @@ def nvs_part_gen(input_filename=None, output_filename=None, input_size=None):
     :param input_size: Size of partition
     :return: None
     """
+    global version
+    version = version_no
+
+    # Set size
+    input_size = int(input_size.split('KB')[0]) * 1024
+
     if input_size % 4096 !=0:
         sys.exit("Size parameter should be a multiple of 4KB.")
 
+    if version == 'v1':
+        version = Page.VERSION1
+    elif version == 'v2':
+        version = Page.VERSION2
+
     # Update size as a page needs to be reserved of size 4KB
     input_size = input_size - Page.PAGE_PARAMS["max_size"]
 
@@ -503,8 +534,6 @@ def nvs_part_gen(input_filename=None, output_filename=None, input_size=None):
     output_file = open(output_filename, 'wb')
 
     with nvs_open(output_file, input_size) as nvs_obj:
-        # Update size as one page is created
-        #nvs_obj.size = input_size - Page.PAGE_PARAMS["max_size"]
         reader = csv.DictReader(input_file, delimiter=',')
         for row in reader:
             try:
@@ -513,7 +542,7 @@ def nvs_part_gen(input_filename=None, output_filename=None, input_size=None):
                 print(e)
                 input_file.close()
                 output_file.close()
-                exit(-2)
+                sys.exit(-2)
 
     input_file.close()
     output_file.close()
@@ -534,13 +563,20 @@ def main():
             "size",
             help='Size of NVS Partition in KB. Eg. 12KB')
 
+    parser.add_argument(
+            "--version",
+            help='Set version. Default: v2',
+            choices=['v1','v2'],
+            default='v2')
+
+
     args = parser.parse_args()
     input_filename = args.input
     output_filename = args.output
+    input_size = args.size
+    version_no = args.version
 
-    # Set size
-    input_size = int(args.size.split('KB')[0]) * 1024
-    nvs_part_gen(input_filename, output_filename, input_size)
+    nvs_part_gen(input_filename, output_filename, input_size, version_no)
 
 
 
similarity index 88%
rename from components/nvs_flash/nvs_partition_generator/sample.csv
rename to components/nvs_flash/nvs_partition_generator/sample_multipage_blob.csv
index bc70aa14c762a7a2583289eb3aa4d960b4bf24ca..353430a9322b7ad2fa87f3486eb389ad60dd48c5 100644 (file)
@@ -11,4 +11,4 @@ dummyBase64Key,data,base64,MTIzYWJj
 hexFileKey,file,hex2bin,testdata/sample.hex
 base64FileKey,file,base64,testdata/sample.base64
 stringFileKey,file,string,testdata/sample.txt
-binFileKey,file,binary,testdata/sample.bin
+binFileKey,file,binary,testdata/sample_multipage_blob.bin
diff --git a/components/nvs_flash/nvs_partition_generator/sample_singlepage_blob.csv b/components/nvs_flash/nvs_partition_generator/sample_singlepage_blob.csv
new file mode 100644 (file)
index 0000000..1ae3524
--- /dev/null
@@ -0,0 +1,14 @@
+key,type,encoding,value
+dummyNamespace,namespace,,
+dummyU8Key,data,u8,127
+dummyI8Key,data,i8,-128
+dummyU16Key,data,u16,32768
+dummyU32Key,data,u32,4294967295
+dummyI32Key,data,i32,-2147483648
+dummyStringKey,data,string,0A:0B:0C:0D:0E:0F
+dummyHex2BinKey,data,hex2bin,010203abcdef
+dummyBase64Key,data,base64,MTIzYWJj
+hexFileKey,file,hex2bin,testdata/sample.hex
+base64FileKey,file,base64,testdata/sample.base64
+stringFileKey,file,string,testdata/sample.txt
+binFileKey,file,binary,testdata/sample_singlepage_blob.bin
diff --git a/components/nvs_flash/nvs_partition_generator/testdata/sample_singlepage_blob.bin b/components/nvs_flash/nvs_partition_generator/testdata/sample_singlepage_blob.bin
new file mode 100644 (file)
index 0000000..f607291
--- /dev/null
@@ -0,0 +1 @@
+start0000000000000000000000start0123456789abcdef00000000000000000123456789abcdef00000000000000000123456789abcdef00000000000000000123456789abcdef00000000000000000123456789abcdef00000000000000000123456789abcdef0000000000000000
index 6c59f8fa0e344f8f7533be980b60f749415af666..32172cecd45058ea457956cf32a39d6832ff5dc0 100644 (file)
@@ -1991,15 +1991,17 @@ TEST_CASE("Recovery from power-off during modification of blob present in old-fo
 /* Add new tests above */
 /* This test has to be the final one */
 
-TEST_CASE("check partition generation utility", "[nvs_part_gen]")
+TEST_CASE("check partition generation utility with multipage blob support disabled", "[nvs_part_gen]")
 {
     int childpid = fork();
     if (childpid == 0) {
         exit(execlp("python", "python",
                 "../nvs_partition_generator/nvs_partition_gen.py",
-                "../nvs_partition_generator/sample.csv",
-                "../nvs_partition_generator/partition.bin", 
-                "12KB",NULL));
+                "../nvs_partition_generator/sample_singlepage_blob.csv",
+                "../nvs_partition_generator/partition_single_page.bin", 
+                "12KB",
+                "--version",
+                "v1",NULL));
     } else {
         CHECK(childpid > 0);
         int status;
@@ -2008,9 +2010,9 @@ TEST_CASE("check partition generation utility", "[nvs_part_gen]")
     }
 }
 
-TEST_CASE("read data from partition generated via partition generation utility", "[nvs_part_gen]")
+TEST_CASE("read data from partition generated via partition generation utility with multipage blob support disabled", "[nvs_part_gen]")
 {
-    SpiFlashEmulator emu("../nvs_partition_generator/partition.bin");
+    SpiFlashEmulator emu("../nvs_partition_generator/partition_single_page.bin");
     nvs_handle handle;
     TEST_ESP_OK( nvs_flash_init_custom("test", 0, 3) );
     TEST_ESP_OK( nvs_open_from_partition("test", "dummyNamespace", NVS_READONLY, &handle));
@@ -2059,7 +2061,7 @@ TEST_CASE("read data from partition generated via partition generation utility",
     size_t bin_len = sizeof(bin_data);
     char binfiledata[5200];
     ifstream file;
-    file.open("../nvs_partition_generator/testdata/sample.bin");
+    file.open("../nvs_partition_generator/testdata/sample_singlepage_blob.bin");
     file.read(binfiledata,5200);
     TEST_ESP_OK( nvs_get_blob(handle, "binFileKey", bin_data, &bin_len));
     CHECK(memcmp(bin_data, binfiledata, bin_len) == 0);
@@ -2068,6 +2070,86 @@ TEST_CASE("read data from partition generated via partition generation utility",
 
 }
 
+TEST_CASE("check partition generation utility with multipage blob support enabled", "[nvs_part_gen]")
+{
+    int childpid = fork();
+    if (childpid == 0) {
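+        // child process: run the generator on the multipage sample CSV with --version v2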
+        exit(execlp("python", "python",
+                "../nvs_partition_generator/nvs_partition_gen.py",
+                "../nvs_partition_generator/sample_multipage_blob.csv",
+                "../nvs_partition_generator/partition_multipage_blob.bin", 
+                "12KB",
+                "--version",
+                "v2",NULL));
+    } else {
+        CHECK(childpid > 0);
+        int status;
+        waitpid(childpid, &status, 0);
+        CHECK(WEXITSTATUS(status) != -1);
+    }
+}
+
+TEST_CASE("read data from partition generated via partition generation utility with multipage blob support enabled", "[nvs_part_gen]")
+{
+    SpiFlashEmulator emu("../nvs_partition_generator/partition_multipage_blob.bin");
+    nvs_handle handle;
+    TEST_ESP_OK( nvs_flash_init_custom("test", 0, 3) );
+    TEST_ESP_OK( nvs_open_from_partition("test", "dummyNamespace", NVS_READONLY, &handle));
+    uint8_t u8v;
+    TEST_ESP_OK( nvs_get_u8(handle, "dummyU8Key", &u8v));
+    CHECK(u8v == 127);
+    int8_t i8v;
+    TEST_ESP_OK( nvs_get_i8(handle, "dummyI8Key", &i8v));
+    CHECK(i8v == -128);
+    uint16_t u16v;
+    TEST_ESP_OK( nvs_get_u16(handle, "dummyU16Key", &u16v));
+    CHECK(u16v == 32768);
+    uint32_t u32v;
+    TEST_ESP_OK( nvs_get_u32(handle, "dummyU32Key", &u32v));
+    CHECK(u32v == 4294967295);
+    int32_t i32v;
+    TEST_ESP_OK( nvs_get_i32(handle, "dummyI32Key", &i32v));
+    CHECK(i32v == -2147483648);
+
+    char buf[64] = {0};
+    size_t buflen = 64;
+    TEST_ESP_OK( nvs_get_str(handle, "dummyStringKey", buf, &buflen));
+    CHECK(strncmp(buf, "0A:0B:0C:0D:0E:0F", buflen) == 0);
+
+    uint8_t hexdata[] = {0x01, 0x02, 0x03, 0xab, 0xcd, 0xef};
+    buflen = 64;
+    int j;
+    TEST_ESP_OK( nvs_get_blob(handle, "dummyHex2BinKey", buf, &buflen));
+    CHECK(memcmp(buf, hexdata, buflen) == 0);
+    
+    uint8_t base64data[] = {'1', '2', '3', 'a', 'b', 'c'};
+    TEST_ESP_OK( nvs_get_blob(handle, "dummyBase64Key", buf, &buflen));
+    CHECK(memcmp(buf, base64data, buflen) == 0);
+
+    buflen = 64;
+    uint8_t hexfiledata[] = {0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef};
+    TEST_ESP_OK( nvs_get_blob(handle, "hexFileKey", buf, &buflen));
+    CHECK(memcmp(buf, hexfiledata, buflen) == 0);
+
+    buflen = 64;
+    uint8_t strfiledata[64] = "abcdefghijklmnopqrstuvwxyz\0";
+    TEST_ESP_OK( nvs_get_str(handle, "stringFileKey", buf, &buflen));
+    CHECK(memcmp(buf, strfiledata, buflen) == 0);
+
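+    // binFileKey refers to sample_multipage_blob.bin, which is larger than a
+    // single page and is therefore stored as a chunked (v2) blob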
+    char bin_data[5200];
+    size_t bin_len = sizeof(bin_data);
+    char binfiledata[5200];
+    ifstream file;
+    file.open("../nvs_partition_generator/testdata/sample_multipage_blob.bin");
+    file.read(binfiledata,5200);
+    TEST_ESP_OK( nvs_get_blob(handle, "binFileKey", bin_data, &bin_len));
+    CHECK(memcmp(bin_data, binfiledata, bin_len) == 0);
+
+    file.close();
+
+}
+
+
 TEST_CASE("dump all performance data", "[nvs]")
 {
     std::cout << "====================" << std::endl << "Dumping benchmarks" << std::endl;