        self.check('sha1', "a" * 1000000,
                   "34aa973cd4c4daa4f61eeb2bdbad27316534016f")
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_sha1_huge(self, size):
+        if size == _4G + 5:
+            try:
+                self.check('sha1', 'A'*size,
+                           '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
+            except OverflowError:
+                pass # 32-bit arch
    # use the examples from Federal Information Processing Standards
    # Publication 180-2, Secure Hash Standard, 2002 August 1
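For reference, the new bigmem case can be reproduced by hand on a 64-bit build with about 5 GB of free memory. The session below is hypothetical and not part of the patch; it drives the bundled _sha module (the code patched below) directly, since an OpenSSL-backed hashlib.sha1 takes a different code path. The expected digest is the one the test asserts.

    # Hypothetical reproduction of test_case_sha1_huge (64-bit build,
    # roughly 5 GB of free memory); not part of the patch itself.
    import _sha                   # fallback C module patched below

    digest = _sha.new('A' * (2**32 + 5)).hexdigest()

    # Expected value from the test above; before the fix, the length was
    # truncated and a different digest came back.
    assert digest == '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b'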
- Issue #14888: Fix misbehaviour of the _md5 module when called on data
  larger than 2**32 bytes.
+- Issue #15908: Fix misbehaviour of the sha1 module when called on data
+  larger than 2**32 bytes.
+
- Issue #14875: Use float('inf') instead of float('1e66666') in the json module.
- Issue #14572: Prevent build failures with pre-3.5.0 versions of
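Both hash-module entries describe the same failure mode: the buffer length is a Py_ssize_t, but the low-level update routine takes an unsigned int, so on a 64-bit build a length of 2**32 bytes or more wraps around modulo 2**32 and only the remainder is actually hashed. A minimal sketch of that wrap-around in plain Python (illustration only, assuming the usual 32-bit unsigned int):

    length = 2**32 + 5            # size of the data handed to sha1
    truncated = length % 2**32    # effect of the old narrowing cast
    assert truncated == 5         # only the first 5 bytes were hashed before the fix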
    static char *kwlist[] = {"string", NULL};
    SHAobject *new;
    Py_buffer view = { 0 };
+    Py_ssize_t n;
+    unsigned char *buf;
    if (!PyArg_ParseTupleAndKeywords(args, kwdict, "|s*:new", kwlist,
                                     &view)) {
        PyBuffer_Release(&view);
        return NULL;
    }
-    if (view.len > 0) {
-        sha_update(new, (unsigned char*)view.buf,
-                   Py_SAFE_DOWNCAST(view.len, Py_ssize_t, unsigned int));
+
+    n = view.len;
+    buf = (unsigned char *) view.buf;
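+    /* sha_update() takes an unsigned int length, so feed the buffer to it
+       in chunks of at most INT_MAX bytes; this keeps inputs of 2**32 bytes
+       or more from being truncated by the narrowing cast. */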
+    while (n > 0) {
+        Py_ssize_t nbytes;
+        if (n > INT_MAX)
+            nbytes = INT_MAX;
+        else
+            nbytes = n;
+        sha_update(new, buf,
+                   Py_SAFE_DOWNCAST(nbytes, Py_ssize_t, unsigned int));
+        buf += nbytes;
+        n -= nbytes;
    }
+
    PyBuffer_Release(&view);
    return (PyObject *)new;
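The loop above relies on SHA-1 being a streaming hash: feeding the data through sha_update() in INT_MAX-sized pieces produces the same digest as a single call over the whole buffer would. The same invariant can be checked from Python with hashlib; the sketch below uses a 10 MiB buffer and a 1 MiB chunk size as stand-ins for the >4 GiB case and INT_MAX.

    import hashlib

    data = b'A' * (10 * 2**20)     # small stand-in for a >4 GiB buffer
    chunk = 2**20                  # stand-in for the INT_MAX chunk limit

    one_shot = hashlib.sha1(data).hexdigest()

    h = hashlib.sha1()
    for start in range(0, len(data), chunk):
        h.update(data[start:start + chunk])    # incremental, chunked updates

    assert h.hexdigest() == one_shot           # chunking does not change the digest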