import os
import abc
import codecs
+import warnings
# Import _thread instead of threading to reduce startup cost
try:
from _thread import allocate_lock as Lock
The constructor creates a BufferedWriter for the given writeable raw
stream. If the buffer_size is not given, it defaults to
- DEFAULT_BUFFER_SIZE. If max_buffer_size is omitted, it defaults to
- twice the buffer size.
+ DEFAULT_BUFFER_SIZE.
"""
+ _warning_stack_offset = 2
+
def __init__(self, raw,
buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
raw._checkWritable()
_BufferedIOMixin.__init__(self, raw)
if buffer_size <= 0:
raise ValueError("invalid buffer size")
+ if max_buffer_size is not None:
+ warnings.warn("max_buffer_size is deprecated", DeprecationWarning,
+ self._warning_stack_offset)
self.buffer_size = buffer_size
self._write_buf = bytearray()
self._write_lock = Lock()
reader and writer are RawIOBase objects that are readable and
writeable respectively. If the buffer_size is omitted it defaults to
- DEFAULT_BUFFER_SIZE. The max_buffer_size (for the buffered writer)
- defaults to twice the buffer size.
+ DEFAULT_BUFFER_SIZE.
"""
# XXX The usefulness of this (compared to having two separate IO
The arguments are two RawIO instances.
"""
+ if max_buffer_size is not None:
+ warnings.warn("max_buffer_size is deprecated", DeprecationWarning, 2)
reader._checkReadable()
writer._checkWritable()
self.reader = BufferedReader(reader, buffer_size)
- self.writer = BufferedWriter(writer, buffer_size, max_buffer_size)
+ self.writer = BufferedWriter(writer, buffer_size)
def read(self, n=None):
if n is None:
The constructor creates a reader and writer for a seekable stream,
raw, given in the first argument. If the buffer_size is omitted it
- defaults to DEFAULT_BUFFER_SIZE. The max_buffer_size (for the buffered
- writer) defaults to twice the buffer size.
+ defaults to DEFAULT_BUFFER_SIZE.
"""
+ _warning_stack_offset = 3
+
def __init__(self, raw,
buffer_size=DEFAULT_BUFFER_SIZE, max_buffer_size=None):
raw._checkSeekable()
import threading
import random
import unittest
+import warnings
import weakref
import gc
import abc
def test_write_non_blocking(self):
raw = self.MockNonBlockWriterIO()
- bufio = self.tp(raw, 8, 8)
+ bufio = self.tp(raw, 8)
self.assertEquals(bufio.write(b"abcd"), 4)
self.assertEquals(bufio.write(b"efghi"), 5)
self.assertRaises(IOError, bufio.tell)
self.assertRaises(IOError, bufio.write, b"abcdef")
+ def test_max_buffer_size_deprecation(self):
+ with support.check_warnings() as w:
+ warnings.simplefilter("always", DeprecationWarning)
+ self.tp(self.MockRawIO(), 8, 12)
+ self.assertEqual(len(w.warnings), 1)
+ warning = w.warnings[0]
+ self.assertTrue(warning.category is DeprecationWarning)
+ self.assertEqual(str(warning.message),
+ "max_buffer_size is deprecated")
+
+
class CBufferedWriterTest(BufferedWriterTest):
tp = io.BufferedWriter
pair = self.tp(r, w)
self.assertFalse(pair.closed)
+ def test_max_buffer_size_deprecation(self):
+ with support.check_warnings() as w:
+ warnings.simplefilter("always", DeprecationWarning)
+ self.tp(self.MockRawIO(), self.MockRawIO(), 8, 12)
+ self.assertEqual(len(w.warnings), 1)
+ warning = w.warnings[0]
+ self.assertTrue(warning.category is DeprecationWarning)
+ self.assertEqual(str(warning.message),
+ "max_buffer_size is deprecated")
+
# XXX More Tests
class CBufferedRWPairTest(BufferedRWPairTest):
def test_read_and_write(self):
raw = self.MockRawIO((b"asdf", b"ghjk"))
- rw = self.tp(raw, 8, 12)
+ rw = self.tp(raw, 8)
self.assertEqual(b"as", rw.read(2))
rw.write(b"ddd")
Library
-------
+- The max_buffer_size arguments of io.BufferedWriter, io.BufferedRWPair, and
+ io.BufferedRandom have been deprecated for removal in Python 3.2.
+
- Issue #5068: Fixed the tarfile._BZ2Proxy.read() method that would loop
forever on incomplete input. That caused tarfile.open() to hang when used
with mode 'r' or 'r:bz2' and a fileobj argument that contained no data or
};
\f
+
+static int
+complain_about_max_buffer_size(void)
+{
+ if (PyErr_WarnEx(PyExc_DeprecationWarning,
+ "max_buffer_size is deprecated", 1) < 0)
+ return 0;
+ return 1;
+}
+
/*
* class BufferedWriter
*/
- /* TODO: properly deprecate max_buffer_size */
char *kwlist[] = {"raw", "buffer_size", "max_buffer_size", NULL};
Py_ssize_t buffer_size = DEFAULT_BUFFER_SIZE;
- Py_ssize_t max_buffer_size = -1;
+ Py_ssize_t max_buffer_size = -234;
PyObject *raw;
self->ok = 0;
return -1;
}
+ if (max_buffer_size != -234 && !complain_about_max_buffer_size())
+ return -1;
+
if (_PyIOBase_checkWritable(raw, Py_True) == NULL)
return -1;
"\n"
"reader and writer are RawIOBase objects that are readable and\n"
"writeable respectively. If the buffer_size is omitted it defaults to\n"
- "DEFAULT_BUFFER_SIZE. The max_buffer_size (for the buffered writer)\n"
- "defaults to twice the buffer size.\n"
+ "DEFAULT_BUFFER_SIZE.\n"
);
/* XXX The usefulness of this (compared to having two separate IO objects) is
{
PyObject *reader, *writer;
Py_ssize_t buffer_size = DEFAULT_BUFFER_SIZE;
- Py_ssize_t max_buffer_size = -1;
+ Py_ssize_t max_buffer_size = -234;
if (!PyArg_ParseTuple(args, "OO|nn:BufferedRWPair", &reader, &writer,
&buffer_size, &max_buffer_size)) {
return -1;
}
+ if (max_buffer_size != -234 && !complain_about_max_buffer_size())
+ return -1;
+
if (_PyIOBase_checkReadable(reader, Py_True) == NULL)
return -1;
if (_PyIOBase_checkWritable(writer, Py_True) == NULL)
if (self->reader == NULL)
return -1;
- args = Py_BuildValue("(nn)", buffer_size, max_buffer_size);
+ args = Py_BuildValue("(n)", buffer_size);
if (args == NULL) {
Py_CLEAR(self->reader);
return -1;
{
char *kwlist[] = {"raw", "buffer_size", "max_buffer_size", NULL};
Py_ssize_t buffer_size = DEFAULT_BUFFER_SIZE;
- Py_ssize_t max_buffer_size = -1;
+ Py_ssize_t max_buffer_size = -234;
PyObject *raw;
self->ok = 0;
return -1;
}
+ if (max_buffer_size != -234 && !complain_about_max_buffer_size())
+ return -1;
+
if (_PyIOBase_checkSeekable(raw, Py_True) == NULL)
return -1;
if (_PyIOBase_checkReadable(raw, Py_True) == NULL)