Fix #80384: limit read buffer size

In the case of a stream with no filters, php_stream_fill_read_buffer reads at
most stream->chunk_size bytes into the read buffer. If the stream has filters
attached, however, it could unnecessarily buffer a large amount of data.

With this change, php_stream_fill_read_buffer only proceeds until the smaller
of the requested size and stream->chunk_size is available in the read buffer.

Co-authored-by: Christoph M. Becker <cmbecker69@gmx.de>

Closes GH-6444.
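To make the buffering change concrete, here is a small stand-alone model of the
old and new fill policies. This is toy code only: simulate_fill, its parameters,
and the 8 MiB / 8192-byte numbers are illustrative assumptions, not part of
php-src; the real logic lives in _php_stream_fill_read_buffer.

/*
 * Toy model of a single read-buffer fill for a filtered stream (simulate_fill
 * is an invented helper, not real php-src code).  "old" keeps pulling data
 * through the filter chain until `size` bytes are buffered or EOF is hit;
 * "new" stops at MIN(size, chunk_size).
 */
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

static size_t simulate_fill(size_t size, size_t chunk_size,
                            size_t file_size, int capped)
{
    size_t target = capped ? MIN(size, chunk_size) : size;
    size_t buffered = 0, file_pos = 0;

    /* read one chunk of raw input per iteration and append the filtered
     * output to the read buffer, as the while loop in the diff below does */
    while (file_pos < file_size && buffered < target) {
        size_t justread = MIN(chunk_size, file_size - file_pos);
        file_pos += justread;
        buffered += justread;
    }
    return buffered;
}

int main(void)
{
    size_t request = 8 * 1024 * 1024;   /* like the fread() in the new test */
    size_t chunk   = 8192;              /* default stream chunk size */
    size_t file    = 8 * 1024 * 1024;   /* size of the temporary file */

    printf("old policy buffers %zu bytes in one fill\n",
           simulate_fill(request, chunk, file, 0));    /* 8388608 */
    printf("new policy buffers %zu bytes in one fill\n",
           simulate_fill(request, chunk, file, 1));    /* 8192 */
    return 0;
}

Under the old condition a single large fread() on a filtered stream pulled the
whole file through the filter chain into the read buffer at once, which is the
"filter buffers entire read until file closed" behaviour reported in the bug;
the cap limits each fill to roughly one chunk.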
Authored by Adam Seitz on 2020-12-02 00:40:16 +01:00; committed by Christoph M. Becker.
parent b043759cb4
commit 70dfbe0068
4 changed files with 33 additions and 2 deletions

NEWS (+2)

@@ -4,6 +4,8 @@ PHP NEWS
 - Core:
   . Fixed bug #80523 (bogus parse error on >4GB source code). (Nikita)
+  . Fixed bug #80384 (filter buffers entire read until file closed). (Adam
+    Seitz, cmb)
 
 - Date:
   . Fixed bug #80376 (last day of the month causes runway cpu usage). (Derick)

(modified stream filter test, filename not shown)

@@ -52,6 +52,6 @@ fclose($f2);
 --EXPECT--
 filter onCreate
 filtered 8192 bytes.
-filtered 128 bytes and closing.
+filtered 128 bytes and closing. Stream has reached end-of-file.
 int(8320)
 filter onClose

main/streams/streams.c

@@ -542,6 +542,7 @@ PHPAPI int _php_stream_fill_read_buffer(php_stream *stream, size_t size)
     /* allocate/fill the buffer */
 
     if (stream->readfilters.head) {
+        size_t to_read_now = MIN(size, stream->chunk_size);
         char *chunk_buf;
         php_stream_bucket_brigade brig_in = { NULL, NULL }, brig_out = { NULL, NULL };
         php_stream_bucket_brigade *brig_inp = &brig_in, *brig_outp = &brig_out, *brig_swap;
@@ -549,7 +550,7 @@ PHPAPI int _php_stream_fill_read_buffer(php_stream *stream, size_t size)
         /* allocate a buffer for reading chunks */
         chunk_buf = emalloc(stream->chunk_size);
 
-        while (!stream->eof && (stream->writepos - stream->readpos < (zend_off_t)size)) {
+        while (!stream->eof && (stream->writepos - stream->readpos < (zend_off_t)to_read_now)) {
             ssize_t justread = 0;
             int flags;
             php_stream_bucket *bucket;
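The cap changes only how much data is staged per fill, not how much a read can
ultimately return: a buffered read can drain the buffer and refill it until the
caller has what it asked for or the input runs out. The sketch below models that
interaction end to end under the same toy assumptions as above (toy_stream,
fill and read_bytes are invented for illustration and are not the php_stream
API).

/* Toy caller loop: drain the read buffer, refill it (capped at chunk_size),
 * and repeat until `size` bytes have been delivered or the input runs out. */
#include <stdio.h>

#define MIN(a, b) ((a) < (b) ? (a) : (b))

struct toy_stream {
    size_t file_pos, file_size;  /* backing "file" */
    size_t buffered;             /* bytes currently held in the read buffer */
    size_t chunk_size;           /* analogue of stream->chunk_size */
    size_t peak_buffered;        /* high-water mark, recorded for the demo */
};

static void fill(struct toy_stream *s, size_t size)
{
    size_t to_read_now = MIN(size, s->chunk_size);  /* the new cap */

    while (s->file_pos < s->file_size && s->buffered < to_read_now) {
        size_t justread = MIN(s->chunk_size, s->file_size - s->file_pos);
        s->file_pos += justread;   /* raw read of one chunk */
        s->buffered += justread;   /* filtered output appended to the buffer */
    }
    if (s->buffered > s->peak_buffered)
        s->peak_buffered = s->buffered;
}

static size_t read_bytes(struct toy_stream *s, size_t size)
{
    size_t delivered = 0;

    while (delivered < size) {
        if (s->buffered == 0) {
            fill(s, size - delivered);
            if (s->buffered == 0)
                break;                      /* nothing left to read */
        }
        /* copy out of the read buffer, as the higher-level read path does */
        size_t take = MIN(s->buffered, size - delivered);
        s->buffered -= take;
        delivered   += take;
    }
    return delivered;
}

int main(void)
{
    struct toy_stream s = { .file_size = 8 * 1024 * 1024, .chunk_size = 8192 };
    size_t got = read_bytes(&s, 8 * 1024 * 1024);

    printf("delivered %zu bytes, peak buffered %zu bytes\n",
           got, s.peak_buffered);           /* 8388608 and 8192 */
    return 0;
}

This is the property the new tests/basic/bug80384.phpt below relies on: 8 MiB
pass through the string.rot13 filter, yet memory_get_usage() is expected to
grow by less than 32 KiB because only about one chunk of filtered data is held
at a time.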

tests/basic/bug80384.phpt (new file, +28)

@@ -0,0 +1,28 @@
--TEST--
Bug #80384 large reads cause filters to internally buffer large amounts of memory
--FILE--
<?php
/* First, create a file to read */
$tmp_filename = __DIR__ . "/bug80384.tmp";
$fp = fopen($tmp_filename, 'w');
for ($i=0; $i<1024; $i++) {
    fwrite($fp, str_repeat('ABCDEFGH', 1024));
}
fclose($fp);

/* Stream the file through a filter */
$fp = fopen($tmp_filename, 'r');
$filter = stream_filter_append($fp, "string.rot13");

$mem_start = memory_get_usage();
fread($fp, 8 * 1024 * 1024);
$mem_final = memory_get_usage();
fclose($fp);
var_dump($mem_final - $mem_start < 32768);
?>
--CLEAN--
<?php
unlink(__DIR__ . "/bug80384.tmp");
?>
--EXPECT--
bool(true)