Fix for bug #4133. #762

Closed (wants to merge 1 commit)

libraries/import.lib.php: 36 additions & 11 deletions
@@ -282,24 +282,29 @@ function PMA_lookForUse($buffer, $db, $reload)
     return(array($db, $reload));
 }
 
-
 /**
- * Returns next part of imported file/buffer
+ * Get max size to read
  *
- * @param int $size size of buffer to read
- *                  (this is maximal size function will return)
+ * @param int $size       Size to read
+ * @param int $read_multi Multiply read
  *
- * @return string part of file/buffer
- * @access public
+ * @return int Max size to read
  */
-function PMA_importGetNextChunk($size = 32768)
+function PMA_importSizeToRead($size = null, $read_multi = null)
 {
-    global $compression, $import_handle, $charset_conversion, $charset_of_file,
-        $read_multiply;
+    global $read_multiply;
+
+    if (null === $size) {
+        $size = 32768;
+    }
+
+    if (null === $read_multi) {
+        $read_multi = $read_multiply;
+    }
 
     // Add some progression while reading large amount of data
-    if ($read_multiply <= 8) {
-        $size *= $read_multiply;
+    if ($read_multi <= 8) {
+        $size *= $read_multi;
     } else {
         $size *= 8;
     }
@@ -310,6 +315,26 @@ function PMA_importGetNextChunk($size = 32768)
         $size = $GLOBALS['read_limit'];
     }
 
+    return $size;
+}
+
+
+/**
+ * Returns next part of imported file/buffer
+ *
+ * @param int $size size of buffer to read
+ *                  (this is maximal size function will return)
+ *
+ * @return string part of file/buffer
+ * @access public
+ */
+function PMA_importGetNextChunk($size = null)
+{
+    global $compression, $import_handle, $charset_conversion, $charset_of_file,
+        $read_multiply;
+
+    $size = PMA_importSizeToRead($size, $read_multiply);
+
     if (PMA_checkTimeout()) {
         return false;
     }
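
For context: the change above splits the size calculation out of PMA_importGetNextChunk() into the new helper PMA_importSizeToRead(), so an import plugin can ask for the maximal chunk size before it starts reading. Below is a minimal sketch of how a caller might use the pair; it assumes the import globals ($import_handle, $compression, $read_multiply, $read_limit) have already been set up by the import machinery, and the values and the short-read check are illustrative only, not part of the patch.

    require_once 'libraries/import.lib.php';

    // Maximal number of bytes the next chunk may contain:
    // 32768 by default, scaled by $read_multiply (at most 8x)
    // and capped by $GLOBALS['read_limit'].
    $maxSizeToRead = PMA_importSizeToRead();

    // Read the next chunk. A chunk shorter than $maxSizeToRead is the
    // signal the CSV plugin change below uses to detect the last chunk.
    $data = PMA_importGetNextChunk();
    if ($data !== false && strlen($data) < $maxSizeToRead) {
        // end of the imported file/buffer reached
    }
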
libraries/plugins/import/ImportCsv.class.php: 18 additions & 0 deletions
@@ -247,8 +247,12 @@ public function doImport()
         $col_count = 0;
         $max_cols = 0;
 
+        $maxSizeToRead = PMA_importSizeToRead();
+
         while (! ($finished && $i >= $len) && ! $error && ! $timeout_passed) {
+            //$data contains next lines to import.
             $data = PMA_importGetNextChunk();
+            $lenData = strlen($data);
             if ($data === false) {
                 // subtract data we didn't handle yet and stop processing
                 $GLOBALS['offset'] -= strlen($buffer);
@@ -315,6 +319,9 @@
                 }
                 $fail = false;
                 $value = '';
+
+                //Read all the chars between the separator chars or until the
+                //end of the line.
                 while (($need_end
                     && ( $ch != $csv_enclosed || $csv_enclosed == $csv_escaped ))
                     || ( ! $need_end
@@ -355,11 +362,13 @@
                     $value = null;
                 }
 
+                //If fail, complete $buffer and restart to read this value.
                 if ($fail) {
                     $i = $fallbacki;
                     $ch = $buffer[$i];
                     break;
                 }
+
                 // Need to strip trailing enclosing char?
                 if ($need_end && $ch == $csv_enclosed) {
                     if ($finished && $i == $len - 1) {
@@ -373,16 +382,25 @@
                         $ch = $buffer[$i];
                     }
                 }
+
                 // Are we at the end?
                 if ($ch == $csv_new_line
                     || ($csv_new_line == 'auto' && ($ch == "\r" || $ch == "\n"))
                     || ($finished && $i == $len - 1)
                 ) {
                     $csv_finish = true;
                 }
+
                 // Go to next char
                 if ($ch == $csv_terminated) {
                     if ($i == $len - 1) {
+                        if ($lenData < $maxSizeToRead) {
+                            $values[] = $value;
+                            $values[] = '';
+                            break;
+                        }
+
+                        //Else, if there is other thing to read...
                         $i = $fallbacki;
                         $ch = $buffer[$i];
                         break;
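
Reading the added branch on its own: when the field terminator is the last character of the buffer and the chunk that was just read came back shorter than the maximal read size, there is nothing left in the file, so the parser keeps the current value and records an empty trailing column instead of falling back to $fallbacki to re-read the value from a next chunk that does not exist. A minimal sketch of that decision with the parser state reduced to plain variables (names and values are illustrative, not the plugin's real state):

    // The last chunk of the file ends with the terminator, e.g. "1,foo,".
    $maxSizeToRead = 32768;                    // what PMA_importSizeToRead() would return here
    $lenData       = strlen("1,foo,");         // chunk shorter than the maximum: end of file
    $value         = 'foo';                    // value read just before the final ','
    $values        = array('1');

    if ($lenData < $maxSizeToRead) {
        // End of file: keep the value and add the empty trailing column.
        $values[] = $value;
        $values[] = '';
    }
    // $values is now array('1', 'foo', '')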