Fix for bug #4133. #762

Closed
Tithugues wants to merge 1 commit into phpmyadmin:master from Tithugues:fix-4133

3 participants

@Tithugues
Collaborator

Hi,

Here is a "partial" fix for bug #4133.
Why partial? Because the error is still thrown when the file ends with a line like
"5,testu,"
and has exactly 32765 characters.

But I hope this case is rare enough for the fix to be acceptable.
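
To make the idea behind the fix concrete, here is a minimal, self-contained sketch (not the phpMyAdmin code itself): a chunk that comes back shorter than the maximum read size must be the last chunk of the file, so a column separator at its very end marks a trailing empty field rather than a value that continues in the next chunk. The names below (parseChunk, $maxSizeToRead, $isLastChunk) are illustrative only.

```php
<?php
// Sketch of the decision this fix adds to the CSV importer: when the
// separator is the last character of a chunk, the parser normally rewinds
// and waits for the next chunk, because the row might continue there.
// If the chunk is shorter than the maximum read size, it is the last one,
// so the separator really marks a trailing empty field.
function parseChunk(string $chunk, bool $isLastChunk): array
{
    $fields = [];
    $value  = '';
    $len    = strlen($chunk);

    for ($i = 0; $i < $len; $i++) {
        $ch = $chunk[$i];
        if ($ch === ',') {
            $fields[] = $value;
            $value    = '';
            if ($i === $len - 1 && $isLastChunk) {
                // Trailing separator at the very end of the last chunk:
                // record one empty field instead of waiting for data
                // that will never arrive.
                $fields[] = '';
            }
        } else {
            $value .= $ch;
        }
    }
    if ($value !== '') {
        $fields[] = $value;
    }
    return $fields;
}

$maxSizeToRead = 32768;                    // stand-in for PMA_importSizeToRead()
$chunk         = "5,testu,";               // last line of the imported file
$isLastChunk   = strlen($chunk) < $maxSizeToRead;

print_r(parseChunk($chunk, $isLastChunk)); // [0] => 5, [1] => testu, [2] => ""
```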

@coveralls

Coverage Status

Coverage decreased (-0.01%) when pulling 2620125 on Tithugues:fix-4133 into 7a6c258 on phpmyadmin:master.

@lem9
Owner

Thanks, however the other proposed fix was simpler.

lem9 closed this
Tithugues deleted the unknown repository branch
@Tithugues
Collaborator

Oh yes... And it's cleaner!

Commits on Nov 20, 2013
  1. Fix for bug #4133. (Tithugues authored)
libraries/import.lib.php (47 changed lines)
@@ -282,24 +282,29 @@ function PMA_lookForUse($buffer, $db, $reload)
return(array($db, $reload));
}
-
/**
- * Returns next part of imported file/buffer
+ * Get max size to read
*
- * @param int $size size of buffer to read
- * (this is maximal size function will return)
+ * @param int $size Size to read
+ * @param int $read_multi Multiply read
*
- * @return string part of file/buffer
- * @access public
+ * @return int Max size to read
*/
-function PMA_importGetNextChunk($size = 32768)
+function PMA_importSizeToRead($size = null, $read_multi = null)
{
- global $compression, $import_handle, $charset_conversion, $charset_of_file,
- $read_multiply;
+ global $read_multiply;
+
+ if (null === $size) {
+ $size = 32768;
+ }
+
+ if (null === $read_multi) {
+ $read_multi = $read_multiply;
+ }
// Add some progression while reading large amount of data
- if ($read_multiply <= 8) {
- $size *= $read_multiply;
+ if ($read_multi <= 8) {
+ $size *= $read_multi;
} else {
$size *= 8;
}
@@ -310,6 +315,26 @@ function PMA_importGetNextChunk($size = 32768)
$size = $GLOBALS['read_limit'];
}
+ return $size;
+}
+
+
+/**
+ * Returns next part of imported file/buffer
+ *
+ * @param int $size size of buffer to read
+ * (this is maximal size function will return)
+ *
+ * @return string part of file/buffer
+ * @access public
+ */
+function PMA_importGetNextChunk($size = null)
+{
+ global $compression, $import_handle, $charset_conversion, $charset_of_file,
+ $read_multiply;
+
+ $size = PMA_importSizeToRead($size, $read_multiply);
+
if (PMA_checkTimeout()) {
return false;
}
libraries/plugins/import/ImportCsv.class.php (18 changed lines)
@@ -247,8 +247,12 @@ public function doImport()
$col_count = 0;
$max_cols = 0;
+ $maxSizeToRead = PMA_importSizeToRead();
+
while (! ($finished && $i >= $len) && ! $error && ! $timeout_passed) {
+ //$data contains next lines to import.
$data = PMA_importGetNextChunk();
+ $lenData = strlen($data);
if ($data === false) {
// subtract data we didn't handle yet and stop processing
$GLOBALS['offset'] -= strlen($buffer);
@@ -315,6 +319,9 @@ public function doImport()
}
$fail = false;
$value = '';
+
+ //Read all the chars between the separator chars or until the
+ //end of the line.
while (($need_end
&& ( $ch != $csv_enclosed || $csv_enclosed == $csv_escaped ))
|| ( ! $need_end
@@ -355,11 +362,13 @@ public function doImport()
$value = null;
}
+ //If fail, complete $buffer and restart to read this value.
if ($fail) {
$i = $fallbacki;
$ch = $buffer[$i];
break;
}
+
// Need to strip trailing enclosing char?
if ($need_end && $ch == $csv_enclosed) {
if ($finished && $i == $len - 1) {
@@ -373,6 +382,7 @@ public function doImport()
$ch = $buffer[$i];
}
}
+
// Are we at the end?
if ($ch == $csv_new_line
|| ($csv_new_line == 'auto' && ($ch == "\r" || $ch == "\n"))
@@ -380,9 +390,17 @@ public function doImport()
) {
$csv_finish = true;
}
+
// Go to next char
if ($ch == $csv_terminated) {
if ($i == $len - 1) {
+ if ($lenData < $maxSizeToRead) {
+ $values[] = $value;
+ $values[] = '';
+ break;
+ }
+
+ //Else, if there is other thing to read...
$i = $fallbacki;
$ch = $buffer[$i];
break;