Fixed preview when streaming files

Commit b209603bad (pull/124/head, tag 4.0.4)
Matthew Holt, 10 years ago

5 files changed:

  1. bower.json (2 lines changed)
  2. package.json (2 lines changed)
  3. papaparse.js (33 lines changed)
  4. papaparse.min.js (4 lines changed)
  5. parse.jquery.json (2 lines changed)

bower.json (2 lines changed)

@@ -1,6 +1,6 @@
 {
 "name": "Papa-Parse",
-"version": "4.0.3",
+"version": "4.0.4",
 "main": "papaparse.js",
 "homepage": "http://papaparse.com",
 "authors": [

package.json (2 lines changed)

@@ -1,6 +1,6 @@
 {
 "name": "papaparse",
-"version": "4.0.3",
+"version": "4.0.4",
 "description": "Fast and powerful CSV parser for the browser that supports web workers and streaming large files. Converts CSV to JSON and JSON to CSV.",
 "keywords": [
 "csv",

papaparse.js (33 lines changed)

@@ -1,6 +1,6 @@
 /*
 Papa Parse
-v4.0.3
+v4.0.4
 https://github.com/mholt/PapaParse
 */
 (function(global)
@@ -397,7 +397,7 @@
 if (!config.chunkSize)
 config.chunkSize = Papa.RemoteChunkSize;
-var start = 0, fileSize = 0;
+var start = 0, fileSize = 0, rowCount = 0;
 var aggregate = "";
 var partialLine = "";
 var xhr, url, nextChunk, finishedWithEntireFile;
@@ -467,10 +467,10 @@
 xhr.open("GET", url, !IS_WORKER);
-if (config.step)
+if (config.step || config.chunk)
 {
 var end = start + configCopy.chunkSize - 1; // minus one because byte range is inclusive
 if (fileSize && end > fileSize) // Hack around a Chrome bug: http://stackoverflow.com/q/24745095/1048862
 end = fileSize;
 xhr.setRequestHeader("Range", "bytes="+start+"-"+end);
 }
@@ -503,7 +503,7 @@
 aggregate += partialLine + xhr.responseText;
 partialLine = "";
-finishedWithEntireFile = !config.step || start > getFileSize(xhr);
+finishedWithEntireFile = (!config.step && !config.chunk) || start > getFileSize(xhr);
 if (!finishedWithEntireFile)
 {
@@ -528,13 +528,17 @@
 var results = handle.parse(aggregate);
 aggregate = "";
+if (results && results.data)
+rowCount += results.data.length;
+var finishedIncludingPreview = finishedWithEntireFile || (configCopy.preview && rowCount >= configCopy.preview);
 if (IS_WORKER)
 {
 global.postMessage({
 results: results,
 workerId: Papa.WORKER_ID,
-finished: finishedWithEntireFile
+finished: finishedIncludingPreview
 });
 }
 else if (isFunction(config.chunk))
@@ -543,10 +547,10 @@
 results = undefined;
 }
-if (finishedWithEntireFile && isFunction(userComplete))
+if (isFunction(userComplete) && finishedIncludingPreview)
 userComplete(results);
-if (!finishedWithEntireFile && !results.meta.paused)
+if (!finishedIncludingPreview && (!results || !results.meta.paused))
 nextChunk();
 }
@@ -605,6 +609,7 @@
 var slice;
 var aggregate = "";
 var partialLine = "";
+var rowCount = 0;
 var paused = false;
 var self = this;
 var reader, nextChunk, slice, finishedWithEntireFile;
@@ -657,7 +662,7 @@
 function nextChunk()
 {
-if (!finishedWithEntireFile)
+if (!finishedWithEntireFile && (!configCopy.preview || rowCount < configCopy.preview))
 readChunk();
 }
@@ -703,13 +708,17 @@
 var results = handle.parse(aggregate);
 aggregate = "";
+if (results && results.data)
+rowCount += results.data.length;
+var finishedIncludingPreview = finishedWithEntireFile || (configCopy.preview && rowCount >= configCopy.preview);
 if (IS_WORKER)
 {
 global.postMessage({
 results: results,
 workerId: Papa.WORKER_ID,
-finished: finishedWithEntireFile
+finished: finishedIncludingPreview
 });
 }
 else if (isFunction(config.chunk))
@@ -720,10 +729,10 @@
 results = undefined;
 }
-if (finishedWithEntireFile && isFunction(userComplete))
+if (isFunction(userComplete) && finishedIncludingPreview)
 userComplete(results);
-if (!results || !results.meta.paused)
+if (!finishedIncludingPreview && (!results || !results.meta.paused))
 nextChunk();
 }
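
For context, here is a minimal usage sketch (not part of this commit, with a placeholder URL, chunk size, and row limit) of the behavior the change targets: when a remote file is streamed with a chunk callback, the preview setting should now stop fetching further chunks and fire complete once at least that many rows have been parsed, instead of downloading the whole file.

// Hypothetical example; assumes the Papa Parse 4.x config options
// download, chunkSize, preview, chunk, and complete.
Papa.parse("http://example.com/big.csv", {  // placeholder URL
	download: true,          // stream the file over XHR in byte-range chunks
	chunkSize: 1024 * 1024,  // request roughly 1 MB per chunk
	preview: 100,            // stop once about 100 rows have been parsed
	chunk: function(results) {
		console.log("rows in this chunk:", results.data.length);
	},
	complete: function() {
		// With this commit, complete fires when the preview limit is reached,
		// not only after the entire file has been downloaded.
		console.log("done");
	}
});

Note that rowCount is accumulated once per parsed chunk, so the stream may overshoot the preview limit by up to one chunk's worth of rows before it stops.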

papaparse.min.js (vendored, 4 lines changed)

File diff suppressed because one or more lines are too long

parse.jquery.json (2 lines changed)

@@ -1,6 +1,6 @@
 {
 "name": "parse",
-"version": "4.0.3",
+"version": "4.0.4",
 "title": "Papa Parse",
 "description": "Powerful CSV parser that converts CSV to JSON and JSON to CSV. Supports web workers and streaming large files. Fastest CSV parser for JavaScript.",
 "keywords": [
