From 7dcced87d1ab53a7e09b5f12c96ddf5c67935c90 Mon Sep 17 00:00:00 2001
From: jaymeans
Date: Mon, 13 Aug 2018 23:31:39 -0700
Subject: [PATCH] got test passing with new functionality

---
 papaparse.js | 34 +++++++++++++++++++++-------------
 1 file changed, 21 insertions(+), 13 deletions(-)

diff --git a/papaparse.js b/papaparse.js
index 67f5e2e..d67dfae 100755
--- a/papaparse.js
+++ b/papaparse.js
@@ -280,6 +280,9 @@
 		/** quote character */
 		var _quoteChar = '"';
 
+		/** whether to skip empty lines */
+		var _skipEmptyLines = false;
+
 		unpackConfig();
 
 		var quoteCharRegex = new RegExp(_quoteChar, 'g');
@@ -290,9 +293,9 @@
 		if (_input instanceof Array)
 		{
 			if (!_input.length || _input[0] instanceof Array)
-				return serialize(null, _input);
+				return serialize(null, _input, _skipEmptyLines === 'greedy');
 			else if (typeof _input[0] === 'object')
-				return serialize(objectKeys(_input[0]), _input);
+				return serialize(objectKeys(_input[0]), _input, _skipEmptyLines === 'greedy');
 		}
 		else if (typeof _input === 'object')
 		{
@@ -313,7 +316,7 @@
 					_input.data = [_input.data];	// handles input like [1,2,3] or ['asdf']
 			}
 
-			return serialize(_input.fields || [], _input.data || []);
+			return serialize(_input.fields || [], _input.data || [], _skipEmptyLines === 'greedy');
 		}
 
 		// Default (any valid paths should return before this)
@@ -335,6 +338,10 @@
 				|| _config.quotes instanceof Array)
 				_quotes = _config.quotes;
 
+			if (typeof _config.skipEmptyLines === 'boolean'
+				|| typeof _config.skipEmptyLines === 'string')
+				_skipEmptyLines = _config.skipEmptyLines;
+
 			if (typeof _config.newline === 'string')
 				_newline = _config.newline;
 
@@ -358,7 +365,7 @@
 		}
 
 		/** The double for loop that iterates the data and writes out a CSV string including header row */
-		function serialize(fields, data)
+		function serialize(fields, data, greedySkip)
 		{
 			var csv = '';
 
@@ -388,18 +395,19 @@
 			{
 				var maxCol = hasHeader ? fields.length : data[row].length;
 
-				for (var col = 0; col < maxCol; col++)
+				if (!greedySkip || data[row].join('').trim() !== '')
 				{
-					if (col > 0)
-						csv += _delimiter;
-					var colIdx = hasHeader && dataKeyedByField ? fields[col] : col;
-					csv += safe(data[row][colIdx], col);
+					for (var col = 0; col < maxCol; col++)
+					{
+						if (col > 0)
+							csv += _delimiter;
+						var colIdx = hasHeader && dataKeyedByField ? fields[col] : col;
+						csv += safe(data[row][colIdx], col);
+					}
+					if (row < data.length - 1)
+						csv += _newline;
 				}
-
-				if (row < data.length - 1)
-					csv += _newline;
 			}
-
 			return csv;
 		}
 
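
Reviewer note: a minimal usage sketch of the new option (not part of the patch; the sample data and expected output below are illustrative assumptions). With skipEmptyLines set to 'greedy', any row whose cells join and trim to an empty string is dropped from the generated CSV.

    // Usage sketch, assuming this patch is applied and Papa.unparse(data, config)
    // remains the entry point that passes config through to unpackConfig().
    var csv = Papa.unparse([
    	['1', '2'],
    	['', '   '],   // joins to '   ', trims to '' -> skipped under 'greedy'
    	['3', '4']
    ], { skipEmptyLines: 'greedy' });

    // Expected result, assuming the default '\r\n' newline:
    // "1,2\r\n3,4"

Note that the greedy check (data[row].join('').trim() !== '') also moves the trailing-newline append inside the guarded block, so a skipped row contributes neither cell values nor a line break to the output.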