Commit c520665

Fixing issue #12, added tests, bumped version up to 1.1.0

1 parent: e7f6f0a

11 files changed, +282 -171 lines


README.md

Lines changed: 2 additions & 7 deletions
@@ -137,7 +137,7 @@ _Note_: This requires `mocha`, `should`, `async`, and `underscore`.
 ## Features

 - Header Generation (per document keys)
-- Verifies all documents have same schema
+- Verifies all documents have same schema (schema field order does not matter as of 1.1.0)
 - Supports sub-documents natively
 - Supports arrays as document values for both json2csv and csv2json
 - Custom ordering of columns (see F.A.Q. for more information)
@@ -151,9 +151,4 @@ __Yes.__ Currently, changing the order of the keys in the JSON document will als

 ## Milestones
 - Created: Apr 23, 2014
-- 1K Downloads/Month: January 15, 2015
-
-## TODO
-- Use PARSE_CSV_NUMBERS option to actually convert numbers. Not currently implemented.
-- Respect nested arrays when in json2csv - Currently flattens them
-- If quotes in CSV header, strip them? Add as an option?
+- 1K Downloads/Month: January 15, 2015
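
The README change above records the behaviour this commit adds: as of 1.1.0 the schema check no longer cares about key order, and values are emitted in the heading's column order. A minimal sketch of that behaviour, written against the internal lib/json-2-csv.js interface shown further down in this commit (the require path and the delimiter/EOL values are assumptions; only the option names come from the code):

var json2csv = require('./lib/json-2-csv'); // internal module from this commit; the published wrapper may differ

// Assumed option values; the option names (DELIMITER.FIELD/ARRAY/WRAP, EOL) appear in lib/json-2-csv.js
var opts = {
    DELIMITER: { FIELD: ',', ARRAY: ';', WRAP: '' },
    EOL: '\n'
};

// Same set of keys, listed in a different order - accepted as of 1.1.0
var docs = [
    { carModel: 'Audi', price: '10000', color: 'blue' },
    { price: '15000', color: 'red', carModel: 'BMW' }
];

json2csv.json2csv(opts, docs, function (err, csv) {
    if (err) { return console.error(err); }
    console.log(csv); // "carModel,price,color\nAudi,10000,blue\nBMW,15000,red\n"
});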

bower.json

Lines changed: 2 additions & 2 deletions
@@ -1,6 +1,6 @@
 {
   "name": "json-2-csv",
-  "version": "1.0.7",
+  "version": "1.1.0",
   "homepage": "https://github.com/mrodrig/json-2-csv",
   "moduleType": [
     "node"
@@ -15,6 +15,6 @@
   ],
   "dependencies": {
     "underscore": "1.6.0",
-    "async": "0.2.10"
+    "bluebird": "2.9.24"
   }
 }

lib/csv-2-json.js

Lines changed: 1 addition & 2 deletions
@@ -1,7 +1,6 @@
 'use strict';

-var _ = require('underscore'),
-    async = require('async');
+var _ = require('underscore');

 var options = {}; // Initialize the options - this will be populated when the csv2json function is called.

lib/json-2-csv.js

Lines changed: 72 additions & 47 deletions
@@ -1,33 +1,37 @@
 'use strict';

 var _ = require('underscore'),
-    async = require('async');
+    Promise = require('bluebird');

 var options = {}; // Initialize the options - this will be populated when the json2csv function is called.

 // Retrieve the headings for all documents and return it. This checks that all documents have the same schema.
-var generateHeading = function(data, cb) {
-    var keys = _.map(_.keys(data), function (key, indx) { // for each key
-        if (_.isObject(data[key])) {
-            // if the data at the key is a document, then we retrieve the subHeading starting with an empty string heading and the doc
-            return generateSubHeading('', data[key]);
-        }
-        return key;
-    });
-
-    // TODO: check for consistent schema
+var generateHeading = function(data) {
+    return new Promise(function (resolve, reject) {
+        var keys = _.map(_.keys(data), function (key, indx) { // for each key
+            if (_.isObject(data[key])) {
+                // if the data at the key is a document, then we retrieve the subHeading starting with an empty string heading and the doc
+                return generateSubHeading('', data[key]);
+            }
+            return key;
+        });

-    keys = _.map(keys, function(keyList) {
-        return _.flatten(keyList).join(options.DELIMITER.FIELD);
+        // Check for a consistent schema that does not require the same order:
+        // if we only have one document - then there is no possiblility of multiple schemas
+        if (keys && keys.length <= 1) {
+            return resolve(_.flatten(keys) || []);
+        }
+        // else - multiple documents - ensure only one schema (regardless of field ordering)
+        var firstDocSchema = _.flatten(keys[0]);
+        _.each(keys, function (keyList) {
+            // If there is a difference between the schemas, throw the inconsistent schema error
+            var diff = _.difference(firstDocSchema, _.flatten(keyList));
+            if (!_.isEqual(diff, [])) {
+                return reject(new Error('Not all documents have the same schema.'));
+            }
+        });
+        return resolve(_.flatten(keys[0]));
     });
-
-    // Retrieve the unique array of headings (keys)
-    keys = _.uniq(keys);
-
-    // If we have more than 1 unique list, then not all docs have the same schema - report an error
-    if (keys.length > 1) { throw new Error('Not all documents have the same schema.', keys); }
-
-    return cb(null, keys);
 };

 // Takes the parent heading and this doc's data and creates the subdocument headings (string)
@@ -41,7 +45,7 @@ var generateSubHeading = function(heading, data) {
         if (_.isObject(data[subKey]) && !_.isNull(data[subKey]) && _.isUndefined(data[subKey].length) && _.keys(data[subKey]).length > 0) { // If we have another nested document
             return generateSubHeading(newKey, data[subKey]); // Recur on the sub-document to retrieve the full key name
         } else {
-            return options.DELIMITER.WRAP + newKey + options.DELIMITER.WRAP; // Set the key name since we don't have a sub document
+            return newKey; // Set the key name since we don't have a sub document
         }
     });

@@ -52,28 +56,40 @@ var generateSubHeading = function(heading, data) {
 var convertData = function (data, keys) {
     var output = [], // Array of CSV representing converted docs
         value; // Temporary variable to store the current data
-    _.each(keys, function (key, indx) { // For each key
-        value = data[key]; // Set the current data that we are looking at
-        if (keys.indexOf(key) > -1) { // If the keys contain the current key, then process the data
-            if (_.isArray(value)) { // We have an array of values
-                output.push((options.DELIMITER.WRAP || '') + '[' + value.join(options.DELIMITER.ARRAY) + ']' + (options.DELIMITER.WRAP || ''));
-            } else if (_.isDate(value)) { // If we have a date
-                output.push(value.toString());
-            } else if (_.isObject(value)) { // If we have an object
-                output.push(convertData(value, _.keys(value))); // Push the recursively generated CSV
-            } else {
-                value = value == null ? '' : value.toString();
-                output.push((options.DELIMITER.WRAP || '') + value + (options.DELIMITER.WRAP || '')); // Otherwise push the current value
-            }
+
+    _.each(keys, function (key) { // For each key
+        var indexOfPeriod = _.indexOf(key, '.');
+        if (indexOfPeriod > -1) {
+            var pathPrefix = key.slice(0, indexOfPeriod),
+                pathRemainder = key.slice(indexOfPeriod+1);
+            output.push(convertData(data[pathPrefix], [pathRemainder]));
+        } else if (keys.indexOf(key) > -1) { // If the keys contain the current key, then process the data
+            value = data[key]; // Set the current data that we are looking at
+            convertField(value, output);
         }
     });
-    return output.join(options.DELIMITER.FIELD); // Return the data joined by our field delimiter
+    return output; // Return the data joined by our field delimiter
+};
+
+var convertField = function (value, output) {
+    if (_.isArray(value)) { // We have an array of values
+        output.push(options.DELIMITER.WRAP + '[' + value.join(options.DELIMITER.ARRAY) + ']' + options.DELIMITER.WRAP);
+    } else if (_.isDate(value)) { // If we have a date
+        output.push(value.toString());
+    } else if (_.isObject(value)) { // If we have an object
+        output.push(convertData(value, _.keys(value))); // Push the recursively generated CSV
+    } else {
+        value = value === null ? '' : value.toString();
+        output.push(options.DELIMITER.WRAP + value + options.DELIMITER.WRAP); // Otherwise push the current value
+    }
 };

 // Generate the CSV representing the given data.
-var generateCsv = function (data, cb) {
+var generateCsv = function (data, headingKeys) {
     // Reduce each JSON document in data to a CSV string and append it to the CSV accumulator
-    return cb(null, _.reduce(data, function (csv, doc) { return csv += convertData(doc, _.keys(doc)) + options.EOL; }, ''));
+    return Promise.resolve([headingKeys, _.reduce(data, function (csv, doc) {
+        return csv += _.flatten(convertData(doc, headingKeys)).join(options.DELIMITER.FIELD) + options.EOL;
+    }, '')]);
 };

 module.exports = {
@@ -82,24 +98,33 @@ module.exports = {
     // Takes options as a document, data as a JSON document array, and a callback that will be used to report the results
     json2csv: function (opts, data, callback) {
         if (!callback) { throw new Error('A callback is required!'); } // If a callback wasn't provided, throw an error
+
         if (!opts) { return callback(new Error('Options were not passed and are required.')); } // Shouldn't happen, but just in case
         else { options = opts; } // Options were passed, set the global options value
+
         if (!data) { return callback(new Error('Cannot call json2csv on ' + data + '.')); } // If we don't receive data, report an error
+
         if (!_.isObject(data)) { // If the data was not a single document or an array of documents
             return callback(new Error('Data provided was not an array of documents.')); // Report the error back to the caller
         } else if (_.isObject(data) && !data.length) { // Single document, not an array
             data = [data]; // Convert to an array of the given document
         }

-        // Retrieve the heading and the CSV asynchronously in parallel
-        async.parallel([_.partial(generateHeading, data), _.partial(generateCsv, data)], function (err, res) {
-            if (!err) {
-                // Data received with no errors, join the two responses with an end of line delimiter to setup heading and CSV body
-                return callback(null, res.join(options.EOL));
-            } else {
-                return callback(err, null); // Report received error back to caller
-            }
-        });
+        // Retrieve the heading and then generate the CSV with the keys that are identified
+        generateHeading(data)
+            .then(_.partial(generateCsv, data))
+            .spread(function (csvHeading, csvData) {
+                if (options.DELIMITER.WRAP) {
+                    csvHeading = _.map(csvHeading, function(headingKey) {
+                        return options.DELIMITER.WRAP + headingKey + options.DELIMITER.WRAP;
+                    });
+                }
+                csvHeading = csvHeading.join(options.DELIMITER.FIELD);
+                return callback(null, [csvHeading, csvData].join(options.EOL));
+            })
+            .catch(function (err) {
+                return callback(err);
+            });
     }

 };
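
At the heart of the new generateHeading is an order-insensitive comparison: each document's flattened key list is checked against the first document's keys with underscore's _.difference, so reordered fields pass while a field that the first document has and a later one lacks still triggers the 'Not all documents have the same schema.' rejection. A standalone sketch of just that comparison (not part of the commit), using the key sets from the test fixtures added below:

var _ = require('underscore');

// Keys of the first document in the differentSchemas.json fixture
var firstDocSchema = ['carModel', 'price', 'color', 'mileage'];

// Same keys in a different order: empty difference, so the schema check passes
console.log(_.difference(firstDocSchema, ['price', 'color', 'mileage', 'carModel'])); // []

// A later document missing 'mileage': non-empty difference, so generateHeading rejects
console.log(_.difference(firstDocSchema, ['carModel', 'price', 'color'])); // [ 'mileage' ]

The json2csv entry point now chains generateHeading and generateCsv as promises; generateCsv resolves the pair [headingKeys, csvData], and bluebird's .spread hands those two elements to the callback as separate arguments.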

package.json

Lines changed: 3 additions & 3 deletions
@@ -2,7 +2,7 @@
   "author": "mrodrig",
   "name": "json-2-csv",
   "description": "A JSON to CSV and CSV to JSON converter that natively supports sub-documents and auto-generates the CSV heading.",
-  "version": "1.0.8",
+  "version": "1.1.0",
   "repository": {
     "type": "git",
     "url": "http://github.com/mrodrig/json-2-csv.git"
@@ -24,7 +24,7 @@
   ],
   "dependencies": {
     "underscore": "1.6.0",
-    "async": "0.2.10"
+    "bluebird": "~2.9.24"
   },
   "devDependencies": {
     "mocha": "~1.14.0",
@@ -35,4 +35,4 @@
     "node": "*"
   },
   "license": "MIT"
-}
+}

test/JSON/differentSchemas.json

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+[
+    { "carModel" : "Audi", "price" : "10000", "color" : "blue", "mileage" : "7200" },
+    { "carModel" : "BMW", "price" : "15000", "color" : "red" },
+    { "carModel" : "Mercedes", "price" : "20000", "color" : "yellow" },
+    { "carModel" : "Porsche", "price" : "30000", "color" : "green" }
+]

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
+[
+    { "carModel" : "Audi", "price" : "10000", "color" : "blue" },
+    { "carModel" : "BMW", "color" : "red", "price" : "15000" },
+    { "price" : "20000", "color" : "yellow", "carModel" : "Mercedes" },
+    { "carModel" : "Porsche", "price" : "30000", "color" : "green" }
+]
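
These two fixtures exercise the two sides of the new check: the first has a genuinely divergent schema (only its first document carries mileage), while the second reorders the same three keys. The tests added by this commit are not shown in this excerpt; a hypothetical mocha/should test over these fixtures might look like the following (the require paths, the second fixture's file name, and the option values are assumptions):

'use strict';

var should = require('should'),
    json2csv = require('../lib/json-2-csv'),                    // assumed path to the module shown above
    differentSchemas = require('./JSON/differentSchemas.json'),
    reorderedSchemas = require('./JSON/reorderedSchemas.json'), // hypothetical name for the unnamed fixture above
    opts = { DELIMITER: { FIELD: ',', ARRAY: ';', WRAP: '' }, EOL: '\n' }; // assumed default delimiters

describe('json2csv schema checking (issue #12)', function () {
    it('rejects documents whose schemas actually differ', function (done) {
        json2csv.json2csv(opts, differentSchemas, function (err, csv) {
            err.message.should.equal('Not all documents have the same schema.');
            should.not.exist(csv);
            done();
        });
    });

    it('accepts documents whose keys only differ in order', function (done) {
        json2csv.json2csv(opts, reorderedSchemas, function (err, csv) {
            should.not.exist(err);
            csv.split(opts.EOL)[0].should.equal('carModel,price,color');
            done();
        });
    });
});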
