1
1
'use strict' ;
2
2
3
3
var _ = require ( 'underscore' ) ,
4
- async = require ( 'async ' ) ;
4
+ Promise = require ( 'bluebird ' ) ;
5
5
6
6
var options = { } ; // Initialize the options - this will be populated when the json2csv function is called.
7
7
8
8
// Retrieve the headings for all documents and return it. This checks that all documents have the same schema.
9
// Retrieve the headings for all documents and resolve with them as a flat key
// array. Rejects when the documents do not all share the same schema
// (field ordering is ignored).
var generateHeading = function (data) {
  return new Promise(function (resolve, reject) {
    // For each document, compute its (possibly nested) list of heading keys.
    var keys = _.map(_.keys(data), function (key) {
      if (_.isObject(data[key])) {
        // Document value: recurse to build dotted sub-headings, starting from
        // an empty parent heading.
        return generateSubHeading('', data[key]);
      }
      return key; // scalar entry - the key itself is the heading
    });

    // Zero or one document: no possibility of conflicting schemas.
    // (_.flatten always returns an array, so no || [] fallback is needed.)
    if (keys.length <= 1) {
      return resolve(_.flatten(keys));
    }

    // Multiple documents: every schema must match the first one, regardless of
    // field ordering. The difference is checked in BOTH directions so that a
    // document with extra fields - not just missing ones - is also detected.
    var firstDocSchema = _.flatten(keys[0]);
    var mismatch = _.some(keys, function (keyList) {
      var docSchema = _.flatten(keyList);
      return _.difference(firstDocSchema, docSchema).length > 0 ||
             _.difference(docSchema, firstDocSchema).length > 0;
    });
    if (mismatch) {
      return reject(new Error('Not all documents have the same schema.'));
    }
    return resolve(firstDocSchema);
  });
};
32
36
33
37
// Takes the parent heading and this doc's data and creates the subdocument headings (string)
@@ -41,7 +45,7 @@ var generateSubHeading = function(heading, data) {
41
45
if ( _ . isObject ( data [ subKey ] ) && ! _ . isNull ( data [ subKey ] ) && _ . isUndefined ( data [ subKey ] . length ) && _ . keys ( data [ subKey ] ) . length > 0 ) { // If we have another nested document
42
46
return generateSubHeading ( newKey , data [ subKey ] ) ; // Recur on the sub-document to retrieve the full key name
43
47
} else {
44
- return options . DELIMITER . WRAP + newKey + options . DELIMITER . WRAP ; // Set the key name since we don't have a sub document
48
+ return newKey ; // Set the key name since we don't have a sub document
45
49
}
46
50
} ) ;
47
51
@@ -52,28 +56,40 @@ var generateSubHeading = function(heading, data) {
52
56
// Convert a single document into an array of field values (nested arrays for
// sub-documents), following the given heading keys. Dotted keys ('a.b') are
// resolved by recursing into the sub-document at the path prefix.
var convertData = function (data, keys) {
  var output = []; // accumulated field values for this document

  _.each(keys, function (key) { // For each key
    var indexOfPeriod = key.indexOf('.'); // direct string search; _.indexOf only worked via array-like coercion
    if (indexOfPeriod > -1) {
      // Dotted path: split into the first segment and the remainder, then
      // recurse into the sub-document with the remaining path.
      var pathPrefix = key.slice(0, indexOfPeriod);
      var pathRemainder = key.slice(indexOfPeriod + 1);
      output.push(convertData(data[pathPrefix], [pathRemainder]));
    } else {
      // Plain key: convert the value directly. (The previous version re-checked
      // keys.indexOf(key) > -1 here, which is always true for a key obtained by
      // iterating keys, so that dead check was removed.)
      convertField(data[key], output);
    }
  });

  return output;
};
73
+
74
// Convert a single field value to its CSV representation and push it onto the
// output array.
var convertField = function (value, output) {
  // WRAP may legitimately be unset (json2csv only wraps headings when it is
  // truthy), so fall back to '' instead of concatenating the string "undefined".
  var wrap = options.DELIMITER.WRAP || '';
  if (_.isArray(value)) { // We have an array of values
    output.push(wrap + '[' + value.join(options.DELIMITER.ARRAY) + ']' + wrap);
  } else if (_.isDate(value)) { // If we have a date
    output.push(value.toString());
  } else if (_.isObject(value)) { // If we have an object
    output.push(convertData(value, _.keys(value))); // Push the recursively generated values
  } else {
    // == null catches both null and undefined; a missing field yields undefined
    // and would otherwise crash on .toString().
    value = value == null ? '' : value.toString();
    output.push(wrap + value + wrap); // Otherwise push the current value
  }
};
72
86
73
87
// Generate the CSV body for the given documents using the resolved heading keys.
var generateCsv = function (data, headingKeys) {
  // Render each document as one row - its field values joined by the field
  // delimiter and terminated by EOL - then concatenate all rows.
  var csvBody = _.map(data, function (doc) {
    var fields = _.flatten(convertData(doc, headingKeys));
    return fields.join(options.DELIMITER.FIELD) + options.EOL;
  }).join('');
  // Resolve with both the heading keys and the body so the caller can unpack them.
  return Promise.resolve([headingKeys, csvBody]);
};
78
94
79
95
module . exports = {
@@ -82,24 +98,33 @@ module.exports = {
82
98
// Takes options as a document, data as a JSON document array, and a callback that will be used to report the results
83
99
json2csv : function ( opts , data , callback ) {
84
100
if ( ! callback ) { throw new Error ( 'A callback is required!' ) ; } // If a callback wasn't provided, throw an error
101
+
85
102
if ( ! opts ) { return callback ( new Error ( 'Options were not passed and are required.' ) ) ; } // Shouldn't happen, but just in case
86
103
else { options = opts ; } // Options were passed, set the global options value
104
+
87
105
if ( ! data ) { return callback ( new Error ( 'Cannot call json2csv on ' + data + '.' ) ) ; } // If we don't receive data, report an error
106
+
88
107
if ( ! _ . isObject ( data ) ) { // If the data was not a single document or an array of documents
89
108
return callback ( new Error ( 'Data provided was not an array of documents.' ) ) ; // Report the error back to the caller
90
109
} else if ( _ . isObject ( data ) && ! data . length ) { // Single document, not an array
91
110
data = [ data ] ; // Convert to an array of the given document
92
111
}
93
112
94
- // Retrieve the heading and the CSV asynchronously in parallel
95
- async . parallel ( [ _ . partial ( generateHeading , data ) , _ . partial ( generateCsv , data ) ] , function ( err , res ) {
96
- if ( ! err ) {
97
- // Data received with no errors, join the two responses with an end of line delimiter to setup heading and CSV body
98
- return callback ( null , res . join ( options . EOL ) ) ;
99
- } else {
100
- return callback ( err , null ) ; // Report received error back to caller
101
- }
102
- } ) ;
113
+ // Retrieve the heading and then generate the CSV with the keys that are identified
114
+ generateHeading ( data )
115
+ . then ( _ . partial ( generateCsv , data ) )
116
+ . spread ( function ( csvHeading , csvData ) {
117
+ if ( options . DELIMITER . WRAP ) {
118
+ csvHeading = _ . map ( csvHeading , function ( headingKey ) {
119
+ return options . DELIMITER . WRAP + headingKey + options . DELIMITER . WRAP ;
120
+ } ) ;
121
+ }
122
+ csvHeading = csvHeading . join ( options . DELIMITER . FIELD ) ;
123
+ return callback ( null , [ csvHeading , csvData ] . join ( options . EOL ) ) ;
124
+ } )
125
+ . catch ( function ( err ) {
126
+ return callback ( err ) ;
127
+ } ) ;
103
128
}
104
129
105
130
} ;
0 commit comments