module.exports = function(options) { if (!options) { options = {}; } return H.pipeline( H.split(), H.compact(), H.map(JSON.parse), H.filter(H.curry(hasType, options.types)), H.filter(hasGeometry), H.map(H.curry(pitToFeature, options.data)) ); };
/**
 * Use the data process as a pipeline running a maximum of N records
 * through the activity chain concurrently, where N is `concurrency`.
 *
 * Each activity is `{ type, action }`; `type` is one of
 * 'map' | 'filter' | 'flatten' | 'compact' (case-insensitive), and for
 * 'map'/'filter' the `action` returns a Promise/stream-able result.
 *
 * @param {Stream} input - Highland stream of records
 * @param {Array<{type: string, action: Function}>} activities - ordered steps
 * @param {number} [concurrency] - max records in flight; falsy => unbounded merge
 * @returns {Stream} the processed record stream
 * @throws {Error} with `code === 'NOT_SUPPORTED'` for an unknown activity type
 */
function oneAtATime(input, activities, concurrency) {
  const process = [];

  // Translate each activity into one or more stream transforms.
  for (const activity of activities) {
    const { type, action } = activity;
    switch (type.toLowerCase()) {
      case 'map':
        // Wrap the (possibly async) action result in a stream and flatten.
        process.push($.flatMap((record) => $(action(record))));
        break;
      case 'filter':
        // Pair each record with its async predicate result, then keep
        // only the records whose predicate resolved truthy.
        process.push(
          $.flatMap((record) => $(action(record).then((result) => [record, result])))
        );
        process.push($.filter((t) => t[1]));
        process.push($.map((t) => t[0]));
        break;
      case 'flatten':
        process.push($.flatten());
        break;
      case 'compact':
        process.push($.compact());
        break;
      default: {
        // BUG FIX: the original used `throw Error(msg, 'NOT_SUPPORTED')`,
        // but Error's second argument is ignored, so the code was lost.
        // Attach it as a property instead; the message is unchanged.
        const err = new Error(`The activity type '${type}' is not supported.`);
        err.code = 'NOT_SUPPORTED';
        throw err;
      }
    }
  }

  // Run each record through the full transform chain as its own
  // single-element stream, then merge results back into one stream —
  // bounded by `concurrency` when one was given.
  const processed = input.map((record) => $([record]).through(pipeline(process)));

  return concurrency ? processed.mergeWithLimit(concurrency) : processed.merge();
}