prototype.transform = function(input) {
  log.debug(input, ['plucking']);

  var field = this.param('field');

  // For now, this transform ASSUMES just one incoming tuple, which will be
  // completely replaced by the plucked values.
  assert(input.add.length < 2,
    "The pluck transform can only replace a single added datum.");

  var plucked = field.accessor(input.add[0]); // the plucked array

  // Assign synthetic _id properties to plucked objects lacking one.
  // (Unclear whether this is strictly needed.)
  var idCounter = 1000000;
  var ensureId = function(obj) {
    if (!('_id' in obj)) {
      idCounter += 1;
      obj._id = '_' + idCounter;
    }
  };

  plucked.forEach(ensureId);
  input.add = plucked; // replace the incoming tuple

  if (this.reevaluate(input)) {
    // A single modified tuple may likewise be replaced, if present.
    assert(input.mod.length < 2,
      "The pluck transform can only replace a single modified datum.");
    plucked = field.accessor(input.mod[0]);
    plucked.forEach(ensureId);
    input.mod = plucked;
  }

  // return the modified ChangeSet
  return input;
};
prototype.transform = function(input) {
  log.debug(input, ['linkpath']);

  var out = this._output,
      shapeFn = shapes[this.param('shape')] || shapes.line,
      sx = this.param('sourceX').accessor,
      sy = this.param('sourceY').accessor,
      tx = this.param('targetX').accessor,
      ty = this.param('targetY').accessor,
      tension = this.param('tension');

  // Compute and store the link path string for a single tuple.
  var encode = function(t) {
    Tuple.set(t, out.path, shapeFn(sx(t), sy(t), tx(t), ty(t), tension));
  };

  input.add.forEach(encode);
  if (this.reevaluate(input)) {
    input.mod.forEach(encode);
    input.rem.forEach(encode);
  }

  input.fields[out.path] = 1;
  return input;
};
prototype.transform = function(input) {
  log.debug(input, ['geo']);

  var out = this._output,
      lonAcc = this.param('lon').accessor,
      latAcc = this.param('lat').accessor,
      project = Geo.d3Projection.call(this);

  // Project a tuple's lon/lat into x/y; unprojectable points
  // (projection returns a falsy result) yield nulls.
  function encode(t) {
    var point = project([lonAcc(t), latAcc(t)]) || [null, null];
    Tuple.set(t, out.x, point[0]);
    Tuple.set(t, out.y, point[1]);
  }

  input.add.forEach(encode);
  if (this.reevaluate(input)) {
    input.mod.forEach(encode);
    input.rem.forEach(encode);
  }

  input.fields[out.x] = 1;
  input.fields[out.y] = 1;
  return input;
};
// Incrementally maintains aggregate results over the input changeset.
// On `reset`, all previously emitted aggregate tuples are retracted and
// the aggregator is rebuilt from scratch.
prototype.transform = function(input, reset) {
  log.debug(input, ['aggregate']);

  var output = ChangeSet.create(input),
      aggr = this.aggr(),
      out = this._out,
      args = this._args,
      reeval = true,
      p = Tuple.prev,
      add, rem, mod, mark, i;

  // Upon reset, retract prior tuples and re-initialize.
  if (reset) {
    output.rem.push.apply(output.rem, aggr.result());
    aggr.clear();
    this._aggr = null;
    aggr = this.aggr();
  }

  // Get update methods according to input type.
  if (this._type === TYPES.TUPLE) {
    // Tuples feed the aggregator directly. Removals/mods use the tuple's
    // previous-value snapshot (Tuple.prev) so retractions match what was
    // originally added; prev_init primes that snapshot on add.
    add = function(x) { aggr._add(x); Tuple.prev_init(x); };
    rem = function(x) { aggr._rem(p(x)); };
    mod = function(x) { aggr._mod(x, p(x)); };
    mark = function(x) { aggr._markMod(x, p(x)); };
  } else {
    // VALUE inputs feed the raw accessor value; otherwise wrap the tuple
    // into a lightweight {_id, groupby, value} record.
    var gby = this._acc.groupby,
        val = this._acc.value,
        get = this._type === TYPES.VALUE ? val :
          function(x) { return { _id: x._id, groupby: gby(x), value: val(x) }; };
    add = function(x) { aggr._add(get(x)); Tuple.prev_init(x); };
    rem = function(x) { aggr._rem(get(p(x))); };
    mod = function(x) { aggr._mod(get(x), get(p(x))); };
    mark = function(x) { aggr._mark(get(x), get(p(x))); };
  }

  input.add.forEach(add);

  if (reset) {
    // A signal change triggered reflow. Add everything.
    // No need for rem, we cleared the aggregator.
    input.mod.forEach(add);
  } else {
    input.rem.forEach(rem);
    // If possible, check argument fields to see if we need to re-process mods.
    // Only re-aggregate if a field the aggregator depends on changed.
    if (args) for (i=0, reeval=false; i<args.length; ++i) {
      if (input.fields[args[i]]) { reeval = true; break; }
    }
    input.mod.forEach(reeval ? mod : mark);
  }

  // Indicate output fields and return aggregate tuples.
  for (i=0; i<out.length; ++i) {
    output.fields[out[i]] = 1;
  }
  return (aggr._input = input, aggr.changes(output));
};
// Builds/updates scenegraph items for this mark from its backing data
// (or a static `from` definition), runs the encoder, and wires up any
// newly discovered scale dependencies.
proto.evaluate = function(input) {
  log.debug(input, ['building', (this._from || this._def.from), this._def.type]);

  var self = this,
      def = this._mark.def,
      props = def.properties || {},
      update = props.update || {},
      output, fullUpdate, fcs, data, name;

  if (this._ds) {
    output = ChangeSet.create(input);

    // We need to determine if any encoder dependencies have been updated.
    // However, the encoder's data source will likely be updated, and shouldn't
    // trigger all items to mod. Temporarily hide it during the reevaluate check.
    data = output.data[(name=this._ds.name())];
    delete output.data[name];
    fullUpdate = this._encoder.reevaluate(output);
    output.data[name] = data;

    // If a scale or signal in the update propset has been updated,
    // send forward all items for reencoding if we do an early return.
    if (fullUpdate) output.mod = this._mark.items.slice();

    fcs = this._ds.last();
    if (!fcs) throw Error('Builder evaluated before backing DataSource.');
    // Only (re)join when the data source has produced a newer changeset
    // than the one we last processed.
    if (fcs.stamp > this._stamp) {
      output = join.call(this, fcs, this._ds.values(), true, fullUpdate);
    }
  } else {
    // No backing data source: join against a function-provided array
    // or the single Sentinel datum.
    data = dl.isFunction(this._def.from) ? this._def.from() : [Sentinel];
    output = join.call(this, input, data);
  }

  // Stash output before Bounder for downstream reactive geometry.
  this._output = output = this._graph.evaluate(output, this._encoder);

  // Add any new scale references to the dependency list, and ensure
  // they're connected.
  if (update.nested && update.nested.length) {
    dl.keys(this._mark._scaleRefs).forEach(function(s) {
      var scale = self._parent.scale(s);
      if (!scale) return;
      scale.addListener(self);
      self.dependency(Deps.SCALES, s);
      self._encoder.dependency(Deps.SCALES, s);
    });
  }

  // Supernodes calculate bounds too, but only on items marked dirty.
  if (this._isSuper) {
    output.mod = output.mod.filter(function(x) { return x._dirty; });
    output = this._graph.evaluate(output, this._bounder);
  }

  return output;
};
// Runs the d3-cloud word layout over the batch and writes the computed
// position/font properties back onto the source tuples.
prototype.batchTransform = function(input, data) {
  log.debug(input, ['wordcloud']);

  // get variables
  var layout = this._layout,
      output = this._output,
      fontSize = this.param('fontSize'),
      // A fontScale range only applies when fontSize is a data field;
      // for a constant size there is nothing to rescale.
      range = fontSize.accessor && this.param('fontScale'),
      size, scale;
  fontSize = fontSize.accessor || d3.functor(fontSize);

  // create font size scaling function as needed
  // BUGFIX: when fontSize is a constant (no accessor), `range` is
  // undefined and `range.length` previously threw a TypeError.
  if (range && range.length) {
    scale = d3.scale.sqrt()
      .domain(dl.extent(data, size=fontSize))
      .range(range);
    fontSize = function(x) { return scale(size(x)); };
  }

  // configure layout
  layout
    .size(this.param('size'))
    .text(get(this.param('text')))
    .padding(this.param('padding'))
    .spiral(this.param('spiral'))
    .rotate(get(this.param('rotate')))
    .font(get(this.param('font')))
    .fontStyle(get(this.param('fontStyle')))
    .fontWeight(get(this.param('fontWeight')))
    .fontSize(fontSize)
    .words(data.map(wrap)) // wrap to avoid tuple writes
    .on('end', function(words) {
      // Copy layout results back onto the source tuples, translating
      // from the layout's centered origin to top-left coordinates.
      var size = layout.size(),
          dx = size[0] >> 1,
          dy = size[1] >> 1,
          w, t, i, len;
      for (i=0, len=words.length; i<len; ++i) {
        w = words[i];
        t = w._tuple;
        Tuple.set(t, output.x, w.x + dx);
        Tuple.set(t, output.y, w.y + dy);
        Tuple.set(t, output.font, w.font);
        Tuple.set(t, output.fontSize, w.size);
        Tuple.set(t, output.fontStyle, w.style);
        Tuple.set(t, output.fontWeight, w.weight);
        Tuple.set(t, output.rotate, w.rotate);
      }
    })
    .start();

  // return changeset, flagging every output field as updated
  for (var key in output) input.fields[output[key]] = 1;
  return input;
};
// Runs (or incrementally updates) the d3 force-directed layout over the
// node changeset, consulting the linked edge data source.
prototype.transform = function(nodeInput, reset) {
  log.debug(nodeInput, ['force']);

  // Suppress reset when this pulse carries the 'active' (drag) signal:
  // the subtraction turns a truthy reset into a falsy value.
  // NOTE(review): assumes reset arrives as 0/1 — confirm against caller.
  reset = reset - (nodeInput.signals.active ? 1 : 0);

  // get variables
  var interactive = this.param('interactive'),
      linkSource = this.param('links').source,
      linkInput = linkSource.last(),
      active = this.param('active'),
      output = this._output,
      layout = this._layout,
      nodes = this._nodes,
      links = this._links;

  // configure nodes, links and layout
  // Ignore link changesets older than the current node pulse.
  if (linkInput.stamp < nodeInput.stamp) linkInput = null;
  this.configure(nodeInput, linkInput, interactive, reset);

  // run batch layout: tick a fixed number of iterations, then stop.
  if (!interactive) {
    var iterations = this.param('iterations');
    for (var i=0; i<iterations; ++i) layout.tick();
    layout.stop();
  }

  // update node positions
  this.update(active);

  // re-up alpha on parameter change
  // Precedence: reset || (active changed && active && active.update),
  // i.e. restart on reset or on a new active drag carrying an update.
  if (reset || active !== this._prev && active && active.update) {
    layout.alpha(this.param('alpha')); // re-start layout
  }

  // update active node status
  if (active !== this._prev) {
    this._prev = active;
  }

  // process removed nodes or edges: filter them out of the cached
  // arrays and hand the layout the pruned versions.
  if (nodeInput.rem.length) {
    layout.nodes(this._nodes = Tuple.idFilter(nodes, nodeInput.rem));
  }
  if (linkInput && linkInput.rem.length) {
    layout.links(this._links = Tuple.idFilter(links, linkInput.rem));
  }

  // return changeset
  nodeInput.fields[output.x] = 1;
  nodeInput.fields[output.y] = 1;
  return nodeInput;
};
// Applies a declarative data modification (insert/remove/toggle/clear)
// driven by a signal value. `predicate`, `reeval`, `def`, `signal`,
// `signalName`, `isClear`, `ds`, `model` are closure variables.
node.evaluate = function(input) {
  // Re-check the guarding predicate against current data/signal values.
  if (predicate !== null) { // TODO: predicate args
    var db = model.values(Deps.DATA, predicate.data || EMPTY),
        sg = model.values(Deps.SIGNALS, predicate.signals || EMPTY);
    reeval = predicate.call(predicate, {}, db, sg, model._predicates);
  }

  log.debug(input, [def.type+"ing", reeval]);

  // Skip unless triggered and (except for CLEAR) the bound signal fired
  // on this pulse.
  if (!reeval || (!isClear && !input.signals[signalName])) return input;

  var datum = {},
      value = signal ? model.signalRef(def.signal) : null,
      d = model.data(ds.name),
      t = null;
  datum[def.field] = value;

  // We have to modify ds._data so that subsequent pulses contain
  // our dynamic data. W/o modifying ds._data, only the output
  // collector will contain dynamic tuples.
  if (def.type === Types.INSERT) {
    t = Tuple.ingest(datum);
    input.add.push(t);
    d._data.push(t);
  } else if (def.type === Types.REMOVE) {
    // Move matching tuples from add/mod into rem and drop them from the
    // backing data array.
    filter(def.field, value, input.add, input.rem);
    filter(def.field, value, input.mod, input.rem);
    d._data = d._data.filter(function(x) { return x[def.field] !== value; });
  } else if (def.type === Types.TOGGLE) {
    var add = [], rem = [];
    // Matching removed tuples come back; matching added/modified tuples
    // get retracted.
    filter(def.field, value, input.rem, add);
    filter(def.field, value, input.add, rem);
    filter(def.field, value, input.mod, rem);
    // Nothing matched anywhere: treat the toggle as a fresh insert.
    if (!(add.length || rem.length)) add.push(Tuple.ingest(datum));
    input.add.push.apply(input.add, add);
    d._data.push.apply(d._data, add);
    input.rem.push.apply(input.rem, rem);
    d._data = d._data.filter(function(x) { return rem.indexOf(x) === -1; });
  } else if (def.type === Types.CLEAR) {
    // Retract everything currently in flight and empty the store.
    input.rem.push.apply(input.rem, input.add);
    input.rem.push.apply(input.rem, input.mod);
    input.add = [];
    input.mod = [];
    d._data = [];
  }

  input.fields[def.field] = 1;
  return input;
};
prototype.batchTransform = function(input, data) {
  log.debug(input, ['imputing']);

  var groupby = this.param('groupby'),
      orderby = this.param('orderby'),
      method = this.param('method'),
      value = this.param('value'),
      field = this.param('field'),
      get = field.accessor,
      name = field.field,
      prev = this._imputed || [],
      curr = [],
      groups = partition(data, groupby.accessor, orderby.accessor),
      domain = groups.domain;

  // Null-safe accessor for computing group statistics.
  function getval(x) { return x == null ? null : get(x); }

  groups.forEach(function(group) {
    // Derive the imputation value for this group, unless a constant
    // 'value' method was configured.
    if (method !== 'value') {
      value = dl[method](group, getval);
    }
    // Synthesize a tuple for every missing slot in the order domain.
    for (var i=0, n=group.length; i<n; ++i) {
      if (group[i] == null) {
        var t = tuple(groupby.field, group.values, orderby.field, domain[i]);
        t[name] = value;
        curr.push(t);
      }
    }
  });

  // Add the newly imputed tuples and retract those from the prior run.
  curr.forEach(function(t) { input.add.push(t); });
  prev.forEach(function(t) { input.rem.push(t); });

  this._imputed = curr;
  return input;
};
// Computes bounds for the mark's items and maintains the mark-level
// union bounds, recomputing from scratch only when necessary.
proto.evaluate = function(input) {
  log.debug(input, ['bounds', this._mark.marktype]);

  var mark = this._mark,
      type = mark.marktype,
      isGrp = type === 'group',
      items = mark.items,
      hasLegends = dl.array(mark.def.legends).length > 0,
      bounds = mark.bounds,
      // Full recompute if we have no bounds yet, or any item was removed
      // (its bounds may have defined an edge of the union).
      rebound = !bounds || input.rem.length,
      i, ilen, j, jlen, group, legend;

  if (type === 'line' || type === 'area') {
    // Line/area marks have a single geometry spanning all items.
    bound.mark(mark, null, isGrp && !hasLegends);
  } else {
    input.add.forEach(function(item) {
      bound.item(item);
      // A new item landing outside current bounds forces a recompute.
      rebound = rebound || (bounds && !bounds.encloses(item.bounds));
    });

    input.mod.forEach(function(item) {
      // NOTE(review): recompute when the item's pre-update bounds
      // aligned with the mark bounds (it may have defined an edge) —
      // confirm Bounds.alignsWith semantics.
      rebound = rebound || (bounds && bounds.alignsWith(item.bounds));
      bound.item(item);
    });

    if (rebound) {
      // Re-union all item bounds, reusing the existing Bounds if present.
      bounds = mark.bounds && mark.bounds.clear() || (mark.bounds = new Bounds());
      for (i=0, ilen=items.length; i<ilen; ++i) bounds.union(items[i].bounds);
    }
  }

  if (isGrp && hasLegends) {
    // Re-position and re-bound legends within each group item.
    for (i=0, ilen=items.length; i<ilen; ++i) {
      group = items[i];
      group._legendPositions = null;
      for (j=0, jlen=group.legendItems.length; j<jlen; ++j) {
        legend = group.legendItems[j];
        Encoder.update(this._graph, input.trans, 'legendPosition', legend.items, input.dirty);
        bound.mark(legend, null, false);
      }
    }
    bound.mark(mark, null, true);
  }

  // Forward a reflow pulse downstream.
  return df.ChangeSet.create(input, true);
};
proto.evaluate = function(input) {
  var node = this;
  var rescale = function(group) { scale.call(node, group); };

  this._updated = false;
  input.add.forEach(rescale);
  input.mod.forEach(rescale);

  // Scales are at the end of an encoding pipeline, so they should forward a
  // reflow pulse. Thus, if multiple scales update in the parent group, we
  // don't reevaluate child marks multiple times.
  if (this._updated) {
    input.scales[this._def.name] = 1;
    log.debug(input, ["scale", this._def.name]);
  }

  return df.ChangeSet.create(input, true);
};
prototype.transform = function(input) {
  log.debug(input, ['formulating']);

  var graph = this._graph,
      field = this.param('field'),
      expr = this.param('expr'),
      signals = graph.signalValues(this.dependency(Deps.SIGNALS));

  // Evaluate the expression against the tuple and current signal values,
  // storing the result under the target field.
  var apply = function(x) {
    Tuple.set(x, field, expr(x, null, signals));
  };

  input.add.forEach(apply);
  if (this.reevaluate(input)) input.mod.forEach(apply);

  input.fields[field] = 1;
  return input;
};
prototype.transform = function(input) {
  log.debug(input, ['geopath']);

  var out = this._output,
      getGeoJSON = this.param('field').accessor || dl.identity,
      projection = Geo.d3Projection.call(this),
      pathGen = d3.geo.path().projection(projection);

  // Render the tuple's GeoJSON into an SVG path string.
  function encode(t) {
    Tuple.set(t, out.path, pathGen(getGeoJSON(t)));
  }

  input.add.forEach(encode);
  if (this.reevaluate(input)) {
    input.mod.forEach(encode);
    input.rem.forEach(encode);
  }

  input.fields[out.path] = 1;
  return input;
};
prototype.transform = function(input) {
  log.debug(input, ['formulating']);

  var field = this.param('field'),
      expr = this.param('expr'),
      touched = 0;

  // Write the expression result into the target field, counting how many
  // tuples were actually touched.
  function apply(x) {
    Tuple.set(x, field, expr(x));
    touched += 1;
  }

  input.add.forEach(apply);
  if (this.reevaluate(input)) input.mod.forEach(apply);

  // Only flag the output field if at least one tuple was written.
  if (touched) input.fields[field] = 1;
  return input;
};
// Builds a tree of internal group nodes over the data by recursively
// grouping on each groupby field, wiring parent/children references.
prototype.batchTransform = function(input, data) {
  log.debug(input, ['treeifying']);

  var fields = this.param('groupby').field,
      childField = this._output.children,
      parentField = this._output.parent,
      // Aggregate spec: collect each group's raw values under childField.
      summary = [{name:'*', ops: ['values'], as: [childField]}],
      aggrs = fields.map(function(f) {
        return dl.groupby(f).summarize(summary);
      }),
      prev = this._internal || [], curr = [],
      i, n;

  // Group `values` by fields[index], attach the group nodes as children
  // of `node`, and recurse until all groupby fields are consumed.
  function level(index, node, values) {
    var vals = aggrs[index].execute(values);
    node[childField] = vals;
    vals.forEach(function(n) {
      n[parentField] = node;
      curr.push(Tuple.ingest(n));
      if (index+1 < fields.length) level(index+1, n, n[childField]);
      // At the deepest level, point raw data tuples back at their group.
      else n[childField].forEach(function(c) { c[parentField] = n; });
    });
  }

  var root = Tuple.ingest({});
  root[parentField] = null;
  curr.push(root);
  level(0, root, data);

  // update changeset with internal nodes: add this run's nodes and
  // retract the previous run's.
  for (i=0, n=curr.length; i<n; ++i) { input.add.push(curr[i]); }
  for (i=0, n=prev.length; i<n; ++i) { input.rem.push(prev[i]); }

  this._internal = curr;
  return input;
};
prototype.batchTransform = function(input, data) {
  log.debug(input, ['voronoi']);

  var pathname = this._output.path;

  // Configure the layout and compute one polygon per datum.
  var cells = this._layout
    .clipExtent(this.param('clipExtent'))
    .x(this.param('x').accessor)
    .y(this.param('y').accessor)
    (data);

  // Serialize each polygon into an SVG path string on its tuple.
  data.forEach(function(d, i) {
    Tuple.set(d, pathname, 'M' + cells[i].join('L') + 'Z');
  });

  input.fields[pathname] = 1;
  return input;
};
prototype.transform = function(input) {
  log.debug(input, ['filtering']);

  var output = df.ChangeSet.create(input),
      graph = this._graph,
      skip = this._skip,
      test = this.param('test'),
      signals = graph.signalValues(this.dependency(Deps.SIGNALS));

  // Removals: pass through unless we previously filtered the tuple out
  // ourselves (in which case just clear its skip flag).
  input.rem.forEach(function(x) {
    if (skip[x._id] !== 1) {
      output.rem.push(x);
    } else {
      skip[x._id] = 0;
    }
  });

  // Additions: forward passing tuples, mark failing ones as skipped.
  input.add.forEach(function(x) {
    if (test(x, null, signals)) {
      output.add.push(x);
    } else {
      skip[x._id] = 1;
    }
  });

  // Modifications: a tuple may cross the filter boundary in either
  // direction, turning a mod into an add or a rem.
  input.mod.forEach(function(x) {
    var pass = test(x, null, signals),
        skipped = skip[x._id] === 1;
    if (pass) {
      if (skipped) {
        skip[x._id] = 0;
        output.add.push(x); // re-entering the filtered set
      } else {
        output.mod.push(x); // still in, ordinary mod
      }
    } else if (!skipped) {
      skip[x._id] = 1;
      output.rem.push(x); // leaving the filtered set
    }
    // pass === false && skipped: already filtered out, nothing to do.
  });

  return output;
};
prototype.batchTransform = function(input, data) {
  log.debug(input, ['stacking']);

  var groupby = this.param('groupby').accessor,
      sortby = dl.comparator(this.param('sortby').field),
      field = this.param('field').accessor,
      offset = this.param('offset'),
      out = this._output,
      // partition, sum, and sort the stack groups
      groups = partition(data, groupby, sortby, field),
      max = groups.max;

  // Compute stack layouts per group.
  groups.forEach(function(group) {
    var sum = group.sum,
        shift = offset === 'center' ? (max - sum) / 2 : 0,
        k = offset === 'normalize' ? (1 / sum) : 1,
        total = 0,
        lo, hi = shift;

    // Walk the sorted group, accumulating start/end/mid coordinates.
    group.forEach(function(t) {
      lo = hi; // previous end becomes this datum's start
      total += field(t);
      hi = k * total + shift;
      Tuple.set(t, out.start, lo);
      Tuple.set(t, out.end, hi);
      Tuple.set(t, out.mid, 0.5 * (lo + hi));
    });
  });

  input.fields[out.start] = 1;
  input.fields[out.end] = 1;
  input.fields[out.mid] = 1;
  return input;
};
v._renderNode.evaluate = function(input) {
  log.debug(input, ['rendering']);

  var scene = v._model.scene(),
      handler = v._handler;

  if (handler && handler.scene) handler.scene(scene);

  // Choose the narrowest redraw that covers this pulse: an animated
  // transition, a full repaint, or just the dirty items.
  if (input.trans) {
    input.trans.start(function(items) { v._renderer.render(scene, items); });
  } else if (v._repaint) {
    v._renderer.render(scene);
  } else if (input.dirty.length) {
    v._renderer.render(scene, input.dirty);
  }

  // Clear dirty flags now that the items have been drawn.
  if (input.dirty.length) {
    input.dirty.forEach(function(item) { item._dirty = false; });
    scene.items[0]._dirty = false;
  }

  v._repaint = v._skipSignals = false;
  return input;
};
// Applies a declarative data modification (insert/remove/upsert/toggle/
// clear) driven by a signal value. `predicate`, `exprTrigger`, `reeval`,
// `def`, `signal`, `signalName`, `isClear`, `ds`, `model`, `fields`,
// `getters`, `setters`, `insert`, `filter` are closure variables.
node.evaluate = function(input) {
  var db, sg;

  // Re-check the guarding predicate against current data/signal values.
  if (predicate !== null) { // TODO: predicate args
    db = model.values(Deps.DATA, predicate.data || EMPTY);
    sg = model.values(Deps.SIGNALS, predicate.signals || EMPTY);
    reeval = predicate.call(predicate, {}, db, sg, model._predicates);
  }

  // Alternative expression-based trigger; overrides the predicate result.
  if (exprTrigger !== null) {
    sg = model.values(Deps.SIGNALS, exprTrigger.globals || EMPTY);
    reeval = exprTrigger.fn();
  }

  log.debug(input, [def.type+"ing", reeval]);

  // Skip unless triggered and (except for CLEAR) the bound signal fired.
  if (!reeval || (!isClear && !input.signals[signalName])) return input;

  var value = signal ? model.signalRef(def.signal) : null,
      d = model.data(ds.name),
      t = null, add = [], rem = [], up = 0, datum;

  if (dl.isObject(value)) {
    // Object-valued signal: use it as the datum directly. Without an
    // explicit field, derive accessors/mutators from its keys.
    // NOTE: fields/getters/setters are closure variables, reassigned here.
    datum = value;
    if (!def.field) {
      fields = dl.keys(datum);
      getters = fields.map(dl.accessor);
      setters = fields.map(dl.mutator);
    }
  } else {
    // Scalar value: build a datum by writing it through every setter.
    datum = {};
    setters.forEach(function(f) { f(datum, value); });
  }

  // We have to modify ds._data so that subsequent pulses contain
  // our dynamic data. W/o modifying ds._data, only the output
  // collector will contain dynamic tuples.
  if (def.type === Types.INSERT) {
    insert(input, datum, d);
  } else if (def.type === Types.REMOVE) {
    filter(getters, value, input.mod, input.rem);
    // NOTE(review): matches collected into `rem` from input.add and
    // d._data are never applied to input.rem or filtered out of d._data
    // in this branch — verify this is intended.
    filter(getters, value, input.add, rem);
    filter(getters, value, d._data, rem);
  } else if (def.type === Types.UPSERT) {
    // Merge the datum into every matching mod tuple; insert if none match.
    input.mod.forEach(function(x) {
      var every = getters.every(function(f) {
        return f(x) === f(datum);
      });
      if (every) up = (dl.extend(x, datum), up+1);
    });
    if (up === 0) insert(input, datum, d);
  } else if (def.type === Types.TOGGLE) {
    // If tuples are in mod, remove them.
    filter(getters, value, input.mod, rem);
    input.rem.push.apply(input.rem, rem);

    // If tuples are in add, they've been added to backing data source,
    // but no downstream operators will have seen it yet.
    filter(getters, value, input.add, add);

    if (add.length || rem.length) {
      // Drop every toggled tuple from the backing data array.
      d._data = d._data.filter(function(x) {
        return rem.indexOf(x) < 0 && add.indexOf(x) < 0;
      });
    } else {
      // If the tuples aren't seen in the changeset, add a new tuple.
      // Note, tuple might be in input.rem, but we ignore this and just
      // re-add a new tuple for simplicity.
      input.add.push(t=Tuple.ingest(datum));
      d._data.push(t);
    }
  } else if (def.type === Types.CLEAR) {
    // Retract all in-flight mods, and empty adds plus the backing store.
    input.rem.push.apply(input.rem, input.mod.splice(0));
    input.add.splice(0);
    d._data.splice(0);
  }

  fields.forEach(function(f) { input.fields[f] = 1; });
  return input;
};
// Detach this cell's dataflow pipeline from the facet and its graph.
function disconnect_cell(facet) {
  log.debug({}, ['disconnecting cell', this.tuple._id]);
  var nodes = this.ds.pipeline();
  facet.removeListener(nodes[0]);
  facet._graph.disconnect(nodes);
}