| (function(exports){ |
| crossfilter.version = "1.0.3"; |
| function crossfilter_identity(d) { |
| return d; |
| } |
| crossfilter.permute = permute; |
| |
| function permute(array, index) { |
| for (var i = 0, n = index.length, copy = new Array(n); i < n; ++i) { |
| copy[i] = array[index[i]]; |
| } |
| return copy; |
| } |
| var bisect = crossfilter.bisect = bisect_by(crossfilter_identity); |
| |
| bisect.by = bisect_by; |
| |
| function bisect_by(f) { |
| |
| // Locate the insertion point for x in a to maintain sorted order. The |
// arguments lo and hi specify the subset of the array to consider (pass 0 and
// a.length to search the entire array). If x is already
| // present in a, the insertion point will be before (to the left of) any |
| // existing entries. The return value is suitable for use as the first |
| // argument to `array.splice` assuming that a is already sorted. |
| // |
// The returned insertion point i partitions the array a into two halves, so
// that every v in a[lo:i] satisfies f(v) < x (the left side), and every v in
// a[i:hi] satisfies f(v) >= x (the right side).
| function bisectLeft(a, x, lo, hi) { |
| while (lo < hi) { |
| var mid = lo + hi >> 1; |
| if (f(a[mid]) < x) lo = mid + 1; |
| else hi = mid; |
| } |
| return lo; |
| } |
| |
| // Similar to bisectLeft, but returns an insertion point which comes after (to |
| // the right of) any existing entries of x in a. |
| // |
// The returned insertion point i partitions the array into two halves, so
// that every v in a[lo:i] satisfies f(v) <= x (the left side), and every v in
// a[i:hi] satisfies f(v) > x (the right side).
| function bisectRight(a, x, lo, hi) { |
| while (lo < hi) { |
| var mid = lo + hi >> 1; |
| if (x < f(a[mid])) hi = mid; |
| else lo = mid + 1; |
| } |
| return lo; |
| } |
| |
| bisectRight.right = bisectRight; |
| bisectRight.left = bisectLeft; |
| return bisectRight; |
| } |
| var heap = crossfilter.heap = heap_by(crossfilter_identity); |
| |
| heap.by = heap_by; |
| |
| function heap_by(f) { |
| |
// Builds a binary heap within the specified array a[lo:hi]. The heap has the
// property that each parent a[lo+i] is less than or equal to its two
// children, a[lo+2*i+1] and a[lo+2*i+2]; i.e., it is a min-heap keyed by f.
| function heap(a, lo, hi) { |
| var n = hi - lo, |
| i = (n >>> 1) + 1; |
| while (--i > 0) sift(a, i, n, lo); |
| return a; |
| } |
| |
| // Sorts the specified array a[lo:hi] in descending order, assuming it is |
| // already a heap. |
| function sort(a, lo, hi) { |
| var n = hi - lo, |
| t; |
| while (--n > 0) t = a[lo], a[lo] = a[lo + n], a[lo + n] = t, sift(a, 1, n, lo); |
| return a; |
| } |
| |
| // Sifts the element a[lo+i-1] down the heap, where the heap is the contiguous |
| // slice of array a[lo:lo+n]. This method can also be used to update the heap |
| // incrementally, without incurring the full cost of reconstructing the heap. |
| function sift(a, i, n, lo) { |
| var d = a[--lo + i], |
| x = f(d), |
| child; |
| while ((child = i << 1) <= n) { |
| if (child < n && f(a[lo + child]) > f(a[lo + child + 1])) child++; |
| if (x <= f(a[lo + child])) break; |
| a[lo + i] = a[lo + child]; |
| i = child; |
| } |
| a[lo + i] = d; |
| } |
| |
| heap.sort = sort; |
| return heap; |
| } |
| var heapselect = crossfilter.heapselect = heapselect_by(crossfilter_identity); |
| |
| heapselect.by = heapselect_by; |
| |
| function heapselect_by(f) { |
| var heap = heap_by(f); |
| |
| // Returns a new array containing the top k elements in the array a[lo:hi]. |
| // The returned array is not sorted, but maintains the heap property. If k is |
| // greater than hi - lo, then fewer than k elements will be returned. The |
| // order of elements in a is unchanged by this operation. |
| function heapselect(a, lo, hi, k) { |
| var queue = new Array(k = Math.min(hi - lo, k)), |
| min, |
| i, |
| x, |
| d; |
| |
| for (i = 0; i < k; ++i) queue[i] = a[lo++]; |
| heap(queue, 0, k); |
| |
| if (lo < hi) { |
| min = f(queue[0]); |
| do { |
if ((x = f(d = a[lo])) > min) {
| queue[0] = d; |
| min = f(heap(queue, 0, k)[0]); |
| } |
| } while (++lo < hi); |
| } |
| |
| return queue; |
| } |
| |
| return heapselect; |
| } |
| var insertionsort = crossfilter.insertionsort = insertionsort_by(crossfilter_identity); |
| |
| insertionsort.by = insertionsort_by; |
| |
| function insertionsort_by(f) { |
| |
// Sorts the specified array a[lo:hi] in place, in ascending order by f.
function insertionsort(a, lo, hi) {
| for (var i = lo + 1; i < hi; ++i) { |
| for (var j = i, t = a[i], x = f(t); j > lo && f(a[j - 1]) > x; --j) { |
| a[j] = a[j - 1]; |
| } |
| a[j] = t; |
| } |
| return a; |
| } |
| |
| return insertionsort; |
| } |
| // Algorithm designed by Vladimir Yaroslavskiy. |
| // Implementation based on the Dart project; see lib/dart/LICENSE for details. |
| |
| var quicksort = crossfilter.quicksort = quicksort_by(crossfilter_identity); |
| |
| quicksort.by = quicksort_by; |
| |
| function quicksort_by(f) { |
| var insertionsort = insertionsort_by(f); |
| |
| function sort(a, lo, hi) { |
| return (hi - lo < quicksort_sizeThreshold |
| ? insertionsort |
| : quicksort)(a, lo, hi); |
| } |
| |
| function quicksort(a, lo, hi) { |
| |
| // Compute the two pivots by looking at 5 elements. |
| var sixth = (hi - lo) / 6 | 0, |
| i1 = lo + sixth, |
| i5 = hi - 1 - sixth, |
| i3 = lo + hi - 1 >> 1, // The midpoint. |
| i2 = i3 - sixth, |
| i4 = i3 + sixth; |
| |
| var e1 = a[i1], x1 = f(e1), |
| e2 = a[i2], x2 = f(e2), |
| e3 = a[i3], x3 = f(e3), |
| e4 = a[i4], x4 = f(e4), |
| e5 = a[i5], x5 = f(e5); |
| |
| var t; |
| |
| // Sort the selected 5 elements using a sorting network. |
| if (x1 > x2) t = e1, e1 = e2, e2 = t, t = x1, x1 = x2, x2 = t; |
| if (x4 > x5) t = e4, e4 = e5, e5 = t, t = x4, x4 = x5, x5 = t; |
| if (x1 > x3) t = e1, e1 = e3, e3 = t, t = x1, x1 = x3, x3 = t; |
| if (x2 > x3) t = e2, e2 = e3, e3 = t, t = x2, x2 = x3, x3 = t; |
| if (x1 > x4) t = e1, e1 = e4, e4 = t, t = x1, x1 = x4, x4 = t; |
| if (x3 > x4) t = e3, e3 = e4, e4 = t, t = x3, x3 = x4, x4 = t; |
| if (x2 > x5) t = e2, e2 = e5, e5 = t, t = x2, x2 = x5, x5 = t; |
| if (x2 > x3) t = e2, e2 = e3, e3 = t, t = x2, x2 = x3, x3 = t; |
| if (x4 > x5) t = e4, e4 = e5, e5 = t, t = x4, x4 = x5, x5 = t; |
| |
| var pivot1 = e2, pivotValue1 = x2, |
| pivot2 = e4, pivotValue2 = x4; |
| |
// e2 and e4 have been saved in the pivot variables. They will be written
// back once the partitioning is finished.
| a[i1] = e1; |
| a[i2] = a[lo]; |
| a[i3] = e3; |
| a[i4] = a[hi - 1]; |
| a[i5] = e5; |
| |
| var less = lo + 1, // First element in the middle partition. |
| great = hi - 2; // Last element in the middle partition. |
| |
| // Note that for value comparison, <, <=, >= and > coerce to a primitive via |
| // Object.prototype.valueOf; == and === do not, so in order to be consistent |
| // with natural order (such as for Date objects), we must do two compares. |
| var pivotsEqual = pivotValue1 <= pivotValue2 && pivotValue1 >= pivotValue2; |
| if (pivotsEqual) { |
| |
// Degenerate case where the partitioning becomes a Dutch national flag
// problem.
| // |
| // [ | < pivot | == pivot | unpartitioned | > pivot | ] |
| // ^ ^ ^ ^ ^ |
| // left less k great right |
| // |
| // a[left] and a[right] are undefined and are filled after the |
| // partitioning. |
| // |
| // Invariants: |
| // 1) for x in ]left, less[ : x < pivot. |
| // 2) for x in [less, k[ : x == pivot. |
| // 3) for x in ]great, right[ : x > pivot. |
| for (var k = less; k <= great; ++k) { |
| var ek = a[k], xk = f(ek); |
| if (xk < pivotValue1) { |
| if (k !== less) { |
| a[k] = a[less]; |
| a[less] = ek; |
| } |
| ++less; |
| } else if (xk > pivotValue1) { |
| |
// Find the first element <= pivot in the range [k - 1, great] and
// put ek there. We know that such an element must exist:
// When k == less, then e3 (which is equal to the pivot) lies in the
// interval. Otherwise a[k - 1] == pivot and the search stops at k - 1.
| // Note that in the latter case invariant 2 will be violated for a |
| // short amount of time. The invariant will be restored when the |
| // pivots are put into their final positions. |
| while (true) { |
| var greatValue = f(a[great]); |
| if (greatValue > pivotValue1) { |
| great--; |
| // This is the only location in the while-loop where a new |
| // iteration is started. |
| continue; |
| } else if (greatValue < pivotValue1) { |
| // Triple exchange. |
| a[k] = a[less]; |
| a[less++] = a[great]; |
| a[great--] = ek; |
| break; |
| } else { |
| a[k] = a[great]; |
| a[great--] = ek; |
| // Note: if great < k then we will exit the outer loop and fix |
| // invariant 2 (which we just violated). |
| break; |
| } |
| } |
| } |
| } |
| } else { |
| |
| // We partition the list into three parts: |
| // 1. < pivot1 |
| // 2. >= pivot1 && <= pivot2 |
| // 3. > pivot2 |
| // |
| // During the loop we have: |
| // [ | < pivot1 | >= pivot1 && <= pivot2 | unpartitioned | > pivot2 | ] |
| // ^ ^ ^ ^ ^ |
| // left less k great right |
| // |
| // a[left] and a[right] are undefined and are filled after the |
| // partitioning. |
| // |
| // Invariants: |
| // 1. for x in ]left, less[ : x < pivot1 |
| // 2. for x in [less, k[ : pivot1 <= x && x <= pivot2 |
| // 3. for x in ]great, right[ : x > pivot2 |
| for (var k = less; k <= great; k++) { |
| var ek = a[k], xk = f(ek); |
| if (xk < pivotValue1) { |
| if (k !== less) { |
| a[k] = a[less]; |
| a[less] = ek; |
| } |
| ++less; |
| } else { |
| if (xk > pivotValue2) { |
| while (true) { |
| var greatValue = f(a[great]); |
| if (greatValue > pivotValue2) { |
| great--; |
| if (great < k) break; |
| // This is the only location inside the loop where a new |
| // iteration is started. |
| continue; |
| } else { |
| // a[great] <= pivot2. |
| if (greatValue < pivotValue1) { |
| // Triple exchange. |
| a[k] = a[less]; |
| a[less++] = a[great]; |
| a[great--] = ek; |
| } else { |
| // a[great] >= pivot1. |
| a[k] = a[great]; |
| a[great--] = ek; |
| } |
| break; |
| } |
| } |
| } |
| } |
| } |
| } |
| |
| // Move pivots into their final positions. |
// We shrank the list from both sides (a[left] and a[right] have
| // meaningless values in them) and now we move elements from the first |
| // and third partition into these locations so that we can store the |
| // pivots. |
| a[lo] = a[less - 1]; |
| a[less - 1] = pivot1; |
| a[hi - 1] = a[great + 1]; |
| a[great + 1] = pivot2; |
| |
| // The list is now partitioned into three partitions: |
| // [ < pivot1 | >= pivot1 && <= pivot2 | > pivot2 ] |
| // ^ ^ ^ ^ |
| // left less great right |
| |
| // Recursive descent. (Don't include the pivot values.) |
| sort(a, lo, less - 1); |
| sort(a, great + 2, hi); |
| |
| if (pivotsEqual) { |
| // All elements in the second partition are equal to the pivot. No |
| // need to sort them. |
| return a; |
| } |
| |
// In theory it would be enough to call sort recursively on the second
// partition.
// The Android source, however, removes the pivot elements from the recursive
// call if the second partition is too large (more than 2/3 of the list).
| if (less < i1 && great > i5) { |
| var lessValue, greatValue; |
| while ((lessValue = f(a[less])) <= pivotValue1 && lessValue >= pivotValue1) ++less; |
| while ((greatValue = f(a[great])) <= pivotValue2 && greatValue >= pivotValue2) --great; |
| |
// Copy-paste of the previous 3-way partitioning, with adaptations.
| // |
| // We partition the list into three parts: |
| // 1. == pivot1 |
| // 2. > pivot1 && < pivot2 |
| // 3. == pivot2 |
| // |
| // During the loop we have: |
| // [ == pivot1 | > pivot1 && < pivot2 | unpartitioned | == pivot2 ] |
| // ^ ^ ^ |
| // less k great |
| // |
| // Invariants: |
| // 1. for x in [ *, less[ : x == pivot1 |
| // 2. for x in [less, k[ : pivot1 < x && x < pivot2 |
| // 3. for x in ]great, * ] : x == pivot2 |
| for (var k = less; k <= great; k++) { |
| var ek = a[k], xk = f(ek); |
| if (xk <= pivotValue1 && xk >= pivotValue1) { |
| if (k !== less) { |
| a[k] = a[less]; |
| a[less] = ek; |
| } |
| less++; |
| } else { |
| if (xk <= pivotValue2 && xk >= pivotValue2) { |
| while (true) { |
| var greatValue = f(a[great]); |
| if (greatValue <= pivotValue2 && greatValue >= pivotValue2) { |
| great--; |
| if (great < k) break; |
| // This is the only location inside the loop where a new |
| // iteration is started. |
| continue; |
| } else { |
| // a[great] < pivot2. |
| if (greatValue < pivotValue1) { |
| // Triple exchange. |
| a[k] = a[less]; |
| a[less++] = a[great]; |
| a[great--] = ek; |
| } else { |
| // a[great] == pivot1. |
| a[k] = a[great]; |
| a[great--] = ek; |
| } |
| break; |
| } |
| } |
| } |
| } |
| } |
| } |
| |
// If the clean-up pass above ran, the second partition has now been cleared
// of pivot elements and looks as follows:
// [ * | > pivot1 && < pivot2 | * ]
// ^ ^
// less great
// Otherwise it may still contain values equal to the pivots:
// [ * | >= pivot1 && <= pivot2 | * ]
// ^ ^
// less great
// Either way, sort the second partition by recursive descent.
| |
| return sort(a, less, great + 1); |
| } |
| |
| return sort; |
| } |
| |
| var quicksort_sizeThreshold = 32; |
| var crossfilter_array8 = crossfilter_arrayUntyped, |
| crossfilter_array16 = crossfilter_arrayUntyped, |
| crossfilter_array32 = crossfilter_arrayUntyped, |
| crossfilter_arrayLengthen = crossfilter_identity, |
| crossfilter_arrayWiden = crossfilter_identity; |
| |
| if (typeof Uint8Array !== "undefined") { |
| crossfilter_array8 = function(n) { return new Uint8Array(n); }; |
| crossfilter_array16 = function(n) { return new Uint16Array(n); }; |
| crossfilter_array32 = function(n) { return new Uint32Array(n); }; |
| |
| crossfilter_arrayLengthen = function(array, length) { |
| var copy = new array.constructor(length); |
| copy.set(array); |
| return copy; |
| }; |
| |
| crossfilter_arrayWiden = function(array, width) { |
| var copy; |
| switch (width) { |
| case 16: copy = crossfilter_array16(array.length); break; |
| case 32: copy = crossfilter_array32(array.length); break; |
| default: throw new Error("invalid array width!"); |
| } |
| copy.set(array); |
| return copy; |
| }; |
| } |
| |
| function crossfilter_arrayUntyped(n) { |
| return new Array(n); |
| } |
| function crossfilter_filterExact(bisect, value) { |
| return function(values) { |
| var n = values.length; |
| return [bisect.left(values, value, 0, n), bisect.right(values, value, 0, n)]; |
| }; |
| } |
| |
| function crossfilter_filterRange(bisect, range) { |
| var min = range[0], |
| max = range[1]; |
| return function(values) { |
| var n = values.length; |
| return [bisect.left(values, min, 0, n), bisect.left(values, max, 0, n)]; |
| }; |
| } |
| |
| function crossfilter_filterAll(values) { |
| return [0, values.length]; |
| } |
| function crossfilter_null() { |
| return null; |
| } |
| function crossfilter_zero() { |
| return 0; |
| } |
| function crossfilter_reduceIncrement(p) { |
| return p + 1; |
| } |
| |
| function crossfilter_reduceDecrement(p) { |
| return p - 1; |
| } |
| |
| function crossfilter_reduceAdd(f) { |
| return function(p, v) { |
| return p + +f(v); |
| }; |
| } |
| |
| function crossfilter_reduceSubtract(f) { |
| return function(p, v) { |
| return p - f(v); |
| }; |
| } |
| exports.crossfilter = crossfilter; |
| |
// Constructs a new crossfilter. If records are specified, they are added.
function crossfilter() {
| var crossfilter = { |
| add: add, |
| dimension: dimension, |
| groupAll: groupAll, |
| size: size |
| }; |
| |
| var data = [], // the records |
| n = 0, // the number of records; data.length |
| m = 0, // number of dimensions in use |
| M = 8, // number of dimensions that can fit in `filters` |
| filters = crossfilter_array8(0), // M bits per record; 1 is filtered out |
| filterListeners = [], // when the filters change |
| dataListeners = []; // when data is added |
| |
| // Adds the specified new records to this crossfilter. |
| function add(newData) { |
| var n0 = n, |
| n1 = newData.length; |
| |
| // If there's actually new data to add… |
| // Merge the new data into the existing data. |
| // Lengthen the filter bitset to handle the new records. |
| // Notify listeners (dimensions and groups) that new data is available. |
| if (n1) { |
| data = data.concat(newData); |
| filters = crossfilter_arrayLengthen(filters, n += n1); |
| dataListeners.forEach(function(l) { l(newData, n0, n1); }); |
| } |
| |
| return crossfilter; |
| } |
| |
| // Adds a new dimension with the specified value accessor function. |
| function dimension(value) { |
| var dimension = { |
| filter: filter, |
| filterExact: filterExact, |
| filterRange: filterRange, |
| filterAll: filterAll, |
| top: top, |
| group: group, |
| groupAll: groupAll |
| }; |
| |
| var one = 1 << m++, // bit mask, e.g., 00001000 |
| zero = ~one, // inverted one, e.g., 11110111 |
| values, // sorted, cached array |
| index, // value rank ↦ object id |
| newValues, // temporary array storing newly-added values |
| newIndex, // temporary array storing newly-added index |
| sort = quicksort_by(function(i) { return newValues[i]; }), |
| refilter = crossfilter_filterAll, // for recomputing filter |
| indexListeners = [], // when data is added |
| lo0 = 0, |
| hi0 = 0; |
| |
| // Updating a dimension is a two-stage process. First, we must update the |
| // associated filters for the newly-added records. Once all dimensions have |
| // updated their filters, the groups are notified to update. |
| dataListeners.unshift(preAdd); |
| dataListeners.push(postAdd); |
| |
| // Incorporate any existing data into this dimension, and make sure that the |
| // filter bitset is wide enough to handle the new dimension. |
| if (m > M) filters = crossfilter_arrayWiden(filters, M <<= 1); |
| preAdd(data, 0, n); |
| postAdd(data, 0, n); |
| |
| // Incorporates the specified new records into this dimension. |
| // This function is responsible for updating filters, values, and index. |
| function preAdd(newData, n0, n1) { |
| |
| // Permute new values into natural order using a sorted index. |
| newValues = newData.map(value); |
| newIndex = sort(crossfilter_range(n1), 0, n1); |
| newValues = permute(newValues, newIndex); |
| |
| // Bisect newValues to determine which new records are selected. |
| var bounds = refilter(newValues), lo1 = bounds[0], hi1 = bounds[1], i; |
| for (i = 0; i < lo1; ++i) filters[newIndex[i] + n0] |= one; |
| for (i = hi1; i < n1; ++i) filters[newIndex[i] + n0] |= one; |
| |
| // If this dimension previously had no data, then we don't need to do the |
| // more expensive merge operation; use the new values and index as-is. |
| if (!n0) { |
| values = newValues; |
| index = newIndex; |
| lo0 = lo1; |
| hi0 = hi1; |
| return; |
| } |
| |
| var oldValues = values, |
| oldIndex = index, |
| i0 = 0, |
| i1 = 0; |
| |
| // Otherwise, create new arrays into which to merge new and old. |
| values = new Array(n); |
| index = crossfilter_index(n, n); |
| |
| // Merge the old and new sorted values, and old and new index. |
| for (i = 0; i0 < n0 && i1 < n1; ++i) { |
| if (oldValues[i0] < newValues[i1]) { |
| values[i] = oldValues[i0]; |
| index[i] = oldIndex[i0++]; |
| } else { |
| values[i] = newValues[i1]; |
| index[i] = newIndex[i1++] + n0; |
| } |
| } |
| |
| // Add any remaining old values. |
| for (; i0 < n0; ++i0, ++i) { |
| values[i] = oldValues[i0]; |
| index[i] = oldIndex[i0]; |
| } |
| |
| // Add any remaining new values. |
| for (; i1 < n1; ++i1, ++i) { |
| values[i] = newValues[i1]; |
| index[i] = newIndex[i1] + n0; |
| } |
| |
| // Bisect again to recompute lo0 and hi0. |
| bounds = refilter(values), lo0 = bounds[0], hi0 = bounds[1]; |
| } |
| |
| // When all filters have updated, notify index listeners of the new values. |
| function postAdd(newData, n0, n1) { |
| indexListeners.forEach(function(l) { l(newValues, newIndex, n0, n1); }); |
| newValues = newIndex = null; |
| } |
| |
// Updates the selection given the specified bounds [lo, hi) into the sorted
// values array; the lower bound is inclusive and the upper bound exclusive.
| // This implementation is used by all the public filter methods. |
| function filterIndex(bounds) { |
| var i, |
| j, |
| k, |
| lo1 = bounds[0], |
| hi1 = bounds[1], |
| added = [], |
| removed = []; |
| |
| // Fast incremental update based on previous lo index. |
| if (lo1 < lo0) { |
| for (i = lo1, j = Math.min(lo0, hi1); i < j; ++i) { |
| filters[k = index[i]] ^= one; |
| added.push(k); |
| } |
| } else if (lo1 > lo0) { |
| for (i = lo0, j = Math.min(lo1, hi0); i < j; ++i) { |
| filters[k = index[i]] ^= one; |
| removed.push(k); |
| } |
| } |
| |
| // Fast incremental update based on previous hi index. |
| if (hi1 > hi0) { |
| for (i = Math.max(lo1, hi0), j = hi1; i < j; ++i) { |
| filters[k = index[i]] ^= one; |
| added.push(k); |
| } |
| } else if (hi1 < hi0) { |
| for (i = Math.max(lo0, hi1), j = hi0; i < j; ++i) { |
| filters[k = index[i]] ^= one; |
| removed.push(k); |
| } |
| } |
| |
| lo0 = lo1; |
| hi0 = hi1; |
| filterListeners.forEach(function(l) { l(one, added, removed); }); |
| return dimension; |
| } |
| |
| // Filters this dimension using the specified range, value, or null. |
| // If the range is null, this is equivalent to filterAll. |
| // If the range is an array, this is equivalent to filterRange. |
| // Otherwise, this is equivalent to filterExact. |
| function filter(range) { |
| return range == null |
| ? filterAll() : Array.isArray(range) |
| ? filterRange(range) |
| : filterExact(range); |
| } |
| |
| // Filters this dimension to select the exact value. |
| function filterExact(value) { |
| return filterIndex((refilter = crossfilter_filterExact(bisect, value))(values)); |
| } |
| |
// Filters this dimension to select the specified range [lo, hi).
| // The lower bound is inclusive, and the upper bound is exclusive. |
| function filterRange(range) { |
| return filterIndex((refilter = crossfilter_filterRange(bisect, range))(values)); |
| } |
| |
| // Clears any filters on this dimension. |
| function filterAll() { |
| return filterIndex((refilter = crossfilter_filterAll)(values)); |
| } |
| |
| // Returns the top K selected records, based on this dimension's order. |
| // Note: observes this dimension's filter, unlike group and groupAll. |
| function top(k) { |
| var array = [], |
| i = hi0, |
| j; |
| |
| while (--i >= lo0 && k > 0) { |
| if (!filters[j = index[i]]) { |
| array.push(data[j]); |
| --k; |
| } |
| } |
| |
| return array; |
| } |
| |
| // Adds a new group to this dimension, using the specified key function. |
| function group(key) { |
| var group = { |
| top: top, |
| all: all, |
| reduce: reduce, |
| reduceCount: reduceCount, |
| reduceSum: reduceSum, |
| order: order, |
| orderNatural: orderNatural, |
| size: size |
| }; |
| |
| var groups, // array of {key, value} |
| groupIndex, // object id ↦ group id |
| groupWidth = 8, |
| groupCapacity = crossfilter_capacity(groupWidth), |
| k = 0, // cardinality |
| select, |
| heap, |
| reduceAdd, |
| reduceRemove, |
| reduceInitial, |
| update = crossfilter_null, |
| reset = crossfilter_null, |
| resetNeeded = true; |
| |
| if (arguments.length < 1) key = crossfilter_identity; |
| |
| // The group listens to the crossfilter for when any dimension changes, so |
| // that it can update the associated reduce values. It must also listen to |
| // the parent dimension for when data is added, and compute new keys. |
| filterListeners.push(update); |
| indexListeners.push(add); |
| |
| // Incorporate any existing data into the grouping. |
| add(values, index, 0, n); |
| |
| // Incorporates the specified new values into this group. |
| // This function is responsible for updating groups and groupIndex. |
| function add(newValues, newIndex, n0, n1) { |
| var oldGroups = groups, |
| reIndex = crossfilter_index(k, groupCapacity), |
| add = reduceAdd, |
| initial = reduceInitial, |
| k0 = k, // old cardinality |
| i0 = 0, // index of old group |
| i1 = 0, // index of new record |
| j, // object id |
| g0, // old group |
| x0, // old key |
| x1, // new key |
| g, // group to add |
| x; // key of group to add |
| |
| // If a reset is needed, we don't need to update the reduce values. |
| if (resetNeeded) add = initial = crossfilter_null; |
| |
| // Reset the new groups (k is a lower bound). |
| // Also, make sure that groupIndex exists and is long enough. |
| groups = new Array(k), k = 0; |
| groupIndex = k0 > 1 ? crossfilter_arrayLengthen(groupIndex, n) : crossfilter_index(n, groupCapacity); |
| |
| // Get the first old key (x0 of g0), if it exists. |
| if (k0) x0 = (g0 = oldGroups[0]).key; |
| |
| // Find the first new key (x1), skipping NaN keys. |
| while (i1 < n1 && !((x1 = key(newValues[i1])) >= x1)) ++i1; |
| |
| // While new keys remain… |
| while (i1 < n1) { |
| |
| // Determine the lesser of the two current keys; new and old. |
| // If there are no old keys remaining, then always add the new key. |
| if (g0 && x0 <= x1) { |
| g = g0, x = x0; |
| |
| // Record the new index of the old group. |
| reIndex[i0] = k; |
| |
| // Retrieve the next old key. |
| if (g0 = oldGroups[++i0]) x0 = g0.key; |
| } else { |
| g = {key: x1, value: initial()}, x = x1; |
| } |
| |
| // Add the lesser group. |
| groups[k] = g; |
| |
| // Add any selected records belonging to the added group, while |
| // advancing the new key and populating the associated group index. |
| while (!(x1 > x)) { |
| groupIndex[j = newIndex[i1] + n0] = k; |
| if (!(filters[j] & zero)) g.value = add(g.value, data[j]); |
| if (++i1 >= n1) break; |
| x1 = key(newValues[i1]); |
| } |
| |
| groupIncrement(); |
| } |
| |
| // Add any remaining old groups that were greater than all new keys. |
| // No incremental reduce is needed; these groups have no new records. |
| // Also record the new index of the old group. |
| while (i0 < k0) { |
| groups[reIndex[i0] = k] = oldGroups[i0++]; |
| groupIncrement(); |
| } |
| |
| // If we added any new groups before any old groups, |
| // update the group index of all the old records. |
| if (k > i0) for (i0 = 0; i0 < n0; ++i0) { |
| groupIndex[i0] = reIndex[groupIndex[i0]]; |
| } |
| |
| // Modify the update and reset behavior based on the cardinality. |
| // If the cardinality is less than or equal to one, then the groupIndex |
| // is not needed. If the cardinality is zero, then there are no records |
| // and therefore no groups to update or reset. Note that we also must |
| // change the registered listener to point to the new method. |
| j = filterListeners.indexOf(update); |
| if (k > 1) { |
| update = updateMany; |
| reset = resetMany; |
| } else { |
| if (k === 1) { |
| update = updateOne; |
| reset = resetOne; |
| } else { |
| update = crossfilter_null; |
| reset = crossfilter_null; |
| } |
| groupIndex = null; |
| } |
| filterListeners[j] = update; |
| |
| // Count the number of added groups, |
| // and widen the group index as needed. |
| function groupIncrement() { |
| if (++k === groupCapacity) { |
| reIndex = crossfilter_arrayWiden(reIndex, groupWidth <<= 1); |
| groupIndex = crossfilter_arrayWiden(groupIndex, groupWidth); |
| groupCapacity = crossfilter_capacity(groupWidth); |
| } |
| } |
| } |
| |
| // Reduces the specified selected or deselected records. |
| // This function is only used when the cardinality is greater than 1. |
| function updateMany(filterOne, added, removed) { |
| if (filterOne === one || resetNeeded) return; |
| |
| var i, |
| k, |
| n, |
| g; |
| |
| // Add the added values. |
| for (i = 0, n = added.length; i < n; ++i) { |
| if (!(filters[k = added[i]] & zero)) { |
| g = groups[groupIndex[k]]; |
| g.value = reduceAdd(g.value, data[k]); |
| } |
| } |
| |
| // Remove the removed values. |
| for (i = 0, n = removed.length; i < n; ++i) { |
| if ((filters[k = removed[i]] & zero) === filterOne) { |
| g = groups[groupIndex[k]]; |
| g.value = reduceRemove(g.value, data[k]); |
| } |
| } |
| } |
| |
| // Reduces the specified selected or deselected records. |
| // This function is only used when the cardinality is 1. |
| function updateOne(filterOne, added, removed) { |
| if (filterOne === one || resetNeeded) return; |
| |
| var i, |
| k, |
| n, |
| g = groups[0]; |
| |
| // Add the added values. |
| for (i = 0, n = added.length; i < n; ++i) { |
| if (!(filters[k = added[i]] & zero)) { |
| g.value = reduceAdd(g.value, data[k]); |
| } |
| } |
| |
| // Remove the removed values. |
| for (i = 0, n = removed.length; i < n; ++i) { |
| if ((filters[k = removed[i]] & zero) === filterOne) { |
| g.value = reduceRemove(g.value, data[k]); |
| } |
| } |
| } |
| |
| // Recomputes the group reduce values from scratch. |
| // This function is only used when the cardinality is greater than 1. |
| function resetMany() { |
| var i, |
| g; |
| |
| // Reset all group values. |
| for (i = 0; i < k; ++i) { |
| groups[i].value = reduceInitial(); |
| } |
| |
| // Add any selected records. |
| for (i = 0; i < n; ++i) { |
| if (!(filters[i] & zero)) { |
| g = groups[groupIndex[i]]; |
| g.value = reduceAdd(g.value, data[i]); |
| } |
| } |
| } |
| |
| // Recomputes the group reduce values from scratch. |
| // This function is only used when the cardinality is 1. |
| function resetOne() { |
| var i, |
| g = groups[0]; |
| |
| // Reset the singleton group values. |
| g.value = reduceInitial(); |
| |
| // Add any selected records. |
| for (i = 0; i < n; ++i) { |
| if (!(filters[i] & zero)) { |
| g.value = reduceAdd(g.value, data[i]); |
| } |
| } |
| } |
| |
| // Returns the array of group values, in the dimension's natural order. |
| function all() { |
| if (resetNeeded) reset(), resetNeeded = false; |
| return groups; |
| } |
| |
// Returns a new array containing the top k groups, in descending order by
// this group's order (the reduce value, by default).
| function top(k) { |
| var top = select(all(), 0, groups.length, k); |
| return heap.sort(top, 0, top.length); |
| } |
| |
| // Sets the reduce behavior for this group to use the specified functions. |
| // This method lazily recomputes the reduce values, waiting until needed. |
| function reduce(add, remove, initial) { |
| reduceAdd = add; |
| reduceRemove = remove; |
| reduceInitial = initial; |
| resetNeeded = true; |
| return group; |
| } |
| |
| // A convenience method for reducing by count. |
| function reduceCount() { |
| return reduce(crossfilter_reduceIncrement, crossfilter_reduceDecrement, crossfilter_zero); |
| } |
| |
| // A convenience method for reducing by sum(value). |
| function reduceSum(value) { |
| return reduce(crossfilter_reduceAdd(value), crossfilter_reduceSubtract(value), crossfilter_zero); |
| } |
| |
| // Sets the reduce order, using the specified accessor. |
| function order(value) { |
| select = heapselect_by(valueOf); |
| heap = heap_by(valueOf); |
| function valueOf(d) { return value(d.value); } |
| return group; |
| } |
| |
| // A convenience method for natural ordering by reduce value. |
| function orderNatural() { |
| return order(crossfilter_identity); |
| } |
| |
| // Returns the cardinality of this group, irrespective of any filters. |
| function size() { |
| return k; |
| } |
| |
| return reduceCount().orderNatural(); |
| } |
| |
| // A convenience function for generating a singleton group. |
| function groupAll() { |
| var g = group(crossfilter_null), all = g.all; |
| delete g.all; |
| delete g.top; |
| delete g.order; |
| delete g.orderNatural; |
| delete g.size; |
| g.value = function() { return all()[0].value; }; |
| return g; |
| } |
| |
| return dimension; |
| } |
| |
| // A convenience method for groupAll on a dummy dimension. |
| // This implementation can be optimized since it is always cardinality 1. |
| function groupAll() { |
| var group = { |
| reduce: reduce, |
| reduceCount: reduceCount, |
| reduceSum: reduceSum, |
| value: value |
| }; |
| |
| var reduceValue, |
| reduceAdd, |
| reduceRemove, |
| reduceInitial, |
| resetNeeded = true; |
| |
| // The group listens to the crossfilter for when any dimension changes, so |
| // that it can update the reduce value. It must also listen to the parent |
| // dimension for when data is added. |
| filterListeners.push(update); |
| dataListeners.push(add); |
| |
| // For consistency; actually a no-op since resetNeeded is true. |
| add(data, 0, n); |
| |
// Incorporates the specified new records into this group.
| function add(newData, n0, n1) { |
| var i; |
| |
| if (resetNeeded) return; |
| |
| // Add the added values. |
| for (i = n0; i < n; ++i) { |
| if (!filters[i]) { |
| reduceValue = reduceAdd(reduceValue, data[i]); |
| } |
| } |
| } |
| |
| // Reduces the specified selected or deselected records. |
| function update(filterOne, added, removed) { |
| var i, |
| k, |
| n; |
| |
| if (resetNeeded) return; |
| |
| // Add the added values. |
| for (i = 0, n = added.length; i < n; ++i) { |
| if (!filters[k = added[i]]) { |
| reduceValue = reduceAdd(reduceValue, data[k]); |
| } |
| } |
| |
| // Remove the removed values. |
| for (i = 0, n = removed.length; i < n; ++i) { |
| if (filters[k = removed[i]] === filterOne) { |
| reduceValue = reduceRemove(reduceValue, data[k]); |
| } |
| } |
| } |
| |
| // Recomputes the group reduce value from scratch. |
| function reset() { |
| var i; |
| |
| reduceValue = reduceInitial(); |
| |
| for (i = 0; i < n; ++i) { |
| if (!filters[i]) { |
| reduceValue = reduceAdd(reduceValue, data[i]); |
| } |
| } |
| } |
| |
| // Sets the reduce behavior for this group to use the specified functions. |
| // This method lazily recomputes the reduce value, waiting until needed. |
| function reduce(add, remove, initial) { |
| reduceAdd = add; |
| reduceRemove = remove; |
| reduceInitial = initial; |
| resetNeeded = true; |
| return group; |
| } |
| |
| // A convenience method for reducing by count. |
| function reduceCount() { |
| return reduce(crossfilter_reduceIncrement, crossfilter_reduceDecrement, crossfilter_zero); |
| } |
| |
| // A convenience method for reducing by sum(value). |
| function reduceSum(value) { |
| return reduce(crossfilter_reduceAdd(value), crossfilter_reduceSubtract(value), crossfilter_zero); |
| } |
| |
| // Returns the computed reduce value. |
| function value() { |
| if (resetNeeded) reset(), resetNeeded = false; |
| return reduceValue; |
| } |
| |
| return reduceCount(); |
| } |
| |
| // Returns the number of records in this crossfilter, irrespective of any filters. |
| function size() { |
| return n; |
| } |
| |
| return arguments.length |
| ? add(arguments[0]) |
| : crossfilter; |
| } |
| |
// Returns an array of size n, big enough to store ids smaller than m.
| function crossfilter_index(n, m) { |
| return (m < 0x101 |
| ? crossfilter_array8 : m < 0x10001 |
| ? crossfilter_array16 |
| : crossfilter_array32)(n); |
| } |
| |
| // Constructs a new array of size n, with sequential values from 0 to n - 1. |
| function crossfilter_range(n) { |
| var range = crossfilter_index(n, n); |
| for (var i = -1; ++i < n;) range[i] = i; |
| return range; |
| } |
| |
// Returns the number of distinct ids that fit in an array of the specified
// bit width (8 -> 0x100, 16 -> 0x10000, 32 -> 0x100000000).
function crossfilter_capacity(w) {
| return w === 8 |
| ? 0x100 : w === 16 |
| ? 0x10000 |
| : 0x100000000; |
| } |
| })(this); |